diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index 5a29379ba4818..17dea1ba4cdec 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -259,9 +259,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "chalk-derive" -version = "0.103.0" +version = "0.104.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb4899682de915ca7c0b025bdd0a3d34c75fe12184122fda6805a7baddaa293c" +checksum = "9ea9b1e80910f66ae87c772247591432032ef3f6a67367ff17f8343db05beafa" dependencies = [ "proc-macro2", "quote", @@ -271,43 +271,14 @@ dependencies = [ [[package]] name = "chalk-ir" -version = "0.103.0" +version = "0.104.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90a37d2ab99352b4caca135061e7b4ac67024b648c28ed0b787feec4bea4caed" +checksum = "7047a516de16226cd17344d41a319d0ea1064bf9e60bd612ab341ab4a34bbfa8" dependencies = [ "bitflags 2.9.1", "chalk-derive", ] -[[package]] -name = "chalk-recursive" -version = "0.103.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c855be60e646664bc37c2496d3dc81ca5ef60520930e5e0f0057a0575aff6c19" -dependencies = [ - "chalk-derive", - "chalk-ir", - "chalk-solve", - "rustc-hash 1.1.0", - "tracing", -] - -[[package]] -name = "chalk-solve" -version = "0.103.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "477ac6cdfd2013e9f93b09b036c2b607a67b2e728f4777b8422d55a79e9e3a34" -dependencies = [ - "chalk-derive", - "chalk-ir", - "ena", - "indexmap", - "itertools 0.12.1", - "petgraph", - "rustc-hash 1.1.0", - "tracing", -] - [[package]] name = "clap" version = "4.5.42" @@ -445,6 +416,17 @@ dependencies = [ "powerfmt", ] +[[package]] +name = "derive-where" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "510c292c8cf384b1a340b816a9a6cf2599eb8f566a44949024af88418000c50b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "derive_arbitrary" version = "1.4.1" @@ -563,12 +545,6 @@ dependencies = [ "windows-sys 0.59.0", ] -[[package]] -name = "fixedbitset" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" - [[package]] name = "flate2" version = "1.1.2" @@ -695,7 +671,8 @@ dependencies = [ "hir-ty", "indexmap", "intern", - "itertools 0.14.0", + "itertools", + "ra-ap-rustc_type_ir", "rustc-hash 2.1.1", "smallvec", "span", @@ -705,6 +682,8 @@ dependencies = [ "test-fixture", "test-utils", "tracing", + "tracing-subscriber", + "tracing-tree", "triomphe", "tt", ] @@ -725,7 +704,7 @@ dependencies = [ "hir-expand", "indexmap", "intern", - "itertools 0.14.0", + "itertools", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "mbe", "query-group-macro", @@ -759,7 +738,7 @@ dependencies = [ "either", "expect-test", "intern", - "itertools 0.14.0", + "itertools", "mbe", "parser", "query-group-macro", @@ -785,8 +764,6 @@ dependencies = [ "bitflags 2.9.1", "chalk-derive", "chalk-ir", - "chalk-recursive", - "chalk-solve", "cov-mark", "either", "ena", @@ -795,14 +772,17 @@ dependencies = [ "hir-expand", "indexmap", "intern", - "itertools 0.14.0", + "itertools", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "oorandom", "project-model", "query-group-macro", "ra-ap-rustc_abi", + "ra-ap-rustc_ast_ir", "ra-ap-rustc_index", + 
"ra-ap-rustc_next_trait_solver", "ra-ap-rustc_pattern_analysis", + "ra-ap-rustc_type_ir", "rustc-hash 2.1.1", "rustc_apfloat", "salsa", @@ -932,7 +912,7 @@ dependencies = [ "ide-db", "ide-diagnostics", "ide-ssr", - "itertools 0.14.0", + "itertools", "nohash-hasher", "oorandom", "profile", @@ -960,7 +940,7 @@ dependencies = [ "expect-test", "hir", "ide-db", - "itertools 0.14.0", + "itertools", "smallvec", "stdx", "syntax", @@ -978,7 +958,7 @@ dependencies = [ "expect-test", "hir", "ide-db", - "itertools 0.14.0", + "itertools", "smallvec", "stdx", "syntax", @@ -1001,7 +981,7 @@ dependencies = [ "fst", "hir", "indexmap", - "itertools 0.14.0", + "itertools", "line-index 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "memchr", "nohash-hasher", @@ -1032,7 +1012,7 @@ dependencies = [ "expect-test", "hir", "ide-db", - "itertools 0.14.0", + "itertools", "paths", "serde_json", "stdx", @@ -1050,7 +1030,7 @@ dependencies = [ "expect-test", "hir", "ide-db", - "itertools 0.14.0", + "itertools", "parser", "syntax", "test-fixture", @@ -1129,15 +1109,6 @@ dependencies = [ "memoffset", ] -[[package]] -name = "itertools" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" -dependencies = [ - "either", -] - [[package]] name = "itertools" version = "0.14.0" @@ -1266,7 +1237,7 @@ dependencies = [ "hir-expand", "ide-db", "intern", - "itertools 0.14.0", + "itertools", "proc-macro-api", "project-model", "span", @@ -1294,7 +1265,7 @@ checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" [[package]] name = "lsp-server" -version = "0.7.8" +version = "0.7.9" dependencies = [ "anyhow", "crossbeam-channel", @@ -1310,9 +1281,9 @@ dependencies = [ [[package]] name = "lsp-server" -version = "0.7.8" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9462c4dc73e17f971ec1f171d44bfffb72e65a130117233388a0ebc7ec5656f9" +checksum = "7d6ada348dbc2703cbe7637b2dda05cff84d3da2819c24abcb305dd613e0ba2e" dependencies = [ "crossbeam-channel", "log", @@ -1343,7 +1314,7 @@ dependencies = [ "expect-test", "intern", "parser", - "ra-ap-rustc_lexer 0.123.0", + "ra-ap-rustc_lexer", "rustc-hash 2.1.1", "smallvec", "span", @@ -1579,8 +1550,8 @@ dependencies = [ "drop_bomb", "edition", "expect-test", - "ra-ap-rustc_lexer 0.123.0", - "rustc-literal-escaper", + "ra-ap-rustc_lexer", + "rustc-literal-escaper 0.0.4", "stdx", "tracing", ] @@ -1623,16 +1594,6 @@ dependencies = [ "libc", ] -[[package]] -name = "petgraph" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" -dependencies = [ - "fixedbitset", - "indexmap", -] - [[package]] name = "pin-project-lite" version = "0.2.16" @@ -1700,7 +1661,7 @@ dependencies = [ "object", "paths", "proc-macro-test", - "ra-ap-rustc_lexer 0.123.0", + "ra-ap-rustc_lexer", "span", "syntax-bridge", "temp-dir", @@ -1767,7 +1728,7 @@ dependencies = [ "cfg", "expect-test", "intern", - "itertools 0.14.0", + "itertools", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "paths", "rustc-hash 2.1.1", @@ -1846,9 +1807,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_abi" -version = "0.123.0" +version = "0.132.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f18c877575c259d127072e9bfc41d985202262fb4d6bfdae3d1252147c2562c2" +checksum = 
"597bb303548ddcca3a2eb05af254508aaf39cf334d4350bb5da51de1eb728859" dependencies = [ "bitflags 2.9.1", "ra-ap-rustc_hashes", @@ -1856,20 +1817,26 @@ dependencies = [ "tracing", ] +[[package]] +name = "ra-ap-rustc_ast_ir" +version = "0.132.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78982b4e4432ee4b938e47bb5c8f1a5a5a88c27c782f193aefcc12a3250bd2e2" + [[package]] name = "ra-ap-rustc_hashes" -version = "0.123.0" +version = "0.132.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2439ed1df3472443133b66949f81080dff88089b42f825761455463709ee1cad" +checksum = "2f7f33a422f724cc1ab43972cdd76a556b17fc256f301d23be620adfc8351df7" dependencies = [ "rustc-stable-hash", ] [[package]] name = "ra-ap-rustc_index" -version = "0.123.0" +version = "0.132.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57a24fe0be21be1f8ebc21dcb40129214fb4cefb0f2753f3d46b6dbe656a1a45" +checksum = "8a6006023c8be18c3ac225d69c1b42f55b3f597f3db03fb40764b4cf1454fd13" dependencies = [ "ra-ap-rustc_index_macros", "smallvec", @@ -1877,9 +1844,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_index_macros" -version = "0.123.0" +version = "0.132.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "844a27ddcad0116facae2df8e741fd788662cf93dc13029cd864f2b8013b81f9" +checksum = "9217c29f7fcc30d07ed13a62262144f665410ef1460202599ae924f9ae47ad78" dependencies = [ "proc-macro2", "quote", @@ -1888,9 +1855,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_lexer" -version = "0.121.0" +version = "0.132.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22944e31fb91e9b3e75bcbc91e37d958b8c0825a6160927f2856831d2ce83b36" +checksum = "573ad4f5da620e8ba1849d8862866abd7bc765c3d81cb2488c3ecbef33ce2c69" dependencies = [ "memchr", "unicode-properties", @@ -1898,31 +1865,33 @@ dependencies = [ ] [[package]] -name = "ra-ap-rustc_lexer" -version = "0.123.0" +name = "ra-ap-rustc_next_trait_solver" +version = "0.132.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b734cfcb577d09877799a22742f1bd398be6c00bc428d9de56d48d11ece5771" +checksum = "0d42b095b99e988aeb94622ae62ebda4b7de55d7d98846eec352b8a5a2b8a858" dependencies = [ - "memchr", - "unicode-properties", - "unicode-xid", + "derive-where", + "ra-ap-rustc_index", + "ra-ap-rustc_type_ir", + "ra-ap-rustc_type_ir_macros", + "tracing", ] [[package]] name = "ra-ap-rustc_parse_format" -version = "0.121.0" +version = "0.132.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81057891bc2063ad9e353f29462fbc47a0f5072560af34428ae9313aaa5e9d97" +checksum = "a21b4e95cb45f840c172493c05f5b9471cf44adb2eccf95d76a0d76e88007870" dependencies = [ - "ra-ap-rustc_lexer 0.121.0", - "rustc-literal-escaper", + "ra-ap-rustc_lexer", + "rustc-literal-escaper 0.0.5", ] [[package]] name = "ra-ap-rustc_pattern_analysis" -version = "0.123.0" +version = "0.132.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75b0ee1f059b9dea0818c6c7267478926eee95ba4c7dcf89c8db32fa165d3904" +checksum = "b6aeacef1248066f7b67e7296ef135eeab6446d5d2a5c7f02b8d7b747b41e39b" dependencies = [ "ra-ap-rustc_index", "rustc-hash 2.1.1", @@ -1931,6 +1900,38 @@ dependencies = [ "tracing", ] +[[package]] +name = "ra-ap-rustc_type_ir" +version = "0.132.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52e35ee9e052406035016b8e6d54ca202bc39ccba1702780b33b2d5fb10d1da8" +dependencies = [ + 
"arrayvec", + "bitflags 2.9.1", + "derive-where", + "ena", + "indexmap", + "ra-ap-rustc_ast_ir", + "ra-ap-rustc_index", + "ra-ap-rustc_type_ir_macros", + "rustc-hash 2.1.1", + "smallvec", + "thin-vec", + "tracing", +] + +[[package]] +name = "ra-ap-rustc_type_ir_macros" +version = "0.132.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b934c956b0c88df8176803416b69d85d2c392a69c8aa794a4c338f22c527d38" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + [[package]] name = "rayon" version = "1.10.0" @@ -2005,9 +2006,9 @@ dependencies = [ "ide-ssr", "indexmap", "intern", - "itertools 0.14.0", + "itertools", "load-cargo", - "lsp-server 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)", + "lsp-server 0.7.9 (registry+https://github.com/rust-lang/crates.io-index)", "lsp-types", "memchr", "mimalloc", @@ -2073,6 +2074,12 @@ version = "0.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab03008eb631b703dd16978282ae36c73282e7922fe101a4bd072a40ecea7b8b" +[[package]] +name = "rustc-literal-escaper" +version = "0.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4ee29da77c5a54f42697493cd4c9b9f31b74df666a6c04dfc4fde77abe0438b" + [[package]] name = "rustc-stable-hash" version = "0.1.2" @@ -2337,7 +2344,7 @@ dependencies = [ "backtrace", "crossbeam-channel", "crossbeam-utils", - "itertools 0.14.0", + "itertools", "jod-thread", "libc", "miow", @@ -2373,12 +2380,12 @@ version = "0.0.0" dependencies = [ "either", "expect-test", - "itertools 0.14.0", + "itertools", "parser", "rayon", "rowan", "rustc-hash 2.1.1", - "rustc-literal-escaper", + "rustc-literal-escaper 0.0.4", "rustc_apfloat", "smol_str", "stdx", @@ -2671,9 +2678,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.19" +version = "0.3.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" +checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5" dependencies = [ "sharded-slab", "thread_local", @@ -2706,7 +2713,7 @@ version = "0.0.0" dependencies = [ "arrayvec", "intern", - "ra-ap-rustc_lexer 0.123.0", + "ra-ap-rustc_lexer", "stdx", "text-size", ] @@ -3222,7 +3229,7 @@ dependencies = [ "edition", "either", "flate2", - "itertools 0.14.0", + "itertools", "proc-macro2", "quote", "stdx", diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index e7cf0212bf2a8..0401367f78647 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -37,9 +37,7 @@ debug = 2 [patch.'crates-io'] # rowan = { path = "../rowan" } -# chalk-solve = { path = "../chalk/chalk-solve" } # chalk-ir = { path = "../chalk/chalk-ir" } -# chalk-recursive = { path = "../chalk/chalk-recursive" } # chalk-derive = { path = "../chalk/chalk-derive" } # line-index = { path = "lib/line-index" } # la-arena = { path = "lib/la-arena" } @@ -89,18 +87,21 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" } vfs = { path = "./crates/vfs", version = "0.0.0" } edition = { path = "./crates/edition", version = "0.0.0" } -ra-ap-rustc_lexer = { version = "0.123", default-features = false } -ra-ap-rustc_parse_format = { version = "0.121", default-features = false } -ra-ap-rustc_index = { version = "0.123", default-features = false } -ra-ap-rustc_abi = { version = "0.123", default-features = false } -ra-ap-rustc_pattern_analysis = { version = "0.123", 
default-features = false } +ra-ap-rustc_lexer = { version = "0.132", default-features = false } +ra-ap-rustc_parse_format = { version = "0.132", default-features = false } +ra-ap-rustc_index = { version = "0.132", default-features = false } +ra-ap-rustc_abi = { version = "0.132", default-features = false } +ra-ap-rustc_pattern_analysis = { version = "0.132", default-features = false } +ra-ap-rustc_ast_ir = { version = "0.132", default-features = false } +ra-ap-rustc_type_ir = { version = "0.132", default-features = false } +ra-ap-rustc_next_trait_solver = { version = "0.132", default-features = false } # local crates that aren't published to crates.io. These should not have versions. # in-tree crates that are published separately and follow semver. See lib/README.md line-index = { version = "0.1.2" } la-arena = { version = "0.3.1" } -lsp-server = { version = "0.7.8" } +lsp-server = { version = "0.7.9" } # non-local crates anyhow = "1.0.98" @@ -108,10 +109,8 @@ arrayvec = "0.7.6" bitflags = "2.9.1" cargo_metadata = "0.21.0" camino = "1.1.10" -chalk-solve = { version = "0.103.0", default-features = false } -chalk-ir = "0.103.0" -chalk-recursive = { version = "0.103.0", default-features = false } -chalk-derive = "0.103.0" +chalk-ir = "0.104.0" +chalk-derive = "0.104.0" crossbeam-channel = "0.5.15" dissimilar = "1.0.10" dot = "0.1.4" @@ -125,11 +124,11 @@ memmap2 = "0.9.5" nohash-hasher = "0.2.0" oorandom = "11.1.5" object = { version = "0.36.7", default-features = false, features = [ - "std", - "read_core", - "elf", - "macho", - "pe", + "std", + "read_core", + "elf", + "macho", + "pe", ] } process-wrap = { version = "8.2.1", features = ["std"] } pulldown-cmark-to-cmark = "10.0.4" @@ -139,9 +138,9 @@ rowan = "=0.15.15" # Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work # on impls without it salsa = { version = "0.23.0", default-features = true, features = [ - "rayon", - "salsa_unstable", - "macros", + "rayon", + "salsa_unstable", + "macros", ] } salsa-macros = "0.23.0" semver = "1.0.26" @@ -151,22 +150,22 @@ serde_json = "1.0.140" rustc-hash = "2.1.1" rustc-literal-escaper = "0.0.4" smallvec = { version = "1.15.1", features = [ - "const_new", - "union", - "const_generics", + "const_new", + "union", + "const_generics", ] } smol_str = "0.3.2" temp-dir = "0.1.16" text-size = "1.1.1" tracing = "0.1.41" tracing-tree = "0.4.0" -tracing-subscriber = { version = "0.3.19", default-features = false, features = [ - "registry", - "fmt", - "local-time", - "std", - "time", - "tracing-log", +tracing-subscriber = { version = "0.3.20", default-features = false, features = [ + "registry", + "fmt", + "local-time", + "std", + "time", + "tracing-log", ] } triomphe = { version = "0.1.14", default-features = false, features = ["std"] } url = "2.5.4" @@ -176,7 +175,7 @@ xshell = "0.2.7" dashmap = { version = "=6.1.0", features = ["raw-api", "inline"] } # We need to freeze the version of the crate, as it needs to match with dashmap hashbrown = { version = "0.14.*", features = [ - "inline-more", + "inline-more", ], default-features = false } [workspace.lints.rust] diff --git a/src/tools/rust-analyzer/README.md b/src/tools/rust-analyzer/README.md index 4360dea4a113c..cb3a41eec5a62 100644 --- a/src/tools/rust-analyzer/README.md +++ b/src/tools/rust-analyzer/README.md @@ -4,8 +4,21 @@ alt="rust-analyzer logo">

-rust-analyzer is a modular compiler frontend for the Rust language. -It is a part of a larger rls-2.0 effort to create excellent IDE support for Rust. +rust-analyzer is a language server that provides IDE functionality for +writing Rust programs. You can use it with any editor that supports +the [Language Server +Protocol](https://microsoft.github.io/language-server-protocol/) (VS +Code, Vim, Emacs, Zed, etc). + +rust-analyzer features include go-to-definition, find-all-references, +refactorings and code completion. rust-analyzer also supports +integrated formatting (with rustfmt) and integrated diagnostics (with +rustc and clippy). + +Internally, rust-analyzer is structured as a set of libraries for +analyzing Rust code. See +[Architecture](https://rust-analyzer.github.io/book/contributing/architecture.html) +in the manual. ## Quick Start diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs index 0bf4fbdfbd691..cac74778a26b0 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/input.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs @@ -295,8 +295,6 @@ impl CrateDisplayName { } } -pub type TargetLayoutLoadResult = Result, Arc>; - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub enum ReleaseChannel { Stable, @@ -929,7 +927,7 @@ mod tests { use super::{CrateGraphBuilder, CrateName, CrateOrigin, Edition::Edition2018, Env, FileId}; fn empty_ws_data() -> Arc { - Arc::new(CrateWorkspaceData { data_layout: Err("".into()), toolchain: None }) + Arc::new(CrateWorkspaceData { target: Err("".into()), toolchain: None }) } #[test] diff --git a/src/tools/rust-analyzer/crates/base-db/src/lib.rs b/src/tools/rust-analyzer/crates/base-db/src/lib.rs index b8eadb608fea5..0e411bcfae60e 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/lib.rs @@ -6,8 +6,14 @@ pub use salsa_macros; // FIXME: Rename this crate, base db is non descriptive mod change; mod input; +pub mod target; -use std::{cell::RefCell, hash::BuildHasherDefault, panic, sync::Once}; +use std::{ + cell::RefCell, + hash::BuildHasherDefault, + panic, + sync::{Once, atomic::AtomicUsize}, +}; pub use crate::{ change::FileChange, @@ -15,8 +21,7 @@ pub use crate::{ BuiltCrateData, BuiltDependency, Crate, CrateBuilder, CrateBuilderId, CrateDataBuilder, CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CratesIdMap, CratesMap, DependencyBuilder, Env, ExtraCrateData, LangCrateOrigin, ProcMacroLoadingError, - ProcMacroPaths, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult, - UniqueCrateData, + ProcMacroPaths, ReleaseChannel, SourceRoot, SourceRootId, UniqueCrateData, }, }; use dashmap::{DashMap, mapref::entry::Entry}; @@ -30,6 +35,8 @@ use triomphe::Arc; pub use vfs::{AnchoredPath, AnchoredPathBuf, FileId, VfsPath, file_set::FileSet}; pub type FxIndexSet = indexmap::IndexSet; +pub type FxIndexMap = + indexmap::IndexMap>; #[macro_export] macro_rules! 
impl_intern_key { @@ -326,13 +333,33 @@ pub trait SourceDatabase: salsa::Database { #[doc(hidden)] fn crates_map(&self) -> Arc; + + fn nonce_and_revision(&self) -> (Nonce, salsa::Revision); +} + +static NEXT_NONCE: AtomicUsize = AtomicUsize::new(0); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Nonce(usize); + +impl Default for Nonce { + #[inline] + fn default() -> Self { + Nonce::new() + } +} + +impl Nonce { + #[inline] + pub fn new() -> Nonce { + Nonce(NEXT_NONCE.fetch_add(1, std::sync::atomic::Ordering::SeqCst)) + } } /// Crate related data shared by the whole workspace. #[derive(Debug, PartialEq, Eq, Hash, Clone)] pub struct CrateWorkspaceData { - // FIXME: Consider removing this, making HirDatabase::target_data_layout an input query - pub data_layout: TargetLayoutLoadResult, + pub target: Result, /// Toolchain version used to compile the crate. pub toolchain: Option, } diff --git a/src/tools/rust-analyzer/crates/base-db/src/target.rs b/src/tools/rust-analyzer/crates/base-db/src/target.rs new file mode 100644 index 0000000000000..19d3407bf3c80 --- /dev/null +++ b/src/tools/rust-analyzer/crates/base-db/src/target.rs @@ -0,0 +1,50 @@ +//! Information about the target. + +use std::fmt; + +use triomphe::Arc; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum Arch { + // Only what we need is present here. + Wasm32, + Wasm64, + Other, +} + +#[derive(Debug, PartialEq, Eq, Hash, Clone)] +pub struct TargetData { + pub data_layout: Box, + pub arch: Arch, +} + +#[derive(Clone, PartialEq, Eq, Hash)] +pub struct TargetLoadError(Arc); + +impl fmt::Debug for TargetLoadError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Debug::fmt(&self.0, f) + } +} + +impl fmt::Display for TargetLoadError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(&self.0, f) + } +} + +impl std::error::Error for TargetLoadError {} + +impl From for TargetLoadError { + fn from(value: String) -> Self { + Self(value.into()) + } +} + +impl From<&str> for TargetLoadError { + fn from(value: &str) -> Self { + Self(value.into()) + } +} + +pub type TargetLoadResult = Result; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs index 53250510f875c..b4fcfa11aea74 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs @@ -554,7 +554,6 @@ impl AttrsWithOwner { AdtId::UnionId(it) => attrs_from_ast_id_loc(db, it), }, AttrDefId::TraitId(it) => attrs_from_ast_id_loc(db, it), - AttrDefId::TraitAliasId(it) => attrs_from_ast_id_loc(db, it), AttrDefId::MacroId(it) => match it { MacroId::Macro2Id(it) => attrs_from_ast_id_loc(db, it), MacroId::MacroRulesId(it) => attrs_from_ast_id_loc(db, it), @@ -659,7 +658,6 @@ impl AttrsWithOwner { AttrDefId::StaticId(id) => any_has_attrs(db, id), AttrDefId::ConstId(id) => any_has_attrs(db, id), AttrDefId::TraitId(id) => any_has_attrs(db, id), - AttrDefId::TraitAliasId(id) => any_has_attrs(db, id), AttrDefId::TypeAliasId(id) => any_has_attrs(db, id), AttrDefId::MacroId(id) => match id { MacroId::Macro2Id(id) => any_has_attrs(db, id), diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs index c67bb2422ac65..4e1d598623abe 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs @@ -15,8 +15,8 @@ use crate::{ EnumVariantId, EnumVariantLoc, ExternBlockId, ExternBlockLoc, 
ExternCrateId, ExternCrateLoc, FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalFieldId, Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ProcMacroId, - ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc, TraitAliasId, TraitAliasLoc, TraitId, - TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId, + ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc, TraitId, TraitLoc, TypeAliasId, + TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId, attr::{Attrs, AttrsWithOwner}, expr_store::{ Body, BodySourceMap, ExpressionStore, ExpressionStoreSourceMap, scope::ExprScopes, @@ -28,7 +28,7 @@ use crate::{ nameres::crate_def_map, signatures::{ ConstSignature, EnumSignature, FunctionSignature, ImplSignature, StaticSignature, - StructSignature, TraitAliasSignature, TraitSignature, TypeAliasSignature, UnionSignature, + StructSignature, TraitSignature, TypeAliasSignature, UnionSignature, }, tt, visibility::{self, Visibility}, @@ -69,9 +69,6 @@ pub trait InternDatabase: RootQueryDb { #[salsa::interned] fn intern_trait(&self, loc: TraitLoc) -> TraitId; - #[salsa::interned] - fn intern_trait_alias(&self, loc: TraitAliasLoc) -> TraitAliasId; - #[salsa::interned] fn intern_type_alias(&self, loc: TypeAliasLoc) -> TypeAliasId; @@ -152,11 +149,6 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase { self.function_signature_with_source_map(e).0 } - #[salsa::tracked] - fn trait_alias_signature(&self, e: TraitAliasId) -> Arc { - self.trait_alias_signature_with_source_map(e).0 - } - #[salsa::tracked] fn type_alias_signature(&self, e: TypeAliasId) -> Arc { self.type_alias_signature_with_source_map(e).0 @@ -210,12 +202,6 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase { e: FunctionId, ) -> (Arc, Arc); - #[salsa::invoke(TraitAliasSignature::query)] - fn trait_alias_signature_with_source_map( - &self, - e: TraitAliasId, - ) -> (Arc, Arc); - #[salsa::invoke(TypeAliasSignature::query)] fn type_alias_signature_with_source_map( &self, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs index 20018b61e5cc0..7d3a94b038330 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs @@ -33,8 +33,8 @@ pub mod keys { use crate::{ BlockId, ConstId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FieldId, FunctionId, - ImplId, LifetimeParamId, Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId, - TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId, + ImplId, LifetimeParamId, Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId, TraitId, + TypeAliasId, TypeOrConstParamId, UnionId, UseId, dyn_map::{DynMap, Policy}, }; @@ -48,7 +48,6 @@ pub mod keys { pub const IMPL: Key = Key::new(); pub const EXTERN_BLOCK: Key = Key::new(); pub const TRAIT: Key = Key::new(); - pub const TRAIT_ALIAS: Key = Key::new(); pub const STRUCT: Key = Key::new(); pub const UNION: Key = Key::new(); pub const ENUM: Key = Key::new(); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs index 3b9281ffb9c12..3794cb18e9360 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs @@ -33,7 +33,7 @@ use tt::TextRange; use crate::{ AdtId, BlockId, BlockLoc, DefWithBodyId, FunctionId, 
GenericDefId, ImplId, MacroId, - ModuleDefId, ModuleId, TraitAliasId, TraitId, TypeAliasId, UnresolvedMacro, + ModuleDefId, ModuleId, TraitId, TypeAliasId, UnresolvedMacro, builtin_type::BuiltinUint, db::DefDatabase, expr_store::{ @@ -252,28 +252,6 @@ pub(crate) fn lower_trait( (store, source_map, params) } -pub(crate) fn lower_trait_alias( - db: &dyn DefDatabase, - module: ModuleId, - trait_syntax: InFile, - trait_id: TraitAliasId, -) -> (ExpressionStore, ExpressionStoreSourceMap, Arc) { - let mut expr_collector = ExprCollector::new(db, module, trait_syntax.file_id); - let mut collector = generics::GenericParamsCollector::with_self_param( - &mut expr_collector, - trait_id.into(), - trait_syntax.value.type_bound_list(), - ); - collector.lower( - &mut expr_collector, - trait_syntax.value.generic_param_list(), - trait_syntax.value.where_clause(), - ); - let params = collector.finish(); - let (store, source_map) = expr_collector.store.finish(); - (store, source_map, params) -} - pub(crate) fn lower_type_alias( db: &dyn DefDatabase, module: ModuleId, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs index b81dcc1fe96df..5b9da3c5e6680 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs @@ -183,7 +183,6 @@ pub fn print_signature(db: &dyn DefDatabase, owner: GenericDefId, edition: Editi } GenericDefId::ImplId(id) => format!("unimplemented {id:?}"), GenericDefId::StaticId(id) => format!("unimplemented {id:?}"), - GenericDefId::TraitAliasId(id) => format!("unimplemented {id:?}"), GenericDefId::TraitId(id) => format!("unimplemented {id:?}"), GenericDefId::TypeAliasId(id) => format!("unimplemented {id:?}"), } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs index efb558a775816..b68674c7a74f4 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs @@ -24,7 +24,6 @@ fn lower_and_print(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expe ModuleDefId::ConstId(id) => id.into(), ModuleDefId::StaticId(id) => id.into(), ModuleDefId::TraitId(id) => id.into(), - ModuleDefId::TraitAliasId(id) => id.into(), ModuleDefId::TypeAliasId(id) => id.into(), ModuleDefId::EnumVariantId(_) => continue, ModuleDefId::BuiltinType(_) => continue, @@ -51,7 +50,6 @@ fn lower_and_print(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expe GenericDefId::ImplId(_id) => (), GenericDefId::StaticId(_id) => (), - GenericDefId::TraitAliasId(_id) => (), GenericDefId::TraitId(_id) => (), GenericDefId::TypeAliasId(_id) => (), } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs index faa0ef8ceec7b..e8a6ebcffa0a5 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs @@ -12,7 +12,7 @@ use intern::sym; use rustc_hash::FxHashSet; use crate::{ - ImportPathConfig, ModuleDefId, ModuleId, + FindPathConfig, ModuleDefId, ModuleId, db::DefDatabase, item_scope::ItemInNs, nameres::DefMap, @@ -27,7 +27,7 @@ pub fn find_path( from: ModuleId, mut prefix_kind: PrefixKind, ignore_local_imports: bool, - mut cfg: ImportPathConfig, + mut cfg: FindPathConfig, ) -> Option { let _p = 
tracing::info_span!("find_path").entered(); @@ -96,7 +96,7 @@ impl PrefixKind { struct FindPathCtx<'db> { db: &'db dyn DefDatabase, prefix: PrefixKind, - cfg: ImportPathConfig, + cfg: FindPathConfig, ignore_local_imports: bool, is_std_item: bool, from: ModuleId, @@ -718,7 +718,7 @@ mod tests { module, prefix, ignore_local_imports, - ImportPathConfig { prefer_no_std, prefer_prelude, prefer_absolute, allow_unstable }, + FindPathConfig { prefer_no_std, prefer_prelude, prefer_absolute, allow_unstable }, ); format_to!( res, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/generics.rs index 94e683cb0f8fa..60cd66bf6b082 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/hir/generics.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/generics.rs @@ -203,9 +203,6 @@ impl GenericParams { } GenericDefId::ImplId(impl_id) => db.impl_signature(impl_id).generic_params.clone(), GenericDefId::StaticId(_) => EMPTY.clone(), - GenericDefId::TraitAliasId(trait_alias_id) => { - db.trait_alias_signature(trait_alias_id).generic_params.clone() - } GenericDefId::TraitId(trait_id) => db.trait_signature(trait_id).generic_params.clone(), GenericDefId::TypeAliasId(type_alias_id) => { db.type_alias_signature(type_alias_id).generic_params.clone() @@ -246,10 +243,6 @@ impl GenericParams { let sig = db.static_signature(id); (EMPTY.clone(), sig.store.clone()) } - GenericDefId::TraitAliasId(id) => { - let sig = db.trait_alias_signature(id); - (sig.generic_params.clone(), sig.store.clone()) - } GenericDefId::TraitId(id) => { let sig = db.trait_signature(id); (sig.generic_params.clone(), sig.store.clone()) @@ -294,10 +287,6 @@ impl GenericParams { let (sig, sm) = db.static_signature_with_source_map(id); (EMPTY.clone(), sig.store.clone(), sm) } - GenericDefId::TraitAliasId(id) => { - let (sig, sm) = db.trait_alias_signature_with_source_map(id); - (sig.generic_params.clone(), sig.store.clone(), sm) - } GenericDefId::TraitId(id) => { let (sig, sm) = db.trait_signature_with_source_map(id); (sig.generic_params.clone(), sig.store.clone(), sm) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs index 8f526d1a2369a..77ed664f4443d 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs @@ -872,7 +872,6 @@ impl PerNs { PerNs::values(def, v, import.and_then(ImportOrExternCrate::import_or_glob)) } ModuleDefId::TraitId(_) => PerNs::types(def, v, import), - ModuleDefId::TraitAliasId(_) => PerNs::types(def, v, import), ModuleDefId::TypeAliasId(_) => PerNs::types(def, v, import), ModuleDefId::BuiltinType(_) => PerNs::types(def, v, import), ModuleDefId::MacroId(mac) => PerNs::macros(mac, v, import), diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs index c633339857492..f35df8d3a7e11 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs @@ -276,7 +276,6 @@ enum SmallModItem { Static(Static), Struct(Struct), Trait(Trait), - TraitAlias(TraitAlias), TypeAlias(TypeAlias), Union(Union), } @@ -404,7 +403,6 @@ ModItemId -> Static in small_data -> ast::Static, Struct in small_data -> ast::Struct, Trait in small_data -> ast::Trait, - TraitAlias in small_data -> ast::TraitAlias, TypeAlias in small_data -> ast::TypeAlias, Union in small_data -> ast::Union, Use in big_data 
-> ast::Use, @@ -583,12 +581,6 @@ pub struct Trait { pub(crate) visibility: RawVisibilityId, } -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct TraitAlias { - pub name: Name, - pub(crate) visibility: RawVisibilityId, -} - #[derive(Debug, Clone, Eq, PartialEq)] pub struct Impl {} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs index 032b287cd6a82..454e06399583c 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs @@ -23,7 +23,7 @@ use crate::{ BigModItem, Const, Enum, ExternBlock, ExternCrate, FieldsShape, Function, Impl, ImportAlias, Interned, ItemTree, ItemTreeAstId, Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, ModPath, RawAttrs, RawVisibility, RawVisibilityId, SmallModItem, - Static, Struct, StructKind, Trait, TraitAlias, TypeAlias, Union, Use, UseTree, UseTreeKind, + Static, Struct, StructKind, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind, VisibilityExplicitness, }, }; @@ -134,7 +134,6 @@ impl<'a> Ctx<'a> { ast::Item::Const(ast) => self.lower_const(ast).into(), ast::Item::Module(ast) => self.lower_module(ast)?.into(), ast::Item::Trait(ast) => self.lower_trait(ast)?.into(), - ast::Item::TraitAlias(ast) => self.lower_trait_alias(ast)?.into(), ast::Item::Impl(ast) => self.lower_impl(ast).into(), ast::Item::Use(ast) => self.lower_use(ast)?.into(), ast::Item::ExternCrate(ast) => self.lower_extern_crate(ast)?.into(), @@ -267,19 +266,6 @@ impl<'a> Ctx<'a> { Some(ast_id) } - fn lower_trait_alias( - &mut self, - trait_alias_def: &ast::TraitAlias, - ) -> Option> { - let name = trait_alias_def.name()?.as_name(); - let visibility = self.lower_visibility(trait_alias_def); - let ast_id = self.source_ast_id_map.ast_id(trait_alias_def); - - let alias = TraitAlias { name, visibility }; - self.tree.small_data.insert(ast_id.upcast(), SmallModItem::TraitAlias(alias)); - Some(ast_id) - } - fn lower_impl(&mut self, impl_def: &ast::Impl) -> ItemTreeAstId { let ast_id = self.source_ast_id_map.ast_id(impl_def); // Note that trait impls don't get implicit `Self` unlike traits, because here they are a diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs index 696174cb072bf..94a6cce3ce33a 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs @@ -8,7 +8,7 @@ use crate::{ item_tree::{ Const, DefDatabase, Enum, ExternBlock, ExternCrate, FieldsShape, Function, Impl, ItemTree, Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, RawAttrs, RawVisibilityId, Static, - Struct, Trait, TraitAlias, TypeAlias, Union, Use, UseTree, UseTreeKind, + Struct, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind, }, visibility::RawVisibility, }; @@ -250,12 +250,6 @@ impl Printer<'_> { self.print_visibility(*visibility); w!(self, "trait {} {{ ... 
}}", name.display(self.db, self.edition)); } - ModItemId::TraitAlias(ast_id) => { - let TraitAlias { name, visibility } = &self.tree[ast_id]; - self.print_ast_id(ast_id.erase()); - self.print_visibility(*visibility); - wln!(self, "trait {} = ..;", name.display(self.db, self.edition)); - } ModItemId::Impl(ast_id) => { let Impl {} = &self.tree[ast_id]; self.print_ast_id(ast_id.erase()); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs index d431f2140165e..df0705bf90cbc 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs @@ -12,7 +12,7 @@ use crate::{ StaticId, StructId, TraitId, TypeAliasId, UnionId, db::DefDatabase, expr_store::path::Path, - nameres::{assoc::TraitItems, crate_def_map}, + nameres::{assoc::TraitItems, crate_def_map, crate_local_def_map}, }; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -84,6 +84,15 @@ impl LangItemTarget { _ => None, } } + + pub fn as_adt(self) -> Option { + match self { + LangItemTarget::Union(it) => Some(it.into()), + LangItemTarget::EnumId(it) => Some(it.into()), + LangItemTarget::Struct(it) => Some(it.into()), + _ => None, + } + } } /// Salsa query. This will look for lang items in a specific crate. @@ -170,7 +179,19 @@ pub fn lang_item( { return Some(target); } - start_crate.data(db).dependencies.iter().find_map(|dep| lang_item(db, dep.crate_id, item)) + + // Our `CrateGraph` eagerly inserts sysroot dependencies like `core` or `std` into dependencies + // even if the target crate has `#![no_std]`, `#![no_core]` or shadowed sysroot dependencies + // like `dependencies.std.path = ".."`. So we use `extern_prelude()` instead of + // `CrateData.dependencies` here, which has already come through such sysroot complexities + // while nameres. + // + // See https://github.com/rust-lang/rust-analyzer/pull/20475 for details. + crate_local_def_map(db, start_crate).local(db).extern_prelude().find_map(|(_, (krate, _))| { + // Some crates declares themselves as extern crate like `extern crate self as core`. + // Ignore these to prevent cycles. + if krate.krate == start_crate { None } else { lang_item(db, krate.krate, item) } + }) } #[derive(Default, Debug, Clone, PartialEq, Eq)] @@ -277,6 +298,10 @@ impl LangItem { lang_item(db, start_crate, self).and_then(|t| t.as_trait()) } + pub fn resolve_adt(self, db: &dyn DefDatabase, start_crate: Crate) -> Option { + lang_item(db, start_crate, self).and_then(|t| t.as_adt()) + } + pub fn resolve_enum(self, db: &dyn DefDatabase, start_crate: Crate) -> Option { lang_item(db, start_crate, self).and_then(|t| t.as_enum()) } @@ -383,12 +408,17 @@ language_item_table! 
{ AsyncFnMut, sym::async_fn_mut, async_fn_mut_trait, Target::Trait, GenericRequirement::Exact(1); AsyncFnOnce, sym::async_fn_once, async_fn_once_trait, Target::Trait, GenericRequirement::Exact(1); - AsyncFnOnceOutput, sym::async_fn_once_output,async_fn_once_output, Target::AssocTy, GenericRequirement::None; + CallRefFuture, sym::call_ref_future, call_ref_future_ty, Target::AssocTy, GenericRequirement::None; + CallOnceFuture, sym::call_once_future, call_once_future_ty, Target::AssocTy, GenericRequirement::None; + AsyncFnOnceOutput, sym::async_fn_once_output, async_fn_once_output_ty, Target::AssocTy, GenericRequirement::None; + FnOnceOutput, sym::fn_once_output, fn_once_output, Target::AssocTy, GenericRequirement::None; Future, sym::future_trait, future_trait, Target::Trait, GenericRequirement::Exact(0); CoroutineState, sym::coroutine_state, coroutine_state, Target::Enum, GenericRequirement::None; Coroutine, sym::coroutine, coroutine_trait, Target::Trait, GenericRequirement::Minimum(1); + CoroutineReturn, sym::coroutine_return, coroutine_return_ty, Target::AssocTy, GenericRequirement::None; + CoroutineYield, sym::coroutine_yield, coroutine_yield_ty, Target::AssocTy, GenericRequirement::None; Unpin, sym::unpin, unpin_trait, Target::Trait, GenericRequirement::None; Pin, sym::pin, pin_type, Target::Struct, GenericRequirement::None; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs index bdf8b453e2d65..301d4cca0666c 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs @@ -101,7 +101,7 @@ use crate::{ type FxIndexMap = indexmap::IndexMap; /// A wrapper around three booleans #[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)] -pub struct ImportPathConfig { +pub struct FindPathConfig { /// If true, prefer to unconditionally use imports of the `core` and `alloc` crate /// over the std. pub prefer_no_std: bool, @@ -318,9 +318,6 @@ impl TraitId { } } -pub type TraitAliasLoc = ItemLoc; -impl_intern!(TraitAliasId, TraitAliasLoc, intern_trait_alias, lookup_intern_trait_alias); - type TypeAliasLoc = AssocItemLoc; impl_intern!(TypeAliasId, TypeAliasLoc, intern_type_alias, lookup_intern_type_alias); @@ -742,7 +739,6 @@ pub enum ModuleDefId { ConstId(ConstId), StaticId(StaticId), TraitId(TraitId), - TraitAliasId(TraitAliasId), TypeAliasId(TypeAliasId), BuiltinType(BuiltinType), MacroId(MacroId), @@ -756,7 +752,6 @@ impl_from!( ConstId, StaticId, TraitId, - TraitAliasId, TypeAliasId, BuiltinType for ModuleDefId @@ -862,7 +857,6 @@ pub enum GenericDefId { // More importantly, this completes the set of items that contain type references // which is to be used by the signature expression store in the future. 
StaticId(StaticId), - TraitAliasId(TraitAliasId), TraitId(TraitId), TypeAliasId(TypeAliasId), } @@ -872,7 +866,6 @@ impl_from!( FunctionId, ImplId, StaticId, - TraitAliasId, TraitId, TypeAliasId for GenericDefId @@ -902,7 +895,6 @@ impl GenericDefId { GenericDefId::AdtId(AdtId::UnionId(it)) => file_id_and_params_of_item_loc(db, it), GenericDefId::AdtId(AdtId::EnumId(it)) => file_id_and_params_of_item_loc(db, it), GenericDefId::TraitId(it) => file_id_and_params_of_item_loc(db, it), - GenericDefId::TraitAliasId(it) => file_id_and_params_of_item_loc(db, it), GenericDefId::ImplId(it) => file_id_and_params_of_item_loc(db, it), GenericDefId::ConstId(it) => (it.lookup(db).id.file_id, None), GenericDefId::StaticId(it) => (it.lookup(db).id.file_id, None), @@ -978,7 +970,6 @@ pub enum AttrDefId { StaticId(StaticId), ConstId(ConstId), TraitId(TraitId), - TraitAliasId(TraitAliasId), TypeAliasId(TypeAliasId), MacroId(MacroId), ImplId(ImplId), @@ -997,7 +988,6 @@ impl_from!( ConstId, FunctionId, TraitId, - TraitAliasId, TypeAliasId, MacroId(Macro2Id, MacroRulesId, ProcMacroId), ImplId, @@ -1020,7 +1010,6 @@ impl TryFrom for AttrDefId { ModuleDefId::StaticId(it) => Ok(it.into()), ModuleDefId::TraitId(it) => Ok(it.into()), ModuleDefId::TypeAliasId(it) => Ok(it.into()), - ModuleDefId::TraitAliasId(id) => Ok(id.into()), ModuleDefId::MacroId(id) => Ok(id.into()), ModuleDefId::BuiltinType(_) => Err(()), } @@ -1266,7 +1255,6 @@ impl HasModule for GenericDefId { GenericDefId::FunctionId(it) => it.module(db), GenericDefId::AdtId(it) => it.module(db), GenericDefId::TraitId(it) => it.module(db), - GenericDefId::TraitAliasId(it) => it.module(db), GenericDefId::TypeAliasId(it) => it.module(db), GenericDefId::ImplId(it) => it.module(db), GenericDefId::ConstId(it) => it.module(db), @@ -1286,7 +1274,6 @@ impl HasModule for AttrDefId { AttrDefId::StaticId(it) => it.module(db), AttrDefId::ConstId(it) => it.module(db), AttrDefId::TraitId(it) => it.module(db), - AttrDefId::TraitAliasId(it) => it.module(db), AttrDefId::TypeAliasId(it) => it.module(db), AttrDefId::ImplId(it) => it.module(db), AttrDefId::ExternBlockId(it) => it.module(db), @@ -1316,7 +1303,6 @@ impl ModuleDefId { ModuleDefId::ConstId(id) => id.module(db), ModuleDefId::StaticId(id) => id.module(db), ModuleDefId::TraitId(id) => id.module(db), - ModuleDefId::TraitAliasId(id) => id.module(db), ModuleDefId::TypeAliasId(id) => id.module(db), ModuleDefId::MacroId(id) => id.module(db), ModuleDefId::BuiltinType(_) => return None, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs index 5030585147dee..7d5e627964eb1 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs @@ -192,8 +192,6 @@ struct DefMapCrateData { exported_derives: FxHashMap>, fn_proc_macro_mapping: FxHashMap, - /// Custom attributes registered with `#![register_attr]`. - registered_attrs: Vec, /// Custom tool modules registered with `#![register_tool]`. registered_tools: Vec, /// Unstable features of Rust enabled with `#![feature(A, B)]`. 
@@ -212,7 +210,6 @@ impl DefMapCrateData { Self { exported_derives: FxHashMap::default(), fn_proc_macro_mapping: FxHashMap::default(), - registered_attrs: Vec::new(), registered_tools: PREDEFINED_TOOLS.iter().map(|it| Symbol::intern(it)).collect(), unstable_features: FxHashSet::default(), rustc_coherence_is_core: false, @@ -227,7 +224,6 @@ impl DefMapCrateData { let Self { exported_derives, fn_proc_macro_mapping, - registered_attrs, registered_tools, unstable_features, rustc_coherence_is_core: _, @@ -238,7 +234,6 @@ impl DefMapCrateData { } = self; exported_derives.shrink_to_fit(); fn_proc_macro_mapping.shrink_to_fit(); - registered_attrs.shrink_to_fit(); registered_tools.shrink_to_fit(); unstable_features.shrink_to_fit(); } @@ -529,10 +524,6 @@ impl DefMap { &self.data.registered_tools } - pub fn registered_attrs(&self) -> &[Symbol] { - &self.data.registered_attrs - } - pub fn is_unstable_feature_enabled(&self, feature: &Symbol) -> bool { self.data.unstable_features.contains(feature) } @@ -545,6 +536,10 @@ impl DefMap { self.data.no_std || self.data.no_core } + pub fn is_no_core(&self) -> bool { + self.data.no_core + } + pub fn fn_as_proc_macro(&self, id: FunctionId) -> Option { self.data.fn_proc_macro_mapping.get(&id).copied() } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs index 07210df887369..8d2a386de8ecc 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs @@ -51,10 +51,18 @@ impl TraitItems { tr: TraitId, ) -> (TraitItems, DefDiagnostics) { let ItemLoc { container: module_id, id: ast_id } = tr.lookup(db); + let ast_id_map = db.ast_id_map(ast_id.file_id); + let source = ast_id.with_value(ast_id_map.get(ast_id.value)).to_node(db); + if source.eq_token().is_some() { + // FIXME(trait-alias) probably needs special handling here + return ( + TraitItems { macro_calls: ThinVec::new(), items: Box::default() }, + DefDiagnostics::new(vec![]), + ); + } let collector = AssocItemCollector::new(db, module_id, ItemContainerId::TraitId(tr), ast_id.file_id); - let source = ast_id.with_value(collector.ast_id_map.get(ast_id.value)).to_node(db); let (items, macro_calls, diagnostics) = collector.collect(source.assoc_item_list()); (TraitItems { macro_calls, items }, DefDiagnostics::new(diagnostics)) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs index e7e96804ae737..2f56d608fcbf4 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs @@ -90,13 +90,8 @@ impl DefMap { return true; } - if segments.len() == 1 { - if find_builtin_attr_idx(name).is_some() { - return true; - } - if self.data.registered_attrs.iter().any(pred) { - return true; - } + if segments.len() == 1 && find_builtin_attr_idx(name).is_some() { + return true; } } false diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index 267c4451b9d71..a2ce538356515 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -27,10 +27,11 @@ use triomphe::Arc; use crate::{ AdtId, AssocItemId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, ExternBlockLoc, - ExternCrateId, ExternCrateLoc, 
FunctionId, FunctionLoc, ImplLoc, Intern, ItemContainerId, - LocalModuleId, Lookup, Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId, - MacroRulesLoc, MacroRulesLocFlags, ModuleDefId, ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc, - StructLoc, TraitAliasLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, UseId, UseLoc, + ExternCrateId, ExternCrateLoc, FunctionId, FunctionLoc, FxIndexMap, ImplLoc, Intern, + ItemContainerId, LocalModuleId, Lookup, Macro2Id, Macro2Loc, MacroExpander, MacroId, + MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ModuleDefId, ModuleId, ProcMacroId, + ProcMacroLoc, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, UseId, + UseLoc, attr::Attrs, db::DefDatabase, item_scope::{GlobId, ImportId, ImportOrExternCrate, PerNsGlobImports}, @@ -69,7 +70,7 @@ pub(super) fn collect_defs( // populate external prelude and dependency list let mut deps = - FxHashMap::with_capacity_and_hasher(krate.dependencies.len(), Default::default()); + FxIndexMap::with_capacity_and_hasher(krate.dependencies.len(), Default::default()); for dep in &krate.dependencies { tracing::debug!("crate dep {:?} -> {:?}", dep.name, dep.crate_id); @@ -220,7 +221,7 @@ struct DefCollector<'db> { /// Set only in case of blocks. crate_local_def_map: Option<&'db LocalDefMap>, // The dependencies of the current crate, including optional deps like `test`. - deps: FxHashMap, + deps: FxIndexMap, glob_imports: FxHashMap>, unresolved_imports: Vec, indeterminate_imports: Vec<(ImportDirective, PerNs)>, @@ -297,12 +298,6 @@ impl<'db> DefCollector<'db> { ); crate_data.unstable_features.extend(features); } - () if *attr_name == sym::register_attr => { - if let Some(ident) = attr.single_ident_value() { - crate_data.registered_attrs.push(ident.sym.clone()); - cov_mark::hit!(register_attr); - } - } () if *attr_name == sym::register_tool => { if let Some(ident) = attr.single_ident_value() { crate_data.registered_tools.push(ident.sym.clone()); @@ -332,7 +327,9 @@ impl<'db> DefCollector<'db> { let skip = dep.is_sysroot() && match dep.crate_id.data(self.db).origin { CrateOrigin::Lang(LangCrateOrigin::Core) => crate_data.no_core, - CrateOrigin::Lang(LangCrateOrigin::Std) => crate_data.no_std, + CrateOrigin::Lang(LangCrateOrigin::Std) => { + crate_data.no_core || crate_data.no_std + } _ => false, }; if skip { @@ -1954,20 +1951,6 @@ impl ModCollector<'_, '_> { false, ); } - ModItemId::TraitAlias(id) => { - let it = &self.item_tree[id]; - - let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]); - update_def( - self.def_collector, - TraitAliasLoc { container: module, id: InFile::new(self.file_id(), id) } - .intern(db) - .into(), - &it.name, - vis, - false, - ); - } ModItemId::TypeAlias(id) => { let it = &self.item_tree[id]; @@ -2564,7 +2547,7 @@ mod tests { def_map, local_def_map: LocalDefMap::default(), crate_local_def_map: None, - deps: FxHashMap::default(), + deps: FxIndexMap::default(), glob_imports: FxHashMap::default(), unresolved_imports: Vec::new(), indeterminate_imports: Vec::new(), diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs index 338851b715bf0..6afa04bc412aa 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs @@ -84,7 +84,7 @@ pub const BAZ: u32 = 0; ) .unwrap(), ), - Arc::new(CrateWorkspaceData { data_layout: Err("".into()), toolchain: None 
}), + Arc::new(CrateWorkspaceData { target: Err("".into()), toolchain: None }), ) }; let a = add_crate("a", 0); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs index a10990e6a8f9f..698292c2fbea4 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs @@ -20,7 +20,7 @@ use crate::{ EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, FxIndexMap, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId, LocalModuleId, Lookup, Macro2Id, MacroId, MacroRulesId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, - TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UseId, VariantId, + TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UseId, VariantId, builtin_type::BuiltinType, db::DefDatabase, expr_store::{ @@ -105,7 +105,6 @@ pub enum TypeNs { TypeAliasId(TypeAliasId), BuiltinType(BuiltinType), TraitId(TraitId), - TraitAliasId(TraitAliasId), ModuleId(ModuleId), } @@ -1150,7 +1149,6 @@ impl<'db> ModuleItemMap<'db> { let ty = match def.def { ModuleDefId::AdtId(it) => TypeNs::AdtId(it), ModuleDefId::TraitId(it) => TypeNs::TraitId(it), - ModuleDefId::TraitAliasId(it) => TypeNs::TraitAliasId(it), ModuleDefId::TypeAliasId(it) => TypeNs::TypeAliasId(it), ModuleDefId::BuiltinType(it) => TypeNs::BuiltinType(it), @@ -1195,7 +1193,6 @@ fn to_value_ns(per_ns: PerNs) -> Option<(ValueNs, Option)> { ModuleDefId::AdtId(AdtId::EnumId(_) | AdtId::UnionId(_)) | ModuleDefId::TraitId(_) - | ModuleDefId::TraitAliasId(_) | ModuleDefId::TypeAliasId(_) | ModuleDefId::BuiltinType(_) | ModuleDefId::MacroId(_) @@ -1214,7 +1211,6 @@ fn to_type_ns(per_ns: PerNs) -> Option<(TypeNs, Option)> { ModuleDefId::BuiltinType(it) => TypeNs::BuiltinType(it), ModuleDefId::TraitId(it) => TypeNs::TraitId(it), - ModuleDefId::TraitAliasId(it) => TypeNs::TraitAliasId(it), ModuleDefId::ModuleId(it) => TypeNs::ModuleId(it), @@ -1320,12 +1316,6 @@ impl HasResolver for TraitId { } } -impl HasResolver for TraitAliasId { - fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> { - lookup_resolver(db, self).push_generic_params_scope(db, self.into()) - } -} - impl + Copy> HasResolver for T { fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> { let def = self.into(); @@ -1410,7 +1400,6 @@ impl HasResolver for GenericDefId { GenericDefId::FunctionId(inner) => inner.resolver(db), GenericDefId::AdtId(adt) => adt.resolver(db), GenericDefId::TraitId(inner) => inner.resolver(db), - GenericDefId::TraitAliasId(inner) => inner.resolver(db), GenericDefId::TypeAliasId(inner) => inner.resolver(db), GenericDefId::ImplId(inner) => inner.resolver(db), GenericDefId::ConstId(inner) => inner.resolver(db), diff --git a/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs b/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs index 92e610b36acd0..47638610ed734 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs @@ -20,15 +20,13 @@ use triomphe::Arc; use crate::{ ConstId, EnumId, EnumVariantId, EnumVariantLoc, ExternBlockId, FunctionId, HasModule, ImplId, - ItemContainerId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, UnionId, - VariantId, + ItemContainerId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, UnionId, VariantId, attr::Attrs, db::DefDatabase, expr_store::{ ExpressionStore, ExpressionStoreSourceMap, lower::{ - ExprCollector, 
lower_function, lower_generic_params, lower_trait, lower_trait_alias, - lower_type_alias, + ExprCollector, lower_function, lower_generic_params, lower_trait, lower_type_alias, }, }, hir::{ExprId, PatId, generics::GenericParams}, @@ -395,7 +393,7 @@ impl ImplSignature { bitflags::bitflags! { #[derive(Debug, Clone, Copy, Eq, PartialEq, Default)] - pub struct TraitFlags: u8 { + pub struct TraitFlags: u16 { const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 1; const FUNDAMENTAL = 1 << 2; const UNSAFE = 1 << 3; @@ -403,6 +401,8 @@ bitflags::bitflags! { const SKIP_ARRAY_DURING_METHOD_DISPATCH = 1 << 5; const SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH = 1 << 6; const RUSTC_PAREN_SUGAR = 1 << 7; + const COINDUCTIVE = 1 << 8; + const ALIAS = 1 << 9; } } @@ -427,6 +427,9 @@ impl TraitSignature { if source.value.unsafe_token().is_some() { flags.insert(TraitFlags::UNSAFE); } + if source.value.eq_token().is_some() { + flags.insert(TraitFlags::ALIAS); + } if attrs.by_key(sym::fundamental).exists() { flags |= TraitFlags::FUNDAMENTAL; } @@ -436,6 +439,9 @@ impl TraitSignature { if attrs.by_key(sym::rustc_paren_sugar).exists() { flags |= TraitFlags::RUSTC_PAREN_SUGAR; } + if attrs.by_key(sym::rustc_coinductive).exists() { + flags |= TraitFlags::COINDUCTIVE; + } let mut skip_array_during_method_dispatch = attrs.by_key(sym::rustc_skip_array_during_method_dispatch).exists(); let mut skip_boxed_slice_during_method_dispatch = false; @@ -465,31 +471,6 @@ impl TraitSignature { } } -#[derive(Debug, PartialEq, Eq)] -pub struct TraitAliasSignature { - pub name: Name, - pub generic_params: Arc, - pub store: Arc, -} - -impl TraitAliasSignature { - pub fn query( - db: &dyn DefDatabase, - id: TraitAliasId, - ) -> (Arc, Arc) { - let loc = id.lookup(db); - - let source = loc.source(db); - let name = as_name_opt(source.value.name()); - let (store, source_map, generic_params) = lower_trait_alias(db, loc.container, source, id); - - ( - Arc::new(TraitAliasSignature { generic_params, store: Arc::new(store), name }), - Arc::new(source_map), - ) - } -} - bitflags! { #[derive(Debug, Clone, Copy, Eq, PartialEq, Default)] pub struct FnFlags: u16 { @@ -508,6 +489,7 @@ bitflags! { const HAS_TARGET_FEATURE = 1 << 9; const DEPRECATED_SAFE_2024 = 1 << 10; const EXPLICIT_SAFE = 1 << 11; + const RUSTC_INTRINSIC = 1 << 12; } } @@ -541,6 +523,9 @@ impl FunctionSignature { if attrs.by_key(sym::target_feature).exists() { flags.insert(FnFlags::HAS_TARGET_FEATURE); } + if attrs.by_key(sym::rustc_intrinsic).exists() { + flags.insert(FnFlags::RUSTC_INTRINSIC); + } let legacy_const_generics_indices = attrs.rustc_legacy_const_generics(); let source = loc.source(db); @@ -636,6 +621,21 @@ impl FunctionSignature { pub fn has_target_feature(&self) -> bool { self.flags.contains(FnFlags::HAS_TARGET_FEATURE) } + + pub fn is_intrinsic(db: &dyn DefDatabase, id: FunctionId) -> bool { + let data = db.function_signature(id); + data.flags.contains(FnFlags::RUSTC_INTRINSIC) + // Keep this around for a bit until extern "rustc-intrinsic" abis are no longer used + || match &data.abi { + Some(abi) => *abi == sym::rust_dash_intrinsic, + None => match id.lookup(db).container { + ItemContainerId::ExternBlockId(block) => { + block.abi(db) == Some(sym::rust_dash_intrinsic) + } + _ => false, + }, + } + } } bitflags! 
{ diff --git a/src/tools/rust-analyzer/crates/hir-def/src/src.rs b/src/tools/rust-analyzer/crates/hir-def/src/src.rs index aa373a27b0d52..367b543cf9080 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/src.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/src.rs @@ -71,7 +71,7 @@ impl HasChildSource> for UseId { } impl HasChildSource for GenericDefId { - type Value = Either; + type Value = Either; fn child_source( &self, db: &dyn DefDatabase, @@ -89,12 +89,7 @@ impl HasChildSource for GenericDefId { GenericDefId::TraitId(id) => { let trait_ref = id.lookup(db).source(db).value; let idx = idx_iter.next().unwrap(); - params.insert(idx, Either::Right(ast::TraitOrAlias::Trait(trait_ref))); - } - GenericDefId::TraitAliasId(id) => { - let alias = id.lookup(db).source(db).value; - let idx = idx_iter.next().unwrap(); - params.insert(idx, Either::Right(ast::TraitOrAlias::TraitAlias(alias))); + params.insert(idx, Either::Right(trait_ref)); } _ => {} } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs index e30a5b65a1f79..1e2f354f975cb 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs @@ -3,7 +3,7 @@ use std::{fmt, panic, sync::Mutex}; use base_db::{ - Crate, CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, RootQueryDb, + Crate, CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, Nonce, RootQueryDb, SourceDatabase, SourceRoot, SourceRootId, SourceRootInput, }; use hir_expand::{InFile, files::FilePosition}; @@ -20,12 +20,12 @@ use crate::{ }; #[salsa_macros::db] -#[derive(Clone)] pub(crate) struct TestDB { storage: salsa::Storage, files: Arc, crates_map: Arc, events: Arc>>>, + nonce: Nonce, } impl Default for TestDB { @@ -44,6 +44,7 @@ impl Default for TestDB { events, files: Default::default(), crates_map: Default::default(), + nonce: Nonce::new(), }; this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH); // This needs to be here otherwise `CrateGraphBuilder` panics. 
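The test_db.rs hunks on either side of this point drop TestDB's derived `Clone` in favour of a hand-written impl: the clone keeps sharing the salsa storage, files and crates map, but mints a fresh `Nonce`, and the new `nonce_and_revision` accessor pairs that identity with the current salsa revision. A minimal sketch of the pattern follows; the counter-based `Nonce` and the `Db` struct are simplified stand-ins (only `Nonce::new()` is visible in this diff, so the rest of the type is an assumption), not the real `base_db` API.

    // Sketch only: illustrates "share the storage, regenerate the identity on clone".
    use std::sync::atomic::{AtomicU64, Ordering};

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct Nonce(u64); // assumed stand-in for base_db::Nonce

    impl Nonce {
        fn new() -> Self {
            static COUNTER: AtomicU64 = AtomicU64::new(0);
            Nonce(COUNTER.fetch_add(1, Ordering::Relaxed))
        }
    }

    struct Db {
        data: std::sync::Arc<Vec<u32>>, // stands in for the shared salsa storage
        nonce: Nonce,
    }

    impl Clone for Db {
        fn clone(&self) -> Self {
            // Storage is shared, but the clone gets its own identity.
            Db { data: self.data.clone(), nonce: Nonce::new() }
        }
    }

    fn main() {
        let a = Db { data: Default::default(), nonce: Nonce::new() };
        let b = a.clone();
        assert_ne!(a.nonce, b.nonce); // two handles over the same data stay distinguishable
    }

The visible effect of regenerating the nonce in `clone` is that two database handles backed by the same storage can still be told apart by their `(Nonce, Revision)` pair.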
@@ -53,6 +54,18 @@ impl Default for TestDB { } } +impl Clone for TestDB { + fn clone(&self) -> Self { + Self { + storage: self.storage.clone(), + files: self.files.clone(), + crates_map: self.crates_map.clone(), + events: self.events.clone(), + nonce: Nonce::new(), + } + } +} + #[salsa_macros::db] impl salsa::Database for TestDB {} @@ -117,6 +130,10 @@ impl SourceDatabase for TestDB { fn crates_map(&self) -> Arc { self.crates_map.clone() } + + fn nonce_and_revision(&self) -> (Nonce, salsa::Revision) { + (self.nonce, salsa::plumbing::ZalsaDatabase::zalsa(self).current_revision()) + } } impl TestDB { diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml index 7cc0a26d37c80..138d02e5a6105 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml @@ -24,9 +24,7 @@ oorandom = "11.1.5" tracing.workspace = true rustc-hash.workspace = true scoped-tls = "1.0.1" -chalk-solve.workspace = true chalk-ir.workspace = true -chalk-recursive.workspace = true chalk-derive.workspace = true la-arena.workspace = true triomphe.workspace = true @@ -40,7 +38,14 @@ salsa-macros.workspace = true ra-ap-rustc_abi.workspace = true ra-ap-rustc_index.workspace = true ra-ap-rustc_pattern_analysis.workspace = true +ra-ap-rustc_ast_ir.workspace = true +ra-ap-rustc_type_ir.workspace = true +ra-ap-rustc_next_trait_solver.workspace = true +# These moved to dev deps if `setup_tracing` was a macro and dependents also +# included these +tracing-subscriber.workspace = true +tracing-tree.workspace = true # local deps stdx.workspace = true @@ -53,9 +58,6 @@ span.workspace = true [dev-dependencies] expect-test = "1.5.1" -tracing.workspace = true -tracing-subscriber.workspace = true -tracing-tree.workspace = true project-model.workspace = true # local deps diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs index 26ca7fb9a15ec..fd60ffcf24b0a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs @@ -3,27 +3,28 @@ //! reference to a type with the field `bar`. This is an approximation of the //! logic in rustc (which lives in rustc_hir_analysis/check/autoderef.rs). -use std::mem; +use std::fmt; -use chalk_ir::cast::Cast; -use hir_def::lang_item::LangItem; -use hir_expand::name::Name; -use intern::sym; +use hir_def::{TypeAliasId, lang_item::LangItem}; +use rustc_type_ir::inherent::{IntoKind, Ty as _}; +use tracing::debug; use triomphe::Arc; +use crate::next_solver::infer::InferOk; use crate::{ - Canonical, Goal, Interner, ProjectionTyExt, TraitEnvironment, Ty, TyBuilder, TyKind, - db::HirDatabase, infer::unify::InferenceTable, + TraitEnvironment, + db::HirDatabase, + infer::unify::InferenceTable, + next_solver::{ + Ty, TyKind, + infer::traits::{ObligationCause, PredicateObligations}, + mapping::{ChalkToNextSolver, NextSolverToChalk}, + obligation_ctxt::ObligationCtxt, + }, }; const AUTODEREF_RECURSION_LIMIT: usize = 20; -#[derive(Debug)] -pub(crate) enum AutoderefKind { - Builtin, - Overloaded, -} - /// Returns types that `ty` transitively dereferences to. This function is only meant to be used /// outside `hir-ty`. 
/// @@ -34,16 +35,17 @@ pub(crate) enum AutoderefKind { pub fn autoderef( db: &dyn HirDatabase, env: Arc, - ty: Canonical, -) -> impl Iterator { + ty: crate::Canonical, +) -> impl Iterator { let mut table = InferenceTable::new(db, env); + let interner = table.interner; let ty = table.instantiate_canonical(ty); - let mut autoderef = Autoderef::new_no_tracking(&mut table, ty, false, false); + let mut autoderef = Autoderef::new_no_tracking(&mut table, ty.to_nextsolver(interner)); let mut v = Vec::new(); while let Some((ty, _steps)) = autoderef.next() { // `ty` may contain unresolved inference variables. Since there's no chance they would be // resolved, just replace with fallback type. - let resolved = autoderef.table.resolve_completely(ty); + let resolved = autoderef.table.resolve_completely(ty.to_chalk(interner)); // If the deref chain contains a cycle (e.g. `A` derefs to `B` and `B` derefs to `A`), we // would revisit some already visited types. Stop here to avoid duplication. @@ -59,176 +61,267 @@ pub fn autoderef( v.into_iter() } -trait TrackAutoderefSteps { +pub(crate) trait TrackAutoderefSteps<'db>: Default + fmt::Debug { fn len(&self) -> usize; - fn push(&mut self, kind: AutoderefKind, ty: &Ty); + fn push(&mut self, ty: Ty<'db>, kind: AutoderefKind); } -impl TrackAutoderefSteps for usize { +impl<'db> TrackAutoderefSteps<'db> for usize { fn len(&self) -> usize { *self } - fn push(&mut self, _: AutoderefKind, _: &Ty) { + fn push(&mut self, _: Ty<'db>, _: AutoderefKind) { *self += 1; } } -impl TrackAutoderefSteps for Vec<(AutoderefKind, Ty)> { +impl<'db> TrackAutoderefSteps<'db> for Vec<(Ty<'db>, AutoderefKind)> { fn len(&self) -> usize { self.len() } - fn push(&mut self, kind: AutoderefKind, ty: &Ty) { - self.push((kind, ty.clone())); + fn push(&mut self, ty: Ty<'db>, kind: AutoderefKind) { + self.push((ty, kind)); } } -#[derive(Debug)] -pub(crate) struct Autoderef<'table, 'db, T = Vec<(AutoderefKind, Ty)>> { - pub(crate) table: &'table mut InferenceTable<'db>, - ty: Ty, - at_start: bool, - steps: T, - explicit: bool, - use_receiver_trait: bool, +#[derive(Copy, Clone, Debug)] +pub(crate) enum AutoderefKind { + /// A true pointer type, such as `&T` and `*mut T`. + Builtin, + /// A type which must dispatch to a `Deref` implementation. + Overloaded, } -impl<'table, 'db> Autoderef<'table, 'db> { - pub(crate) fn new( - table: &'table mut InferenceTable<'db>, - ty: Ty, - explicit: bool, - use_receiver_trait: bool, - ) -> Self { - let ty = table.resolve_ty_shallow(&ty); - Autoderef { table, ty, at_start: true, steps: Vec::new(), explicit, use_receiver_trait } - } - - pub(crate) fn steps(&self) -> &[(AutoderefKind, Ty)] { - &self.steps - } +struct AutoderefSnapshot<'db, Steps> { + at_start: bool, + reached_recursion_limit: bool, + steps: Steps, + cur_ty: Ty<'db>, + obligations: PredicateObligations<'db>, } -impl<'table, 'db> Autoderef<'table, 'db, usize> { - pub(crate) fn new_no_tracking( - table: &'table mut InferenceTable<'db>, - ty: Ty, - explicit: bool, - use_receiver_trait: bool, - ) -> Self { - let ty = table.resolve_ty_shallow(&ty); - Autoderef { table, ty, at_start: true, steps: 0, explicit, use_receiver_trait } - } +#[derive(Clone, Copy)] +struct AutoderefTraits { + trait_target: TypeAliasId, } -#[allow(private_bounds)] -impl Autoderef<'_, '_, T> { - pub(crate) fn step_count(&self) -> usize { - self.steps.len() - } +/// Recursively dereference a type, considering both built-in +/// dereferences (`*`) and the `Deref` trait. 
+/// Although called `Autoderef` it can be configured to use the +/// `Receiver` trait instead of the `Deref` trait. +pub(crate) struct Autoderef<'a, 'db, Steps = Vec<(Ty<'db>, AutoderefKind)>> { + // Meta infos: + pub(crate) table: &'a mut InferenceTable<'db>, + traits: Option, - pub(crate) fn final_ty(&self) -> Ty { - self.ty.clone() - } + // Current state: + state: AutoderefSnapshot<'db, Steps>, + + // Configurations: + include_raw_pointers: bool, + use_receiver_trait: bool, } -impl Iterator for Autoderef<'_, '_, T> { - type Item = (Ty, usize); +impl<'a, 'db, Steps: TrackAutoderefSteps<'db>> Iterator for Autoderef<'a, 'db, Steps> { + type Item = (Ty<'db>, usize); - #[tracing::instrument(skip_all)] fn next(&mut self) -> Option { - if mem::take(&mut self.at_start) { - return Some((self.ty.clone(), 0)); + debug!("autoderef: steps={:?}, cur_ty={:?}", self.state.steps, self.state.cur_ty); + if self.state.at_start { + self.state.at_start = false; + debug!("autoderef stage #0 is {:?}", self.state.cur_ty); + return Some((self.state.cur_ty, 0)); } - if self.steps.len() > AUTODEREF_RECURSION_LIMIT { + // If we have reached the recursion limit, error gracefully. + if self.state.steps.len() >= AUTODEREF_RECURSION_LIMIT { + self.state.reached_recursion_limit = true; return None; } - let (kind, new_ty) = - autoderef_step(self.table, self.ty.clone(), self.explicit, self.use_receiver_trait)?; + if self.state.cur_ty.is_ty_var() { + return None; + } + + // Otherwise, deref if type is derefable: + // NOTE: in the case of self.use_receiver_trait = true, you might think it would + // be better to skip this clause and use the Overloaded case only, since &T + // and &mut T implement Receiver. But built-in derefs apply equally to Receiver + // and Deref, and this has benefits for const and the emitted MIR. + let (kind, new_ty) = if let Some(ty) = + self.state.cur_ty.builtin_deref(self.table.db, self.include_raw_pointers) + { + debug_assert_eq!(ty, self.table.infer_ctxt.resolve_vars_if_possible(ty)); + // NOTE: we may still need to normalize the built-in deref in case + // we have some type like `&::Assoc`, since users of + // autoderef expect this type to have been structurally normalized. + if let TyKind::Alias(..) = ty.kind() { + let (normalized_ty, obligations) = structurally_normalize_ty(self.table, ty)?; + self.state.obligations.extend(obligations); + (AutoderefKind::Builtin, normalized_ty) + } else { + (AutoderefKind::Builtin, ty) + } + } else if let Some(ty) = self.overloaded_deref_ty(self.state.cur_ty) { + // The overloaded deref check already normalizes the pointee type. 
+ (AutoderefKind::Overloaded, ty) + } else { + return None; + }; - self.steps.push(kind, &self.ty); - self.ty = new_ty; + self.state.steps.push(self.state.cur_ty, kind); + debug!( + "autoderef stage #{:?} is {:?} from {:?}", + self.step_count(), + new_ty, + (self.state.cur_ty, kind) + ); + self.state.cur_ty = new_ty; - Some((self.ty.clone(), self.step_count())) + Some((self.state.cur_ty, self.step_count())) } } -pub(crate) fn autoderef_step( - table: &mut InferenceTable<'_>, - ty: Ty, - explicit: bool, - use_receiver_trait: bool, -) -> Option<(AutoderefKind, Ty)> { - if let Some(derefed) = builtin_deref(table.db, &ty, explicit) { - Some((AutoderefKind::Builtin, table.resolve_ty_shallow(derefed))) - } else { - Some((AutoderefKind::Overloaded, deref_by_trait(table, ty, use_receiver_trait)?)) +impl<'a, 'db> Autoderef<'a, 'db> { + pub(crate) fn new(table: &'a mut InferenceTable<'db>, base_ty: Ty<'db>) -> Self { + Self::new_impl(table, base_ty) } } -pub(crate) fn builtin_deref<'ty>( - db: &dyn HirDatabase, - ty: &'ty Ty, - explicit: bool, -) -> Option<&'ty Ty> { - match ty.kind(Interner) { - TyKind::Ref(.., ty) => Some(ty), - TyKind::Raw(.., ty) if explicit => Some(ty), - &TyKind::Adt(chalk_ir::AdtId(adt), ref substs) if crate::lang_items::is_box(db, adt) => { - substs.at(Interner, 0).ty(Interner) - } - _ => None, +impl<'a, 'db> Autoderef<'a, 'db, usize> { + pub(crate) fn new_no_tracking(table: &'a mut InferenceTable<'db>, base_ty: Ty<'db>) -> Self { + Self::new_impl(table, base_ty) } } -pub(crate) fn deref_by_trait( - table @ &mut InferenceTable { db, .. }: &mut InferenceTable<'_>, - ty: Ty, - use_receiver_trait: bool, -) -> Option { - let _p = tracing::info_span!("deref_by_trait").entered(); - if table.resolve_ty_shallow(&ty).inference_var(Interner).is_some() { - // don't try to deref unknown variables - return None; +impl<'a, 'db, Steps: TrackAutoderefSteps<'db>> Autoderef<'a, 'db, Steps> { + fn new_impl(table: &'a mut InferenceTable<'db>, base_ty: Ty<'db>) -> Self { + Autoderef { + state: AutoderefSnapshot { + steps: Steps::default(), + cur_ty: table.infer_ctxt.resolve_vars_if_possible(base_ty), + obligations: PredicateObligations::new(), + at_start: true, + reached_recursion_limit: false, + }, + table, + traits: None, + include_raw_pointers: false, + use_receiver_trait: false, + } } - let trait_id = || { - // FIXME: Remove the `false` once `Receiver` needs to be stabilized, doing so will - // effectively bump the MSRV of rust-analyzer to 1.84 due to 1.83 and below lacking the - // blanked impl on `Deref`. - #[expect(clippy::overly_complex_bool_expr)] - if use_receiver_trait - && false - && let Some(receiver) = LangItem::Receiver.resolve_trait(db, table.trait_env.krate) - { - return Some(receiver); + fn autoderef_traits(&mut self) -> Option { + match &mut self.traits { + Some(it) => Some(*it), + None => { + let traits = if self.use_receiver_trait { + AutoderefTraits { + trait_target: LangItem::ReceiverTarget + .resolve_type_alias(self.table.db, self.table.trait_env.krate) + .or_else(|| { + LangItem::DerefTarget + .resolve_type_alias(self.table.db, self.table.trait_env.krate) + })?, + } + } else { + AutoderefTraits { + trait_target: LangItem::DerefTarget + .resolve_type_alias(self.table.db, self.table.trait_env.krate)?, + } + }; + Some(*self.traits.insert(traits)) + } } - // Old rustc versions might not have `Receiver` trait. 
- // Fallback to `Deref` if they don't - LangItem::Deref.resolve_trait(db, table.trait_env.krate) - }; - let trait_id = trait_id()?; - let target = - trait_id.trait_items(db).associated_type_by_name(&Name::new_symbol_root(sym::Target))?; - - let projection = { - let b = TyBuilder::subst_for_def(db, trait_id, None); - if b.remaining() != 1 { - // the Target type + Deref trait should only have one generic parameter, - // namely Deref's Self type - return None; - } - let deref_subst = b.push(ty).build(); - TyBuilder::assoc_type_projection(db, target, Some(deref_subst)).build() + } + + fn overloaded_deref_ty(&mut self, ty: Ty<'db>) -> Option> { + debug!("overloaded_deref_ty({:?})", ty); + let interner = self.table.interner; + + // , or whatever the equivalent trait is that we've been asked to walk. + let AutoderefTraits { trait_target } = self.autoderef_traits()?; + + let (normalized_ty, obligations) = structurally_normalize_ty( + self.table, + Ty::new_projection(interner, trait_target.into(), [ty]), + )?; + debug!("overloaded_deref_ty({:?}) = ({:?}, {:?})", ty, normalized_ty, obligations); + self.state.obligations.extend(obligations); + + Some(self.table.infer_ctxt.resolve_vars_if_possible(normalized_ty)) + } + + /// Returns the final type we ended up with, which may be an unresolved + /// inference variable. + pub(crate) fn final_ty(&self) -> Ty<'db> { + self.state.cur_ty + } + + pub(crate) fn step_count(&self) -> usize { + self.state.steps.len() + } + + pub(crate) fn take_obligations(&mut self) -> PredicateObligations<'db> { + std::mem::take(&mut self.state.obligations) + } + + pub(crate) fn steps(&self) -> &Steps { + &self.state.steps + } + + #[expect(dead_code)] + pub(crate) fn reached_recursion_limit(&self) -> bool { + self.state.reached_recursion_limit + } + + /// also dereference through raw pointer types + /// e.g., assuming ptr_to_Foo is the type `*const Foo` + /// fcx.autoderef(span, ptr_to_Foo) => [*const Foo] + /// fcx.autoderef(span, ptr_to_Foo).include_raw_ptrs() => [*const Foo, Foo] + pub(crate) fn include_raw_pointers(mut self) -> Self { + self.include_raw_pointers = true; + self + } + + /// Use `core::ops::Receiver` and `core::ops::Receiver::Target` as + /// the trait and associated type to iterate, instead of + /// `core::ops::Deref` and `core::ops::Deref::Target` + pub(crate) fn use_receiver_trait(mut self) -> Self { + self.use_receiver_trait = true; + self + } +} + +fn structurally_normalize_ty<'db>( + table: &InferenceTable<'db>, + ty: Ty<'db>, +) -> Option<(Ty<'db>, PredicateObligations<'db>)> { + let mut ocx = ObligationCtxt::new(&table.infer_ctxt); + let Ok(normalized_ty) = + ocx.structurally_normalize_ty(&ObligationCause::misc(), table.param_env, ty) + else { + // We shouldn't have errors here in the old solver, except for + // evaluate/fulfill mismatches, but that's not a reason for an ICE. 
+ return None; }; + let errors = ocx.select_where_possible(); + if !errors.is_empty() { + unreachable!(); + } + + Some((normalized_ty, ocx.into_pending_obligations())) +} + +pub(crate) fn overloaded_deref_ty<'db>( + table: &InferenceTable<'db>, + ty: Ty<'db>, +) -> Option>> { + let interner = table.interner; - // Check that the type implements Deref at all - let trait_ref = projection.trait_ref(db); - let implements_goal: Goal = trait_ref.cast(Interner); - table.try_obligation(implements_goal.clone())?; + let trait_target = LangItem::DerefTarget.resolve_type_alias(table.db, table.trait_env.krate)?; - table.register_obligation(implements_goal); + let (normalized_ty, obligations) = + structurally_normalize_ty(table, Ty::new_projection(interner, trait_target.into(), [ty]))?; - let result = table.normalize_projection_ty(projection); - Some(table.resolve_ty_shallow(&result)) + Some(InferOk { value: normalized_ty, obligations }) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs index 8af8fb73f344e..3755175cf5163 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs @@ -1,16 +1,12 @@ //! `TyBuilder`, a helper for building instances of `Ty` and related types. -use std::iter; - use chalk_ir::{ AdtId, DebruijnIndex, Scalar, cast::{Cast, CastTo, Caster}, fold::TypeFoldable, interner::HasInterner, }; -use hir_def::{ - DefWithBodyId, GenericDefId, GenericParamId, TraitId, TypeAliasId, builtin_type::BuiltinType, -}; +use hir_def::{GenericDefId, GenericParamId, TraitId, TypeAliasId, builtin_type::BuiltinType}; use smallvec::SmallVec; use crate::{ @@ -246,47 +242,6 @@ impl TyBuilder<()> { TyBuilder::new((), params, parent_subst) } - /// Creates a `TyBuilder` to build `Substitution` for a coroutine defined in `parent`. - /// - /// A coroutine's substitution consists of: - /// - resume type of coroutine - /// - yield type of coroutine ([`Coroutine::Yield`](std::ops::Coroutine::Yield)) - /// - return type of coroutine ([`Coroutine::Return`](std::ops::Coroutine::Return)) - /// - generic parameters in scope on `parent` - /// - /// in this order. - /// - /// This method prepopulates the builder with placeholder substitution of `parent`, so you - /// should only push exactly 3 `GenericArg`s before building. - pub fn subst_for_coroutine(db: &dyn HirDatabase, parent: DefWithBodyId) -> TyBuilder<()> { - let parent_subst = - parent.as_generic_def_id(db).map(|p| generics(db, p).placeholder_subst(db)); - // These represent resume type, yield type, and return type of coroutine. 
- let params = std::iter::repeat_n(ParamKind::Type, 3).collect(); - TyBuilder::new((), params, parent_subst) - } - - pub fn subst_for_closure( - db: &dyn HirDatabase, - parent: DefWithBodyId, - sig_ty: Ty, - ) -> Substitution { - let sig_ty = sig_ty.cast(Interner); - let self_subst = iter::once(&sig_ty); - let Some(parent) = parent.as_generic_def_id(db) else { - return Substitution::from_iter(Interner, self_subst); - }; - Substitution::from_iter( - Interner, - generics(db, parent) - .placeholder_subst(db) - .iter(Interner) - .chain(self_subst) - .cloned() - .collect::>(), - ) - } - pub fn build(self) -> Substitution { let ((), subst) = self.build_internal(); subst diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs index 3ba7c93d4fb76..546991cf6571e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs @@ -1,602 +1,15 @@ //! The implementation of `RustIrDatabase` for Chalk, which provides information //! about the code that Chalk needs. -use core::ops; -use std::{iter, ops::ControlFlow, sync::Arc}; +use hir_def::{CallableDefId, GenericDefId}; -use hir_expand::name::Name; -use intern::sym; -use span::Edition; -use tracing::debug; - -use chalk_ir::{CanonicalVarKinds, cast::Caster, fold::shift::Shift}; -use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait}; - -use base_db::Crate; -use hir_def::{ - AssocItemId, BlockId, CallableDefId, GenericDefId, HasModule, ItemContainerId, Lookup, - TypeAliasId, VariantId, - hir::Movability, - lang_item::LangItem, - signatures::{ImplFlags, StructFlags, TraitFlags}, -}; - -use crate::{ - AliasEq, AliasTy, BoundVar, DebruijnIndex, Interner, ProjectionTy, ProjectionTyExt, - QuantifiedWhereClause, Substitution, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, TyKind, - WhereClause, - db::{HirDatabase, InternedCoroutine}, - from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, - generics::generics, - lower::LifetimeElisionKind, - make_binders, make_single_type_binders, - mapping::{ToChalk, TypeAliasAsValue, from_chalk}, - method_resolution::{ALL_FLOAT_FPS, ALL_INT_FPS, TraitImpls, TyFingerprint}, - to_assoc_type_id, to_chalk_trait_id, - traits::ChalkContext, - utils::ClosureSubst, - wrap_empty_binders, -}; - -pub(crate) type AssociatedTyDatum = chalk_solve::rust_ir::AssociatedTyDatum; -pub(crate) type TraitDatum = chalk_solve::rust_ir::TraitDatum; -pub(crate) type AdtDatum = chalk_solve::rust_ir::AdtDatum; -pub(crate) type ImplDatum = chalk_solve::rust_ir::ImplDatum; -pub(crate) type OpaqueTyDatum = chalk_solve::rust_ir::OpaqueTyDatum; +use crate::{Interner, Substitution, db::HirDatabase, mapping::from_chalk}; pub(crate) type AssocTypeId = chalk_ir::AssocTypeId; pub(crate) type TraitId = chalk_ir::TraitId; pub(crate) type AdtId = chalk_ir::AdtId; pub(crate) type ImplId = chalk_ir::ImplId; -pub(crate) type AssociatedTyValueId = chalk_solve::rust_ir::AssociatedTyValueId; -pub(crate) type AssociatedTyValue = chalk_solve::rust_ir::AssociatedTyValue; -pub(crate) type FnDefDatum = chalk_solve::rust_ir::FnDefDatum; pub(crate) type Variances = chalk_ir::Variances; -impl chalk_solve::RustIrDatabase for ChalkContext<'_> { - fn associated_ty_data(&self, id: AssocTypeId) -> Arc { - self.db.associated_ty_data(from_assoc_type_id(id)) - } - fn associated_ty_from_impl( - &self, - impl_id: chalk_ir::ImplId, - assoc_type_id: chalk_ir::AssocTypeId, - ) -> Option> { - let alias_id = from_assoc_type_id(assoc_type_id); - 
let trait_sig = self.db.type_alias_signature(alias_id); - hir_def::ImplId::from_chalk(self.db, impl_id).impl_items(self.db).items.iter().find_map( - |(name, item)| match item { - AssocItemId::TypeAliasId(alias) if &trait_sig.name == name => { - Some(TypeAliasAsValue(*alias).to_chalk(self.db)) - } - _ => None, - }, - ) - } - fn trait_datum(&self, trait_id: TraitId) -> Arc { - self.db.trait_datum(self.krate, trait_id) - } - fn adt_datum(&self, struct_id: AdtId) -> Arc { - self.db.adt_datum(self.krate, struct_id) - } - fn adt_repr(&self, _struct_id: AdtId) -> Arc> { - // FIXME: keep track of these - Arc::new(rust_ir::AdtRepr { c: false, packed: false, int: None }) - } - fn discriminant_type(&self, ty: chalk_ir::Ty) -> chalk_ir::Ty { - if let chalk_ir::TyKind::Adt(id, _) = ty.kind(Interner) - && let hir_def::AdtId::EnumId(e) = id.0 - { - let enum_data = self.db.enum_signature(e); - let ty = enum_data.repr.unwrap_or_default().discr_type(); - return chalk_ir::TyKind::Scalar(match ty { - hir_def::layout::IntegerType::Pointer(is_signed) => match is_signed { - true => chalk_ir::Scalar::Int(chalk_ir::IntTy::Isize), - false => chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize), - }, - hir_def::layout::IntegerType::Fixed(size, is_signed) => match is_signed { - true => chalk_ir::Scalar::Int(match size { - hir_def::layout::Integer::I8 => chalk_ir::IntTy::I8, - hir_def::layout::Integer::I16 => chalk_ir::IntTy::I16, - hir_def::layout::Integer::I32 => chalk_ir::IntTy::I32, - hir_def::layout::Integer::I64 => chalk_ir::IntTy::I64, - hir_def::layout::Integer::I128 => chalk_ir::IntTy::I128, - }), - false => chalk_ir::Scalar::Uint(match size { - hir_def::layout::Integer::I8 => chalk_ir::UintTy::U8, - hir_def::layout::Integer::I16 => chalk_ir::UintTy::U16, - hir_def::layout::Integer::I32 => chalk_ir::UintTy::U32, - hir_def::layout::Integer::I64 => chalk_ir::UintTy::U64, - hir_def::layout::Integer::I128 => chalk_ir::UintTy::U128, - }), - }, - }) - .intern(Interner); - } - chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U8)).intern(Interner) - } - fn impl_datum(&self, impl_id: ImplId) -> Arc { - self.db.impl_datum(self.krate, impl_id) - } - - fn fn_def_datum( - &self, - fn_def_id: chalk_ir::FnDefId, - ) -> Arc> { - self.db.fn_def_datum(from_chalk(self.db, fn_def_id)) - } - - fn impls_for_trait( - &self, - trait_id: TraitId, - parameters: &[chalk_ir::GenericArg], - binders: &CanonicalVarKinds, - ) -> Vec { - debug!("impls_for_trait {:?}", trait_id); - let trait_: hir_def::TraitId = from_chalk_trait_id(trait_id); - - let ty: Ty = parameters[0].assert_ty_ref(Interner).clone(); - - fn binder_kind( - ty: &Ty, - binders: &CanonicalVarKinds, - ) -> Option { - if let TyKind::BoundVar(bv) = ty.kind(Interner) { - let binders = binders.as_slice(Interner); - if bv.debruijn == DebruijnIndex::INNERMOST - && let chalk_ir::VariableKind::Ty(tk) = binders[bv.index].kind - { - return Some(tk); - } - } - None - } - - let self_ty_fp = TyFingerprint::for_trait_impl(&ty); - let fps: &[TyFingerprint] = match binder_kind(&ty, binders) { - Some(chalk_ir::TyVariableKind::Integer) => &ALL_INT_FPS, - Some(chalk_ir::TyVariableKind::Float) => &ALL_FLOAT_FPS, - _ => self_ty_fp.as_slice(), - }; - - let id_to_chalk = |id: hir_def::ImplId| id.to_chalk(self.db); - - let mut result = vec![]; - if fps.is_empty() { - debug!("Unrestricted search for {:?} impls...", trait_); - _ = self.for_trait_impls(trait_, self_ty_fp, |impls| { - result.extend(impls.for_trait(trait_).map(id_to_chalk)); - ControlFlow::Continue(()) - }); - } else { - _ = 
- self.for_trait_impls(trait_, self_ty_fp, |impls| { - result.extend(fps.iter().flat_map(move |fp| { - impls.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk) - })); - ControlFlow::Continue(()) - }); - }; - - debug!("impls_for_trait returned {} impls", result.len()); - result - } - - fn impl_provided_for(&self, auto_trait_id: TraitId, kind: &chalk_ir::TyKind) -> bool { - debug!("impl_provided_for {:?}, {:?}", auto_trait_id, kind); - - let trait_id = from_chalk_trait_id(auto_trait_id); - let self_ty = kind.clone().intern(Interner); - // We cannot filter impls by `TyFingerprint` for the following types: - let self_ty_fp = match kind { - // because we need to find any impl whose Self type is a ref with the same mutability - // (we don't care about the inner type). - TyKind::Ref(..) => None, - // because we need to find any impl whose Self type is a tuple with the same arity. - TyKind::Tuple(..) => None, - _ => TyFingerprint::for_trait_impl(&self_ty), - }; - - let check_kind = |impl_id| { - let impl_self_ty = self.db.impl_self_ty(impl_id); - // NOTE(skip_binders): it's safe to skip binders here as we don't check substitutions. - let impl_self_kind = impl_self_ty.skip_binders().kind(Interner); - - match (kind, impl_self_kind) { - (TyKind::Adt(id_a, _), TyKind::Adt(id_b, _)) => id_a == id_b, - (TyKind::AssociatedType(id_a, _), TyKind::AssociatedType(id_b, _)) => id_a == id_b, - (TyKind::Scalar(scalar_a), TyKind::Scalar(scalar_b)) => scalar_a == scalar_b, - (TyKind::Error, TyKind::Error) - | (TyKind::Str, TyKind::Str) - | (TyKind::Slice(_), TyKind::Slice(_)) - | (TyKind::Never, TyKind::Never) - | (TyKind::Array(_, _), TyKind::Array(_, _)) => true, - (TyKind::Tuple(arity_a, _), TyKind::Tuple(arity_b, _)) => arity_a == arity_b, - (TyKind::OpaqueType(id_a, _), TyKind::OpaqueType(id_b, _)) => id_a == id_b, - (TyKind::FnDef(id_a, _), TyKind::FnDef(id_b, _)) => id_a == id_b, - (TyKind::Ref(id_a, _, _), TyKind::Ref(id_b, _, _)) - | (TyKind::Raw(id_a, _), TyKind::Raw(id_b, _)) => id_a == id_b, - (TyKind::Closure(id_a, _), TyKind::Closure(id_b, _)) => id_a == id_b, - (TyKind::Coroutine(id_a, _), TyKind::Coroutine(id_b, _)) - | (TyKind::CoroutineWitness(id_a, _), TyKind::CoroutineWitness(id_b, _)) => { - id_a == id_b - } - (TyKind::Foreign(id_a), TyKind::Foreign(id_b)) => id_a == id_b, - (_, _) => false, - } - }; - - if let Some(fp) = self_ty_fp { - self.for_trait_impls(trait_id, self_ty_fp, |impls| { - match impls.for_trait_and_self_ty(trait_id, fp).any(check_kind) { - true => ControlFlow::Break(()), - false => ControlFlow::Continue(()), - } - }) - } else { - self.for_trait_impls(trait_id, self_ty_fp, |impls| { - match impls.for_trait(trait_id).any(check_kind) { - true => ControlFlow::Break(()), - false => ControlFlow::Continue(()), - } - }) - } - .is_break() - } - - fn associated_ty_value(&self, id: AssociatedTyValueId) -> Arc { - self.db.associated_ty_value(self.krate, id) - } - - fn custom_clauses(&self) -> Vec> { - vec![] - } - fn local_impls_to_coherence_check(&self, _trait_id: TraitId) -> Vec { - // We don't do coherence checking (yet) - unimplemented!() - } - fn interner(&self) -> Interner { - Interner - } - fn well_known_trait_id( - &self, - well_known_trait: WellKnownTrait, - ) -> Option> { - let lang_item = lang_item_from_well_known_trait(well_known_trait); - let trait_ = lang_item.resolve_trait(self.db, self.krate)?; - Some(to_chalk_trait_id(trait_)) - } - fn well_known_assoc_type_id( - &self, - assoc_type: rust_ir::WellKnownAssocType, - ) -> Option> { - let lang_item = match assoc_type { - 
rust_ir::WellKnownAssocType::AsyncFnOnceOutput => LangItem::AsyncFnOnceOutput, - }; - let alias = lang_item.resolve_type_alias(self.db, self.krate)?; - Some(to_assoc_type_id(alias)) - } - - fn program_clauses_for_env( - &self, - environment: &chalk_ir::Environment, - ) -> chalk_ir::ProgramClauses { - self.db.program_clauses_for_chalk_env(self.krate, self.block, environment.clone()) - } - - fn opaque_ty_data(&self, id: chalk_ir::OpaqueTyId) -> Arc { - let full_id = self.db.lookup_intern_impl_trait_id(id.into()); - let bound = match full_id { - crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => { - let datas = self - .db - .return_type_impl_traits(func) - .expect("impl trait id without impl traits"); - let (datas, binders) = (*datas).as_ref().into_value_and_skipped_binders(); - let data = &datas.impl_traits[idx]; - let bound = OpaqueTyDatumBound { - bounds: make_single_type_binders(data.bounds.skip_binders().to_vec()), - where_clauses: chalk_ir::Binders::empty(Interner, vec![]), - }; - chalk_ir::Binders::new(binders, bound) - } - crate::ImplTraitId::TypeAliasImplTrait(alias, idx) => { - let datas = self - .db - .type_alias_impl_traits(alias) - .expect("impl trait id without impl traits"); - let (datas, binders) = (*datas).as_ref().into_value_and_skipped_binders(); - let data = &datas.impl_traits[idx]; - let bound = OpaqueTyDatumBound { - bounds: make_single_type_binders(data.bounds.skip_binders().to_vec()), - where_clauses: chalk_ir::Binders::empty(Interner, vec![]), - }; - chalk_ir::Binders::new(binders, bound) - } - crate::ImplTraitId::AsyncBlockTypeImplTrait(..) => { - if let Some((future_trait, future_output)) = - LangItem::Future.resolve_trait(self.db, self.krate).and_then(|trait_| { - let alias = trait_ - .trait_items(self.db) - .associated_type_by_name(&Name::new_symbol_root(sym::Output))?; - Some((trait_, alias)) - }) - { - // Making up Symbol’s value as variable is void: AsyncBlock: - // - // |--------------------OpaqueTyDatum-------------------| - // |-------------OpaqueTyDatumBound--------------| - // for [Future, Future::Output = T] - // ^1 ^0 ^0 ^0 ^1 - let impl_bound = WhereClause::Implemented(TraitRef { - trait_id: to_chalk_trait_id(future_trait), - // Self type as the first parameter. - substitution: Substitution::from1( - Interner, - TyKind::BoundVar(BoundVar { - debruijn: DebruijnIndex::INNERMOST, - index: 0, - }) - .intern(Interner), - ), - }); - let mut binder = vec![]; - binder.push(crate::wrap_empty_binders(impl_bound)); - let sized_trait = LangItem::Sized.resolve_trait(self.db, self.krate); - if let Some(sized_trait_) = sized_trait { - let sized_bound = WhereClause::Implemented(TraitRef { - trait_id: to_chalk_trait_id(sized_trait_), - // Self type as the first parameter. - substitution: Substitution::from1( - Interner, - TyKind::BoundVar(BoundVar { - debruijn: DebruijnIndex::INNERMOST, - index: 0, - }) - .intern(Interner), - ), - }); - binder.push(crate::wrap_empty_binders(sized_bound)); - } - let proj_bound = WhereClause::AliasEq(AliasEq { - alias: AliasTy::Projection(ProjectionTy { - associated_ty_id: to_assoc_type_id(future_output), - // Self type as the first parameter. - substitution: Substitution::from1( - Interner, - TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)) - .intern(Interner), - ), - }), - // The parameter of the opaque type. 
- ty: TyKind::BoundVar(BoundVar { debruijn: DebruijnIndex::ONE, index: 0 }) - .intern(Interner), - }); - binder.push(crate::wrap_empty_binders(proj_bound)); - let bound = OpaqueTyDatumBound { - bounds: make_single_type_binders(binder), - where_clauses: chalk_ir::Binders::empty(Interner, vec![]), - }; - // The opaque type has 1 parameter. - make_single_type_binders(bound) - } else { - // If failed to find Symbol’s value as variable is void: Future::Output, return empty bounds as fallback. - let bound = OpaqueTyDatumBound { - bounds: chalk_ir::Binders::empty(Interner, vec![]), - where_clauses: chalk_ir::Binders::empty(Interner, vec![]), - }; - // The opaque type has 1 parameter. - make_single_type_binders(bound) - } - } - }; - - Arc::new(OpaqueTyDatum { opaque_ty_id: id, bound }) - } - - fn hidden_opaque_type(&self, _id: chalk_ir::OpaqueTyId) -> chalk_ir::Ty { - // FIXME: actually provide the hidden type; it is relevant for auto traits - TyKind::Error.intern(Interner) - } - - // object safety was renamed to dyn-compatibility but still remains here in chalk. - // This will be removed since we are going to migrate to next-gen trait solver. - fn is_object_safe(&self, trait_id: chalk_ir::TraitId) -> bool { - let trait_ = from_chalk_trait_id(trait_id); - crate::dyn_compatibility::dyn_compatibility(self.db, trait_).is_none() - } - - fn closure_kind( - &self, - _closure_id: chalk_ir::ClosureId, - _substs: &chalk_ir::Substitution, - ) -> rust_ir::ClosureKind { - // Fn is the closure kind that implements all three traits - rust_ir::ClosureKind::Fn - } - fn closure_inputs_and_output( - &self, - _closure_id: chalk_ir::ClosureId, - substs: &chalk_ir::Substitution, - ) -> chalk_ir::Binders> { - let sig_ty = ClosureSubst(substs).sig_ty(); - let sig = &sig_ty.callable_sig(self.db).expect("first closure param should be fn ptr"); - let io = rust_ir::FnDefInputsAndOutputDatum { - argument_types: sig.params().to_vec(), - return_type: sig.ret().clone(), - }; - chalk_ir::Binders::empty(Interner, io.shifted_in(Interner)) - } - fn closure_upvars( - &self, - _closure_id: chalk_ir::ClosureId, - _substs: &chalk_ir::Substitution, - ) -> chalk_ir::Binders> { - let ty = TyBuilder::unit(); - chalk_ir::Binders::empty(Interner, ty) - } - fn closure_fn_substitution( - &self, - _closure_id: chalk_ir::ClosureId, - _substs: &chalk_ir::Substitution, - ) -> chalk_ir::Substitution { - Substitution::empty(Interner) - } - - fn trait_name(&self, trait_id: chalk_ir::TraitId) -> String { - let id = from_chalk_trait_id(trait_id); - self.db.trait_signature(id).name.display(self.db, self.edition()).to_string() - } - fn adt_name(&self, chalk_ir::AdtId(adt_id): AdtId) -> String { - let edition = self.edition(); - match adt_id { - hir_def::AdtId::StructId(id) => { - self.db.struct_signature(id).name.display(self.db, edition).to_string() - } - hir_def::AdtId::EnumId(id) => { - self.db.enum_signature(id).name.display(self.db, edition).to_string() - } - hir_def::AdtId::UnionId(id) => { - self.db.union_signature(id).name.display(self.db, edition).to_string() - } - } - } - fn adt_size_align(&self, _id: chalk_ir::AdtId) -> Arc { - // FIXME - Arc::new(rust_ir::AdtSizeAlign::from_one_zst(false)) - } - fn assoc_type_name(&self, assoc_ty_id: chalk_ir::AssocTypeId) -> String { - let id = self.db.associated_ty_data(from_assoc_type_id(assoc_ty_id)).name; - self.db.type_alias_signature(id).name.display(self.db, self.edition()).to_string() - } - fn opaque_type_name(&self, opaque_ty_id: chalk_ir::OpaqueTyId) -> String { - format!("Opaque_{:?}", 
opaque_ty_id.0) - } - fn fn_def_name(&self, fn_def_id: chalk_ir::FnDefId) -> String { - format!("fn_{:?}", fn_def_id.0) - } - fn coroutine_datum( - &self, - id: chalk_ir::CoroutineId, - ) -> Arc> { - let InternedCoroutine(parent, expr) = self.db.lookup_intern_coroutine(id.into()); - - // We fill substitution with unknown type, because we only need to know whether the generic - // params are types or consts to build `Binders` and those being filled up are for - // `resume_type`, `yield_type`, and `return_type` of the coroutine in question. - let subst = TyBuilder::subst_for_coroutine(self.db, parent).fill_with_unknown().build(); - - let len = subst.len(Interner); - let input_output = rust_ir::CoroutineInputOutputDatum { - resume_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, len - 3)) - .intern(Interner), - yield_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, len - 2)) - .intern(Interner), - return_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, len - 1)) - .intern(Interner), - // FIXME: calculate upvars - upvars: vec![], - }; - - let it = subst - .iter(Interner) - .map(|it| it.constant(Interner).map(|c| c.data(Interner).ty.clone())); - let input_output = crate::make_type_and_const_binders(it, input_output); - - let movability = match self.db.body(parent)[expr] { - hir_def::hir::Expr::Closure { - closure_kind: hir_def::hir::ClosureKind::Coroutine(movability), - .. - } => movability, - _ => unreachable!("non coroutine expression interned as coroutine"), - }; - let movability = match movability { - Movability::Static => rust_ir::Movability::Static, - Movability::Movable => rust_ir::Movability::Movable, - }; - - Arc::new(rust_ir::CoroutineDatum { movability, input_output }) - } - fn coroutine_witness_datum( - &self, - id: chalk_ir::CoroutineId, - ) -> Arc> { - // FIXME: calculate inner types - let inner_types = - rust_ir::CoroutineWitnessExistential { types: wrap_empty_binders(vec![]) }; - - let InternedCoroutine(parent, _) = self.db.lookup_intern_coroutine(id.into()); - // See the comment in `coroutine_datum()` for unknown types. - let subst = TyBuilder::subst_for_coroutine(self.db, parent).fill_with_unknown().build(); - let it = subst - .iter(Interner) - .map(|it| it.constant(Interner).map(|c| c.data(Interner).ty.clone())); - let inner_types = crate::make_type_and_const_binders(it, inner_types); - - Arc::new(rust_ir::CoroutineWitnessDatum { inner_types }) - } - - fn unification_database(&self) -> &dyn chalk_ir::UnificationDatabase { - &self.db - } -} - -impl ChalkContext<'_> { - fn edition(&self) -> Edition { - self.krate.data(self.db).edition - } - - fn for_trait_impls( - &self, - trait_id: hir_def::TraitId, - self_ty_fp: Option, - mut f: impl FnMut(&TraitImpls) -> ControlFlow<()>, - ) -> ControlFlow<()> { - // Note: Since we're using `impls_for_trait` and `impl_provided_for`, - // only impls where the trait can be resolved should ever reach Chalk. - // `impl_datum` relies on that and will panic if the trait can't be resolved. 
- let in_deps = self.db.trait_impls_in_deps(self.krate); - let in_self = self.db.trait_impls_in_crate(self.krate); - let trait_module = trait_id.module(self.db); - let type_module = match self_ty_fp { - Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(self.db)), - Some(TyFingerprint::ForeignType(type_id)) => { - Some(from_foreign_def_id(type_id).module(self.db)) - } - Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(self.db)), - _ => None, - }; - - let mut def_blocks = - [trait_module.containing_block(), type_module.and_then(|it| it.containing_block())]; - - let block_impls = iter::successors(self.block, |&block_id| { - cov_mark::hit!(block_local_impls); - block_id.loc(self.db).module.containing_block() - }) - .inspect(|&block_id| { - // make sure we don't search the same block twice - def_blocks.iter_mut().for_each(|block| { - if *block == Some(block_id) { - *block = None; - } - }); - }) - .filter_map(|block_id| self.db.trait_impls_in_block(block_id)); - f(&in_self)?; - for it in in_deps.iter().map(ops::Deref::deref) { - f(it)?; - } - for it in block_impls { - f(&it)?; - } - for it in def_blocks.into_iter().flatten().filter_map(|it| self.db.trait_impls_in_block(it)) - { - f(&it)?; - } - ControlFlow::Continue(()) - } -} - impl chalk_ir::UnificationDatabase for &dyn HirDatabase { fn fn_def_variance( &self, @@ -610,374 +23,6 @@ impl chalk_ir::UnificationDatabase for &dyn HirDatabase { } } -pub(crate) fn program_clauses_for_chalk_env_query( - db: &dyn HirDatabase, - krate: Crate, - block: Option, - environment: chalk_ir::Environment, -) -> chalk_ir::ProgramClauses { - chalk_solve::program_clauses_for_env(&ChalkContext { db, krate, block }, &environment) -} - -pub(crate) fn associated_ty_data_query( - db: &dyn HirDatabase, - type_alias: TypeAliasId, -) -> Arc { - debug!("associated_ty_data {:?}", type_alias); - let trait_ = match type_alias.lookup(db).container { - ItemContainerId::TraitId(t) => t, - _ => panic!("associated type not in trait"), - }; - - // Lower bounds -- we could/should maybe move this to a separate query in `lower` - let type_alias_data = db.type_alias_signature(type_alias); - let generic_params = generics(db, type_alias.into()); - let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db); - let mut ctx = crate::TyLoweringContext::new( - db, - &resolver, - &type_alias_data.store, - type_alias.into(), - LifetimeElisionKind::AnonymousReportError, - ) - .with_type_param_mode(crate::lower::ParamLoweringMode::Variable); - - let trait_subst = TyBuilder::subst_for_def(db, trait_, None) - .fill_with_bound_vars(crate::DebruijnIndex::INNERMOST, 0) - .build(); - let pro_ty = TyBuilder::assoc_type_projection(db, type_alias, Some(trait_subst)) - .fill_with_bound_vars( - crate::DebruijnIndex::INNERMOST, - generic_params.parent_generics().map_or(0, |it| it.len()), - ) - .build(); - let self_ty = TyKind::Alias(AliasTy::Projection(pro_ty)).intern(Interner); - - let mut bounds = Vec::new(); - for bound in &type_alias_data.bounds { - ctx.lower_type_bound(bound, self_ty.clone(), false).for_each(|pred| { - if let Some(pred) = generic_predicate_to_inline_bound(db, &pred, &self_ty) { - bounds.push(pred); - } - }); - } - - if !ctx.unsized_types.contains(&self_ty) { - let sized_trait = - LangItem::Sized.resolve_trait(db, resolver.krate()).map(to_chalk_trait_id); - let sized_bound = sized_trait.into_iter().map(|sized_trait| { - let trait_bound = - rust_ir::TraitBound { trait_id: sized_trait, args_no_self: Default::default() }; - let inline_bound = 
rust_ir::InlineBound::TraitBound(trait_bound); - chalk_ir::Binders::empty(Interner, inline_bound) - }); - bounds.extend(sized_bound); - bounds.shrink_to_fit(); - } - - // FIXME: Re-enable where clauses on associated types when an upstream chalk bug is fixed. - // (rust-analyzer#9052) - // let where_clauses = convert_where_clauses(db, type_alias.into(), &bound_vars); - let bound_data = rust_ir::AssociatedTyDatumBound { bounds, where_clauses: vec![] }; - let datum = AssociatedTyDatum { - trait_id: to_chalk_trait_id(trait_), - id: to_assoc_type_id(type_alias), - name: type_alias, - binders: make_binders(db, &generic_params, bound_data), - }; - Arc::new(datum) -} - -pub(crate) fn trait_datum_query( - db: &dyn HirDatabase, - krate: Crate, - trait_id: TraitId, -) -> Arc { - debug!("trait_datum {:?}", trait_id); - let trait_ = from_chalk_trait_id(trait_id); - let trait_data = db.trait_signature(trait_); - debug!("trait {:?} = {:?}", trait_id, trait_data.name); - let generic_params = generics(db, trait_.into()); - let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST); - let flags = rust_ir::TraitFlags { - auto: trait_data.flags.contains(TraitFlags::AUTO), - upstream: trait_.lookup(db).container.krate() != krate, - non_enumerable: true, - coinductive: false, // only relevant for Chalk testing - // FIXME: set these flags correctly - marker: false, - fundamental: trait_data.flags.contains(TraitFlags::FUNDAMENTAL), - }; - let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars); - let associated_ty_ids = - trait_.trait_items(db).associated_types().map(to_assoc_type_id).collect(); - let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses }; - let well_known = db.lang_attr(trait_.into()).and_then(well_known_trait_from_lang_item); - let trait_datum = TraitDatum { - id: trait_id, - binders: make_binders(db, &generic_params, trait_datum_bound), - flags, - associated_ty_ids, - well_known, - }; - Arc::new(trait_datum) -} - -fn well_known_trait_from_lang_item(item: LangItem) -> Option { - Some(match item { - LangItem::Clone => WellKnownTrait::Clone, - LangItem::CoerceUnsized => WellKnownTrait::CoerceUnsized, - LangItem::Copy => WellKnownTrait::Copy, - LangItem::DiscriminantKind => WellKnownTrait::DiscriminantKind, - LangItem::DispatchFromDyn => WellKnownTrait::DispatchFromDyn, - LangItem::Drop => WellKnownTrait::Drop, - LangItem::Fn => WellKnownTrait::Fn, - LangItem::FnMut => WellKnownTrait::FnMut, - LangItem::FnOnce => WellKnownTrait::FnOnce, - LangItem::AsyncFn => WellKnownTrait::AsyncFn, - LangItem::AsyncFnMut => WellKnownTrait::AsyncFnMut, - LangItem::AsyncFnOnce => WellKnownTrait::AsyncFnOnce, - LangItem::Coroutine => WellKnownTrait::Coroutine, - LangItem::Sized => WellKnownTrait::Sized, - LangItem::Unpin => WellKnownTrait::Unpin, - LangItem::Unsize => WellKnownTrait::Unsize, - LangItem::Tuple => WellKnownTrait::Tuple, - LangItem::PointeeTrait => WellKnownTrait::Pointee, - LangItem::FnPtrTrait => WellKnownTrait::FnPtr, - LangItem::Future => WellKnownTrait::Future, - _ => return None, - }) -} - -fn lang_item_from_well_known_trait(trait_: WellKnownTrait) -> LangItem { - match trait_ { - WellKnownTrait::Clone => LangItem::Clone, - WellKnownTrait::CoerceUnsized => LangItem::CoerceUnsized, - WellKnownTrait::Copy => LangItem::Copy, - WellKnownTrait::DiscriminantKind => LangItem::DiscriminantKind, - WellKnownTrait::DispatchFromDyn => LangItem::DispatchFromDyn, - WellKnownTrait::Drop => LangItem::Drop, - WellKnownTrait::Fn => LangItem::Fn, - 
WellKnownTrait::FnMut => LangItem::FnMut, - WellKnownTrait::FnOnce => LangItem::FnOnce, - WellKnownTrait::AsyncFn => LangItem::AsyncFn, - WellKnownTrait::AsyncFnMut => LangItem::AsyncFnMut, - WellKnownTrait::AsyncFnOnce => LangItem::AsyncFnOnce, - WellKnownTrait::Coroutine => LangItem::Coroutine, - WellKnownTrait::Sized => LangItem::Sized, - WellKnownTrait::Tuple => LangItem::Tuple, - WellKnownTrait::Unpin => LangItem::Unpin, - WellKnownTrait::Unsize => LangItem::Unsize, - WellKnownTrait::Pointee => LangItem::PointeeTrait, - WellKnownTrait::FnPtr => LangItem::FnPtrTrait, - WellKnownTrait::Future => LangItem::Future, - } -} - -pub(crate) fn adt_datum_query( - db: &dyn HirDatabase, - krate: Crate, - chalk_ir::AdtId(adt_id): AdtId, -) -> Arc { - debug!("adt_datum {:?}", adt_id); - let generic_params = generics(db, adt_id.into()); - let bound_vars_subst = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST); - let where_clauses = convert_where_clauses(db, adt_id.into(), &bound_vars_subst); - - let (fundamental, phantom_data) = match adt_id { - hir_def::AdtId::StructId(s) => { - let flags = db.struct_signature(s).flags; - (flags.contains(StructFlags::FUNDAMENTAL), flags.contains(StructFlags::IS_PHANTOM_DATA)) - } - // FIXME set fundamental flags correctly - hir_def::AdtId::UnionId(_) => (false, false), - hir_def::AdtId::EnumId(_) => (false, false), - }; - let flags = rust_ir::AdtFlags { - upstream: adt_id.module(db).krate() != krate, - fundamental, - phantom_data, - }; - - // this slows down rust-analyzer by quite a bit unfortunately, so enabling this is currently not worth it - let _variant_id_to_fields = |id: VariantId| { - let variant_data = &id.fields(db); - let fields = if variant_data.fields().is_empty() { - vec![] - } else { - let field_types = db.field_types(id); - variant_data - .fields() - .iter() - .map(|(idx, _)| field_types[idx].clone().substitute(Interner, &bound_vars_subst)) - .filter(|it| !it.contains_unknown()) - .collect() - }; - rust_ir::AdtVariantDatum { fields } - }; - let variant_id_to_fields = |_: VariantId| rust_ir::AdtVariantDatum { fields: vec![] }; - - let (kind, variants) = match adt_id { - hir_def::AdtId::StructId(id) => { - (rust_ir::AdtKind::Struct, vec![variant_id_to_fields(id.into())]) - } - hir_def::AdtId::EnumId(id) => { - let variants = id - .enum_variants(db) - .variants - .iter() - .map(|&(variant_id, _, _)| variant_id_to_fields(variant_id.into())) - .collect(); - (rust_ir::AdtKind::Enum, variants) - } - hir_def::AdtId::UnionId(id) => { - (rust_ir::AdtKind::Union, vec![variant_id_to_fields(id.into())]) - } - }; - - let struct_datum_bound = rust_ir::AdtDatumBound { variants, where_clauses }; - let struct_datum = AdtDatum { - kind, - id: chalk_ir::AdtId(adt_id), - binders: make_binders(db, &generic_params, struct_datum_bound), - flags, - }; - Arc::new(struct_datum) -} - -pub(crate) fn impl_datum_query( - db: &dyn HirDatabase, - krate: Crate, - impl_id: ImplId, -) -> Arc { - let _p = tracing::info_span!("impl_datum_query").entered(); - debug!("impl_datum {:?}", impl_id); - let impl_: hir_def::ImplId = from_chalk(db, impl_id); - impl_def_datum(db, krate, impl_) -} - -fn impl_def_datum(db: &dyn HirDatabase, krate: Crate, impl_id: hir_def::ImplId) -> Arc { - let trait_ref = db - .impl_trait(impl_id) - // ImplIds for impls where the trait ref can't be resolved should never reach Chalk - .expect("invalid impl passed to Chalk") - .into_value_and_skipped_binders() - .0; - let impl_data = db.impl_signature(impl_id); - - let generic_params = generics(db, 
impl_id.into()); - let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST); - let trait_ = trait_ref.hir_trait_id(); - let impl_type = if impl_id.lookup(db).container.krate() == krate { - rust_ir::ImplType::Local - } else { - rust_ir::ImplType::External - }; - let where_clauses = convert_where_clauses(db, impl_id.into(), &bound_vars); - let negative = impl_data.flags.contains(ImplFlags::NEGATIVE); - let polarity = if negative { rust_ir::Polarity::Negative } else { rust_ir::Polarity::Positive }; - - let impl_datum_bound = rust_ir::ImplDatumBound { trait_ref, where_clauses }; - let trait_data = trait_.trait_items(db); - let associated_ty_value_ids = impl_id - .impl_items(db) - .items - .iter() - .filter_map(|(_, item)| match item { - AssocItemId::TypeAliasId(type_alias) => Some(*type_alias), - _ => None, - }) - .filter(|&type_alias| { - // don't include associated types that don't exist in the trait - let name = &db.type_alias_signature(type_alias).name; - trait_data.associated_type_by_name(name).is_some() - }) - .map(|type_alias| TypeAliasAsValue(type_alias).to_chalk(db)) - .collect(); - debug!("impl_datum: {:?}", impl_datum_bound); - let impl_datum = ImplDatum { - binders: make_binders(db, &generic_params, impl_datum_bound), - impl_type, - polarity, - associated_ty_value_ids, - }; - Arc::new(impl_datum) -} - -pub(crate) fn associated_ty_value_query( - db: &dyn HirDatabase, - krate: Crate, - id: AssociatedTyValueId, -) -> Arc { - let type_alias: TypeAliasAsValue = from_chalk(db, id); - type_alias_associated_ty_value(db, krate, type_alias.0) -} - -fn type_alias_associated_ty_value( - db: &dyn HirDatabase, - _krate: Crate, - type_alias: TypeAliasId, -) -> Arc { - let type_alias_data = db.type_alias_signature(type_alias); - let impl_id = match type_alias.lookup(db).container { - ItemContainerId::ImplId(it) => it, - _ => panic!("assoc ty value should be in impl"), - }; - - let trait_ref = db - .impl_trait(impl_id) - .expect("assoc ty value should not exist") - .into_value_and_skipped_binders() - .0; // we don't return any assoc ty values if the impl'd trait can't be resolved - - let assoc_ty = trait_ref - .hir_trait_id() - .trait_items(db) - .associated_type_by_name(&type_alias_data.name) - .expect("assoc ty value should not exist"); // validated when building the impl data as well - let (ty, binders) = db.ty(type_alias.into()).into_value_and_skipped_binders(); - let value_bound = rust_ir::AssociatedTyValueBound { ty }; - let value = rust_ir::AssociatedTyValue { - impl_id: impl_id.to_chalk(db), - associated_ty_id: to_assoc_type_id(assoc_ty), - value: chalk_ir::Binders::new(binders, value_bound), - }; - Arc::new(value) -} - -pub(crate) fn fn_def_datum_query( - db: &dyn HirDatabase, - callable_def: CallableDefId, -) -> Arc { - let generic_def = GenericDefId::from_callable(db, callable_def); - let generic_params = generics(db, generic_def); - let (sig, binders) = db.callable_item_signature(callable_def).into_value_and_skipped_binders(); - let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST); - let where_clauses = convert_where_clauses(db, generic_def, &bound_vars); - let bound = rust_ir::FnDefDatumBound { - // Note: Chalk doesn't actually use this information yet as far as I am aware, but we provide it anyway - inputs_and_output: chalk_ir::Binders::empty( - Interner, - rust_ir::FnDefInputsAndOutputDatum { - argument_types: sig.params().to_vec(), - return_type: sig.ret().clone(), - } - .shifted_in(Interner), - ), - where_clauses, - }; - let datum = 
FnDefDatum { - id: callable_def.to_chalk(db), - sig: chalk_ir::FnSig { - abi: sig.abi, - safety: chalk_ir::Safety::Safe, - variadic: sig.is_varargs, - }, - binders: chalk_ir::Binders::new(binders, bound), - }; - Arc::new(datum) -} - pub(crate) fn fn_def_variance_query( db: &dyn HirDatabase, callable_def: CallableDefId, @@ -1021,59 +66,3 @@ pub(super) fn convert_where_clauses( .map(|pred| pred.substitute(Interner, substs)) .collect() } - -pub(super) fn generic_predicate_to_inline_bound( - db: &dyn HirDatabase, - pred: &QuantifiedWhereClause, - self_ty: &Ty, -) -> Option>> { - // An InlineBound is like a GenericPredicate, except the self type is left out. - // We don't have a special type for this, but Chalk does. - let self_ty_shifted_in = self_ty.clone().shifted_in_from(Interner, DebruijnIndex::ONE); - let (pred, binders) = pred.as_ref().into_value_and_skipped_binders(); - match pred { - WhereClause::Implemented(trait_ref) => { - if trait_ref.self_type_parameter(Interner) != self_ty_shifted_in { - // we can only convert predicates back to type bounds if they - // have the expected self type - return None; - } - let args_no_self = trait_ref.substitution.as_slice(Interner)[1..] - .iter() - .cloned() - .casted(Interner) - .collect(); - let trait_bound = rust_ir::TraitBound { trait_id: trait_ref.trait_id, args_no_self }; - Some(chalk_ir::Binders::new(binders, rust_ir::InlineBound::TraitBound(trait_bound))) - } - WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection_ty), ty }) => { - let generics = generics(db, from_assoc_type_id(projection_ty.associated_ty_id).into()); - let parent_len = generics.parent_generics().map_or(0, |g| g.len_self()); - let (trait_args, assoc_args) = - projection_ty.substitution.as_slice(Interner).split_at(parent_len); - let (self_ty, args_no_self) = - trait_args.split_first().expect("projection without trait self type"); - if self_ty.assert_ty_ref(Interner) != &self_ty_shifted_in { - return None; - } - - let args_no_self = args_no_self.iter().cloned().casted(Interner).collect(); - let parameters = assoc_args.to_vec(); - - let alias_eq_bound = rust_ir::AliasEqBound { - value: ty.clone(), - trait_bound: rust_ir::TraitBound { - trait_id: to_chalk_trait_id(projection_ty.trait_(db)), - args_no_self, - }, - associated_ty_id: projection_ty.associated_ty_id, - parameters, - }; - Some(chalk_ir::Binders::new( - binders, - rust_ir::InlineBound::AliasEqBound(alias_eq_bound), - )) - } - _ => None, - } -} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs index 836cc96233eb8..1faf9f66dc547 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs @@ -14,10 +14,9 @@ use hir_def::{ use crate::{ AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, ClosureId, DynTy, FnPointer, ImplTraitId, InEnvironment, Interner, Lifetime, ProjectionTy, - QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, TypeFlags, WhereClause, - db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, - from_placeholder_idx, generics::generics, mapping::ToChalk, to_chalk_trait_id, - utils::ClosureSubst, + QuantifiedWhereClause, Substitution, ToChalk, TraitRef, Ty, TyBuilder, TyKind, TypeFlags, + WhereClause, db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, + from_placeholder_idx, generics::generics, to_chalk_trait_id, utils::ClosureSubst, }; pub trait 
TyExt { @@ -211,7 +210,7 @@ impl TyExt for Ty { match self.kind(Interner) { TyKind::Function(fn_ptr) => Some(CallableSig::from_fn_ptr(fn_ptr)), TyKind::FnDef(def, parameters) => Some(CallableSig::from_def(db, *def, parameters)), - TyKind::Closure(.., substs) => ClosureSubst(substs).sig_ty().callable_sig(db), + TyKind::Closure(.., substs) => ClosureSubst(substs).sig_ty(db).callable_sig(db), _ => None, } } @@ -246,26 +245,30 @@ impl TyExt for Ty { } fn impl_trait_bounds(&self, db: &dyn HirDatabase) -> Option> { + let handle_async_block_type_impl_trait = |def: DefWithBodyId| { + let krate = def.module(db).krate(); + if let Some(future_trait) = LangItem::Future.resolve_trait(db, krate) { + // This is only used by type walking. + // Parameters will be walked outside, and projection predicate is not used. + // So just provide the Future trait. + let impl_bound = Binders::empty( + Interner, + WhereClause::Implemented(TraitRef { + trait_id: to_chalk_trait_id(future_trait), + substitution: Substitution::empty(Interner), + }), + ); + Some(vec![impl_bound]) + } else { + None + } + }; + match self.kind(Interner) { TyKind::OpaqueType(opaque_ty_id, subst) => { match db.lookup_intern_impl_trait_id((*opaque_ty_id).into()) { ImplTraitId::AsyncBlockTypeImplTrait(def, _expr) => { - let krate = def.module(db).krate(); - if let Some(future_trait) = LangItem::Future.resolve_trait(db, krate) { - // This is only used by type walking. - // Parameters will be walked outside, and projection predicate is not used. - // So just provide the Future trait. - let impl_bound = Binders::empty( - Interner, - WhereClause::Implemented(TraitRef { - trait_id: to_chalk_trait_id(future_trait), - substitution: Substitution::empty(Interner), - }), - ); - Some(vec![impl_bound]) - } else { - None - } + handle_async_block_type_impl_trait(def) } ImplTraitId::ReturnTypeImplTrait(func, idx) => { db.return_type_impl_traits(func).map(|it| { @@ -300,14 +303,15 @@ impl TyExt for Ty { data.substitute(Interner, &opaque_ty.substitution) }) } - // It always has an parameter for Future::Output type. - ImplTraitId::AsyncBlockTypeImplTrait(..) 
=> unreachable!(), + ImplTraitId::AsyncBlockTypeImplTrait(def, _) => { + return handle_async_block_type_impl_trait(def); + } }; predicates.map(|it| it.into_value_and_skipped_binders().0) } TyKind::Placeholder(idx) => { - let id = from_placeholder_idx(db, *idx); + let id = from_placeholder_idx(db, *idx).0; let generic_params = db.generic_params(id.parent); let param_data = &generic_params[id.local_id]; match param_data { @@ -371,7 +375,7 @@ impl TyExt for Ty { value: InEnvironment::new(&env.env, trait_ref.cast(Interner)), binders: CanonicalVarKinds::empty(Interner), }; - db.trait_solve(crate_id, None, goal).is_some() + !db.trait_solve(crate_id, None, goal).no_solution() } fn equals_ctor(&self, other: &Ty) -> bool { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs index f30ec839a0096..0f2cc17f563dd 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs @@ -15,8 +15,14 @@ use triomphe::Arc; use crate::{ Const, ConstData, ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution, - TraitEnvironment, Ty, TyBuilder, db::HirDatabase, display::DisplayTarget, generics::Generics, - infer::InferenceContext, lower::ParamLoweringMode, to_placeholder_idx, + TraitEnvironment, Ty, TyBuilder, + db::HirDatabase, + display::DisplayTarget, + generics::Generics, + infer::InferenceContext, + lower::ParamLoweringMode, + next_solver::{DbInterner, mapping::ChalkToNextSolver}, + to_placeholder_idx, }; use super::mir::{MirEvalError, MirLowerError, interpret_mir, lower_to_mir, pad16}; @@ -101,25 +107,24 @@ pub(crate) fn path_to_const<'g>( match resolver.resolve_path_in_value_ns_fully(db, path, HygieneId::ROOT) { Some(ValueNs::GenericParam(p)) => { let ty = db.const_param_ty(p); + let args = args(); let value = match mode { ParamLoweringMode::Placeholder => { - ConstValue::Placeholder(to_placeholder_idx(db, p.into())) + let idx = args.type_or_const_param_idx(p.into()).unwrap(); + ConstValue::Placeholder(to_placeholder_idx(db, p.into(), idx as u32)) } - ParamLoweringMode::Variable => { - let args = args(); - match args.type_or_const_param_idx(p.into()) { - Some(it) => ConstValue::BoundVar(BoundVar::new(debruijn, it)), - None => { - never!( - "Generic list doesn't contain this param: {:?}, {:?}, {:?}", - args, - path, - p - ); - return None; - } + ParamLoweringMode::Variable => match args.type_or_const_param_idx(p.into()) { + Some(it) => ConstValue::BoundVar(BoundVar::new(debruijn, it)), + None => { + never!( + "Generic list doesn't contain this param: {:?}, {:?}, {:?}", + args, + path, + p + ); + return None; } - } + }, }; Some(ConstData { ty, value }.intern(Interner)) } @@ -157,7 +162,8 @@ pub fn intern_const_ref( ty: Ty, krate: Crate, ) -> Const { - let layout = || db.layout_of_ty(ty.clone(), TraitEnvironment::empty(krate)); + let interner = DbInterner::new_with(db, Some(krate), None); + let layout = || db.layout_of_ty(ty.to_nextsolver(interner), TraitEnvironment::empty(krate)); let bytes = match value { LiteralConstRef::Int(i) => { // FIXME: We should handle failure of layout better. 
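For orientation, `intern_const_ref` above sizes the backing buffer from the type's layout and stores integer literals as little-endian bytes; `pad16`-based readers such as `try_const_usize`/`try_const_isize` later in this patch widen those buffers back to 128 bits with zero- or sign-extension. A minimal, self-contained sketch of that round trip, using made-up helpers (`encode_le`, `pad16_like`) rather than rust-analyzer's actual `ConstBytes`/`pad16` API:

// Hypothetical stand-ins: encode a literal into a layout-sized little-endian
// buffer, then read it back with zero- or sign-extension to 128 bits.
fn encode_le(value: i128, size: usize) -> Vec<u8> {
    value.to_le_bytes()[..size].to_vec()
}

fn pad16_like(bytes: &[u8], is_signed: bool) -> [u8; 16] {
    // Sign-extend only when the value is signed and its top bit is set.
    let fill = if is_signed && bytes.last().is_some_and(|&b| b & 0x80 != 0) { 0xFF } else { 0x00 };
    let mut out = [fill; 16];
    out[..bytes.len()].copy_from_slice(bytes);
    out
}

fn main() {
    // An `i32` literal `-2` occupies 4 bytes according to its layout.
    let bytes = encode_le(-2, 4);
    assert_eq!(bytes, vec![0xFE, 0xFF, 0xFF, 0xFF]);
    // Signed read recovers -2; an unsigned read of the same bytes gives 0xFFFF_FFFE.
    assert_eq!(i128::from_le_bytes(pad16_like(&bytes, true)), -2);
    assert_eq!(u128::from_le_bytes(pad16_like(&bytes, false)), 0xFFFF_FFFE);
}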
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs index 6449a4dc7e8c6..299b73a7d6cc4 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs @@ -11,7 +11,7 @@ use test_utils::skip_slow_tests; use crate::{ Const, ConstScalar, Interner, MemoryMap, consteval::try_const_usize, db::HirDatabase, - display::DisplayTarget, mir::pad16, test_db::TestDB, + display::DisplayTarget, mir::pad16, setup_tracing, test_db::TestDB, }; use super::{ @@ -76,7 +76,7 @@ fn check_str(#[rust_analyzer::rust_fixture] ra_fixture: &str, answer: &str) { #[track_caller] fn check_answer( #[rust_analyzer::rust_fixture] ra_fixture: &str, - check: impl FnOnce(&[u8], &MemoryMap), + check: impl FnOnce(&[u8], &MemoryMap<'_>), ) { let (db, file_ids) = TestDB::with_many_files(ra_fixture); let file_id = *file_ids.last().unwrap(); @@ -116,6 +116,7 @@ fn pretty_print_err(e: ConstEvalError, db: TestDB) -> String { } fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result { + let _tracing = setup_tracing(); let module_id = db.module_for_file(file_id.file_id(db)); let def_map = module_id.def_map(db); let scope = &def_map[module_id.local_id].scope; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval_nextsolver.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval_nextsolver.rs new file mode 100644 index 0000000000000..6e07d3afe5524 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval_nextsolver.rs @@ -0,0 +1,256 @@ +//! Constant evaluation details +// FIXME(next-solver): this should get removed as things get moved to rustc_type_ir from chalk_ir +#![allow(unused)] + +use base_db::Crate; +use hir_def::{ + EnumVariantId, GeneralConstId, + expr_store::{Body, HygieneId, path::Path}, + hir::{Expr, ExprId}, + resolver::{Resolver, ValueNs}, + type_ref::LiteralConstRef, +}; +use hir_expand::Lookup; +use rustc_type_ir::{ + UnevaluatedConst, + inherent::{IntoKind, SliceLike}, +}; +use stdx::never; +use triomphe::Arc; + +use crate::{ + ConstScalar, Interner, MemoryMap, Substitution, TraitEnvironment, + consteval::ConstEvalError, + db::HirDatabase, + generics::Generics, + infer::InferenceContext, + next_solver::{ + Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, + ParamConst, SolverDefId, Ty, ValueConst, + mapping::{ChalkToNextSolver, NextSolverToChalk, convert_binder_to_early_binder}, + }, +}; + +use super::mir::{interpret_mir, lower_to_mir, pad16}; + +pub(crate) fn path_to_const<'a, 'g>( + db: &'a dyn HirDatabase, + resolver: &Resolver<'a>, + path: &Path, + args: impl FnOnce() -> &'g Generics, + expected_ty: Ty<'a>, +) -> Option> { + let interner = DbInterner::new_with(db, Some(resolver.krate()), None); + match resolver.resolve_path_in_value_ns_fully(db, path, HygieneId::ROOT) { + Some(ValueNs::GenericParam(p)) => { + let args = args(); + match args + .type_or_const_param(p.into()) + .and_then(|(idx, p)| p.const_param().map(|p| (idx, p.clone()))) + { + Some((idx, _param)) => { + Some(Const::new_param(interner, ParamConst { index: idx as u32, id: p })) + } + None => { + never!( + "Generic list doesn't contain this param: {:?}, {:?}, {:?}", + args, + path, + p + ); + None + } + } + } + Some(ValueNs::ConstId(c)) => { + let args = GenericArgs::new_from_iter(interner, []); + Some(Const::new( + interner, + rustc_type_ir::ConstKind::Unevaluated(UnevaluatedConst::new( + SolverDefId::ConstId(c), + args, + )), + 
)) + } + _ => None, + } +} + +pub fn unknown_const<'db>(ty: Ty<'db>) -> Const<'db> { + Const::new(DbInterner::conjure(), rustc_type_ir::ConstKind::Error(ErrorGuaranteed)) +} + +pub fn unknown_const_as_generic<'db>(ty: Ty<'db>) -> GenericArg<'db> { + unknown_const(ty).into() +} + +/// Interns a constant scalar with the given type +pub fn intern_const_ref<'a>( + db: &'a dyn HirDatabase, + value: &LiteralConstRef, + ty: Ty<'a>, + krate: Crate, +) -> Const<'a> { + let interner = DbInterner::new_with(db, Some(krate), None); + let layout = db.layout_of_ty(ty, TraitEnvironment::empty(krate)); + let kind = match value { + LiteralConstRef::Int(i) => { + // FIXME: We should handle failure of layout better. + let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16); + rustc_type_ir::ConstKind::Value(ValueConst::new( + ty, + ConstBytes(i.to_le_bytes()[0..size].into(), MemoryMap::default()), + )) + } + LiteralConstRef::UInt(i) => { + let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16); + rustc_type_ir::ConstKind::Value(ValueConst::new( + ty, + ConstBytes(i.to_le_bytes()[0..size].into(), MemoryMap::default()), + )) + } + LiteralConstRef::Bool(b) => rustc_type_ir::ConstKind::Value(ValueConst::new( + ty, + ConstBytes(Box::new([*b as u8]), MemoryMap::default()), + )), + LiteralConstRef::Char(c) => rustc_type_ir::ConstKind::Value(ValueConst::new( + ty, + ConstBytes((*c as u32).to_le_bytes().into(), MemoryMap::default()), + )), + LiteralConstRef::Unknown => rustc_type_ir::ConstKind::Error(ErrorGuaranteed), + }; + Const::new(interner, kind) +} + +/// Interns a possibly-unknown target usize +pub fn usize_const<'db>(db: &'db dyn HirDatabase, value: Option, krate: Crate) -> Const<'db> { + intern_const_ref( + db, + &value.map_or(LiteralConstRef::Unknown, LiteralConstRef::UInt), + Ty::new_uint(DbInterner::new_with(db, Some(krate), None), rustc_type_ir::UintTy::Usize), + krate, + ) +} + +pub fn try_const_usize<'db>(db: &'db dyn HirDatabase, c: Const<'db>) -> Option { + let interner = DbInterner::new_with(db, None, None); + match c.kind() { + ConstKind::Param(_) => None, + ConstKind::Infer(_) => None, + ConstKind::Bound(_, _) => None, + ConstKind::Placeholder(_) => None, + ConstKind::Unevaluated(unevaluated_const) => { + let c = match unevaluated_const.def { + SolverDefId::ConstId(id) => GeneralConstId::ConstId(id), + SolverDefId::StaticId(id) => GeneralConstId::StaticId(id), + _ => unreachable!(), + }; + let subst = unevaluated_const.args.to_chalk(interner); + let ec = db.const_eval(c, subst, None).ok()?.to_nextsolver(interner); + try_const_usize(db, ec) + } + ConstKind::Value(val) => Some(u128::from_le_bytes(pad16(&val.value.inner().0, false))), + ConstKind::Error(_) => None, + ConstKind::Expr(_) => None, + } +} + +pub fn try_const_isize<'db>(db: &'db dyn HirDatabase, c: &Const<'db>) -> Option { + let interner = DbInterner::new_with(db, None, None); + match (*c).kind() { + ConstKind::Param(_) => None, + ConstKind::Infer(_) => None, + ConstKind::Bound(_, _) => None, + ConstKind::Placeholder(_) => None, + ConstKind::Unevaluated(unevaluated_const) => { + let c = match unevaluated_const.def { + SolverDefId::ConstId(id) => GeneralConstId::ConstId(id), + SolverDefId::StaticId(id) => GeneralConstId::StaticId(id), + _ => unreachable!(), + }; + let subst = unevaluated_const.args.to_chalk(interner); + let ec = db.const_eval(c, subst, None).ok()?.to_nextsolver(interner); + try_const_isize(db, &ec) + } + ConstKind::Value(val) => Some(i128::from_le_bytes(pad16(&val.value.inner().0, true))), + 
ConstKind::Error(_) => None, + ConstKind::Expr(_) => None, + } +} + +pub(crate) fn const_eval_discriminant_variant( + db: &dyn HirDatabase, + variant_id: EnumVariantId, +) -> Result<i128, ConstEvalError> { + let interner = DbInterner::new_with(db, None, None); + let def = variant_id.into(); + let body = db.body(def); + let loc = variant_id.lookup(db); + if matches!(body[body.body_expr], Expr::Missing) { + let prev_idx = loc.index.checked_sub(1); + let value = match prev_idx { + Some(prev_idx) => { + 1 + db.const_eval_discriminant( + loc.parent.enum_variants(db).variants[prev_idx as usize].0, + )? + } + _ => 0, + }; + return Ok(value); + } + + let repr = db.enum_signature(loc.parent).repr; + let is_signed = repr.and_then(|repr| repr.int).is_none_or(|int| int.is_signed()); + + let mir_body = db.monomorphized_mir_body( + def, + Substitution::empty(Interner), + db.trait_environment_for_body(def), + )?; + let c = interpret_mir(db, mir_body, false, None)?.0?; + let c = c.to_nextsolver(interner); + let c = if is_signed { + try_const_isize(db, &c).unwrap() + } else { + try_const_usize(db, c).unwrap() as i128 + }; + Ok(c) +} + +// FIXME: Ideally constants in const eval should have a separate body (issue #7434), and this function should +// get an `InferenceResult` instead of an `InferenceContext`. And we should remove `ctx.clone().resolve_all()` here +// and make this function private. See the fixme comment on `InferenceContext::resolve_all`. +pub(crate) fn eval_to_const<'db>(expr: ExprId, ctx: &mut InferenceContext<'db>) -> Const<'db> { + let interner = DbInterner::new_with(ctx.db, None, None); + let infer = ctx.clone().resolve_all(); + fn has_closure(body: &Body, expr: ExprId) -> bool { + if matches!(body[expr], Expr::Closure { .. }) { + return true; + } + let mut r = false; + body.walk_child_exprs(expr, |idx| r |= has_closure(body, idx)); + r + } + if has_closure(ctx.body, expr) { + // Type checking closures needs an isolated body (see the above FIXME). Bail out early to prevent a panic. + return unknown_const(infer[expr].clone().to_nextsolver(interner)); + } + if let Expr::Path(p) = &ctx.body[expr] { + let resolver = &ctx.resolver; + if let Some(c) = path_to_const( + ctx.db, + resolver, + p, + || ctx.generics(), + infer[expr].to_nextsolver(interner), + ) { + return c; + } + } + if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, ctx.body, &infer, expr) + && let Ok((Ok(result), _)) = interpret_mir(ctx.db, Arc::new(mir_body), true, None) + { + return result.to_nextsolver(interner); + } + unknown_const(infer[expr].to_nextsolver(interner)) +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs index b3d46845c443a..448fc4aede037 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs @@ -1,9 +1,8 @@ //! The home of `HirDatabase`, which is the Salsa database containing all the //! type inference-related queries. 
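The query trait below leans heavily on salsa interning: structured keys such as `InternedClosure` are mapped to small copyable ids that are cheap to hash, compare, and look up again. As a rough mental model only, here is a hand-rolled sketch of that idea; it is not salsa's actual machinery or API, and it ignores the revision tracking salsa adds for incremental invalidation.

use std::collections::HashMap;

// A toy interner: maps a key to a dense u32 id and back.
#[derive(Default)]
struct Interner<K: std::hash::Hash + Eq + Clone> {
    to_id: HashMap<K, u32>,
    from_id: Vec<K>,
}

impl<K: std::hash::Hash + Eq + Clone> Interner<K> {
    fn intern(&mut self, key: K) -> u32 {
        if let Some(&id) = self.to_id.get(&key) {
            return id;
        }
        let id = self.from_id.len() as u32;
        self.from_id.push(key.clone());
        self.to_id.insert(key, id);
        id
    }

    fn lookup(&self, id: u32) -> &K {
        &self.from_id[id as usize]
    }
}

fn main() {
    let mut closures = Interner::default();
    // Think of the key as `InternedClosure(DefWithBodyId, ExprId)`, simplified to a tuple here.
    let a = closures.intern((1u32, 7u32));
    let b = closures.intern((1u32, 7u32));
    assert_eq!(a, b); // same key, same id
    assert_eq!(*closures.lookup(a), (1, 7));
}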
-use std::sync; - -use base_db::{Crate, impl_intern_key}; +use base_db::Crate; +use base_db::target::TargetLoadError; use hir_def::{ AdtId, BlockId, CallableDefId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId, LifetimeParamId, LocalFieldId, StaticId, TraitId, @@ -17,7 +16,7 @@ use smallvec::SmallVec; use triomphe::Arc; use crate::{ - Binders, Const, ImplTraitId, ImplTraits, InferenceResult, Interner, PolyFnSig, Substitution, + Binders, Const, ImplTraitId, ImplTraits, InferenceResult, PolyFnSig, Substitution, TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId, chalk_db, consteval::ConstEvalError, drop::DropGlue, @@ -26,6 +25,7 @@ use crate::{ lower::{Diagnostics, GenericDefaults, GenericPredicates}, method_resolution::{InherentImpls, TraitImpls, TyFingerprint}, mir::{BorrowckResult, MirBody, MirLowerError}, + traits::NextTraitSolveResult, }; #[query_group::query_group] @@ -93,19 +93,23 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { #[salsa::invoke(crate::layout::layout_of_adt_query)] #[salsa::cycle(cycle_result = crate::layout::layout_of_adt_cycle_result)] - fn layout_of_adt( - &self, + fn layout_of_adt<'db>( + &'db self, def: AdtId, - subst: Substitution, - env: Arc, + args: crate::next_solver::GenericArgs<'db>, + trait_env: Arc, ) -> Result, LayoutError>; #[salsa::invoke(crate::layout::layout_of_ty_query)] #[salsa::cycle(cycle_result = crate::layout::layout_of_ty_cycle_result)] - fn layout_of_ty(&self, ty: Ty, env: Arc) -> Result, LayoutError>; + fn layout_of_ty<'db>( + &'db self, + ty: crate::next_solver::Ty<'db>, + env: Arc, + ) -> Result, LayoutError>; #[salsa::invoke(crate::layout::target_data_layout_query)] - fn target_data_layout(&self, krate: Crate) -> Result, Arc>; + fn target_data_layout(&self, krate: Crate) -> Result, TargetLoadError>; #[salsa::invoke(crate::dyn_compatibility::dyn_compatibility_of_trait_query)] fn dyn_compatibility_of_trait(&self, trait_: TraitId) -> Option; @@ -178,16 +182,6 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { #[salsa::invoke(crate::lower::generic_predicates_query)] fn generic_predicates(&self, def: GenericDefId) -> GenericPredicates; - #[salsa::invoke(crate::lower::generic_predicates_without_parent_with_diagnostics_query)] - fn generic_predicates_without_parent_with_diagnostics( - &self, - def: GenericDefId, - ) -> (GenericPredicates, Diagnostics); - - #[salsa::invoke(crate::lower::generic_predicates_without_parent_query)] - #[salsa::transparent] - fn generic_predicates_without_parent(&self, def: GenericDefId) -> GenericPredicates; - #[salsa::invoke(crate::lower::trait_environment_for_body_query)] #[salsa::transparent] fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc; @@ -245,26 +239,6 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { #[salsa::interned] fn intern_coroutine(&self, id: InternedCoroutine) -> InternedCoroutineId; - #[salsa::invoke(chalk_db::associated_ty_data_query)] - fn associated_ty_data(&self, id: TypeAliasId) -> sync::Arc; - - #[salsa::invoke(chalk_db::trait_datum_query)] - fn trait_datum( - &self, - krate: Crate, - trait_id: chalk_db::TraitId, - ) -> sync::Arc; - - #[salsa::invoke(chalk_db::adt_datum_query)] - fn adt_datum(&self, krate: Crate, struct_id: chalk_db::AdtId) -> sync::Arc; - - #[salsa::invoke(chalk_db::impl_datum_query)] - fn impl_datum(&self, krate: Crate, impl_id: chalk_db::ImplId) - -> sync::Arc; - - #[salsa::invoke(chalk_db::fn_def_datum_query)] - fn fn_def_datum(&self, fn_def_id: CallableDefId) -> sync::Arc; - 
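For context on what the removed chalk `*_datum` queries used to describe, plain surface Rust is enough: a trait datum roughly corresponds to a trait definition with its associated types and bounds, an impl datum to an impl block, an associated-ty value to the concrete `type ... = ...;` chosen in that impl, and a fn-def datum to a function signature. An illustrative example, unrelated to rust-analyzer's own code:

// Source-level counterparts of the dropped chalk queries:
// - trait datum      ~ the `Convert` trait (generics, bounds, assoc types)
// - impl datum       ~ the `impl Convert for Meters` block
// - assoc. ty value  ~ `type Output = f64;` inside that impl
// - fn def datum     ~ the signature of `convert`
trait Convert {
    type Output;
    fn convert(&self) -> Self::Output;
}

struct Meters(f64);

impl Convert for Meters {
    type Output = f64;
    fn convert(&self) -> f64 {
        self.0 * 3.28084 // meters to feet
    }
}

fn main() {
    assert!((Meters(2.0).convert() - 6.56168).abs() < 1e-9);
}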
#[salsa::invoke(chalk_db::fn_def_variance_query)] fn fn_def_variance(&self, fn_def_id: CallableDefId) -> chalk_db::Variances; @@ -279,13 +253,6 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { )] fn variances_of(&self, def: GenericDefId) -> Option>; - #[salsa::invoke(chalk_db::associated_ty_value_query)] - fn associated_ty_value( - &self, - krate: Crate, - id: chalk_db::AssociatedTyValueId, - ) -> sync::Arc; - #[salsa::invoke(crate::traits::normalize_projection_query)] #[salsa::transparent] fn normalize_projection( @@ -295,24 +262,153 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { ) -> Ty; #[salsa::invoke(crate::traits::trait_solve_query)] + #[salsa::transparent] fn trait_solve( &self, krate: Crate, block: Option, goal: crate::Canonical>, - ) -> Option; - - #[salsa::invoke(chalk_db::program_clauses_for_chalk_env_query)] - fn program_clauses_for_chalk_env( - &self, - krate: Crate, - block: Option, - env: chalk_ir::Environment, - ) -> chalk_ir::ProgramClauses; + ) -> NextTraitSolveResult; #[salsa::invoke(crate::drop::has_drop_glue)] #[salsa::cycle(cycle_result = crate::drop::has_drop_glue_cycle_result)] fn has_drop_glue(&self, ty: Ty, env: Arc) -> DropGlue; + + // next trait solver + + #[salsa::invoke(crate::lower_nextsolver::ty_query)] + #[salsa::transparent] + fn ty_ns<'db>( + &'db self, + def: TyDefId, + ) -> crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>; + + /// Returns the type of the value of the given constant, or `None` if the `ValueTyDefId` is + /// a `StructId` or `EnumVariantId` with a record constructor. + #[salsa::invoke(crate::lower_nextsolver::value_ty_query)] + fn value_ty_ns<'db>( + &'db self, + def: ValueTyDefId, + ) -> Option>>; + + #[salsa::invoke(crate::lower_nextsolver::type_for_type_alias_with_diagnostics_query)] + #[salsa::cycle(cycle_result = crate::lower_nextsolver::type_for_type_alias_with_diagnostics_cycle_result)] + fn type_for_type_alias_with_diagnostics_ns<'db>( + &'db self, + def: TypeAliasId, + ) -> (crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>, Diagnostics); + + #[salsa::invoke(crate::lower_nextsolver::impl_self_ty_with_diagnostics_query)] + #[salsa::cycle(cycle_result = crate::lower_nextsolver::impl_self_ty_with_diagnostics_cycle_result)] + fn impl_self_ty_with_diagnostics_ns<'db>( + &'db self, + def: ImplId, + ) -> (crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>, Diagnostics); + + #[salsa::invoke(crate::lower_nextsolver::impl_self_ty_query)] + #[salsa::transparent] + fn impl_self_ty_ns<'db>( + &'db self, + def: ImplId, + ) -> crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>; + + // FIXME: Make this a non-interned query. 
+ #[salsa::invoke_interned(crate::lower_nextsolver::const_param_ty_with_diagnostics_query)] + fn const_param_ty_with_diagnostics_ns<'db>( + &'db self, + def: ConstParamId, + ) -> (crate::next_solver::Ty<'db>, Diagnostics); + + #[salsa::invoke(crate::lower_nextsolver::const_param_ty_query)] + #[salsa::transparent] + fn const_param_ty_ns<'db>(&'db self, def: ConstParamId) -> crate::next_solver::Ty<'db>; + + #[salsa::invoke(crate::lower_nextsolver::impl_trait_with_diagnostics_query)] + fn impl_trait_with_diagnostics_ns<'db>( + &'db self, + def: ImplId, + ) -> Option<( + crate::next_solver::EarlyBinder<'db, crate::next_solver::TraitRef<'db>>, + Diagnostics, + )>; + + #[salsa::invoke(crate::lower_nextsolver::impl_trait_query)] + #[salsa::transparent] + fn impl_trait_ns<'db>( + &'db self, + def: ImplId, + ) -> Option>>; + + #[salsa::invoke(crate::lower_nextsolver::field_types_with_diagnostics_query)] + fn field_types_with_diagnostics_ns<'db>( + &'db self, + var: VariantId, + ) -> ( + Arc< + ArenaMap< + LocalFieldId, + crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>, + >, + >, + Diagnostics, + ); + + #[salsa::invoke(crate::lower_nextsolver::field_types_query)] + #[salsa::transparent] + fn field_types_ns<'db>( + &'db self, + var: VariantId, + ) -> Arc< + ArenaMap>>, + >; + + #[salsa::invoke(crate::lower_nextsolver::callable_item_signature_query)] + fn callable_item_signature_ns<'db>( + &'db self, + def: CallableDefId, + ) -> crate::next_solver::EarlyBinder<'db, crate::next_solver::PolyFnSig<'db>>; + + #[salsa::invoke(crate::lower_nextsolver::return_type_impl_traits)] + fn return_type_impl_traits_ns<'db>( + &'db self, + def: FunctionId, + ) -> Option>>>; + + #[salsa::invoke(crate::lower_nextsolver::type_alias_impl_traits)] + fn type_alias_impl_traits_ns<'db>( + &'db self, + def: TypeAliasId, + ) -> Option>>>; + + #[salsa::invoke(crate::lower_nextsolver::generic_predicates_for_param_query)] + #[salsa::cycle(cycle_result = crate::lower_nextsolver::generic_predicates_for_param_cycle_result)] + fn generic_predicates_for_param_ns<'db>( + &'db self, + def: GenericDefId, + param_id: TypeOrConstParamId, + assoc_name: Option, + ) -> crate::lower_nextsolver::GenericPredicates<'db>; + + #[salsa::invoke(crate::lower_nextsolver::generic_predicates_query)] + fn generic_predicates_ns<'db>( + &'db self, + def: GenericDefId, + ) -> crate::lower_nextsolver::GenericPredicates<'db>; + + #[salsa::invoke( + crate::lower_nextsolver::generic_predicates_without_parent_with_diagnostics_query + )] + fn generic_predicates_without_parent_with_diagnostics_ns<'db>( + &'db self, + def: GenericDefId, + ) -> (crate::lower_nextsolver::GenericPredicates<'db>, Diagnostics); + + #[salsa::invoke(crate::lower_nextsolver::generic_predicates_without_parent_query)] + #[salsa::transparent] + fn generic_predicates_without_parent_ns<'db>( + &'db self, + def: GenericDefId, + ) -> crate::lower_nextsolver::GenericPredicates<'db>; } #[test] @@ -320,40 +416,46 @@ fn hir_database_is_dyn_compatible() { fn _assert_dyn_compatible(_: &dyn HirDatabase) {} } -#[salsa_macros::interned(no_lifetime, revisions = usize::MAX)] +#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)] #[derive(PartialOrd, Ord)] pub struct InternedTypeOrConstParamId { - pub loc: TypeOrConstParamId, -} -impl ::std::fmt::Debug for InternedTypeOrConstParamId { - fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { - f.debug_tuple(stringify!(InternedTypeOrConstParamId)) - .field(&format_args!("{:04x}", self.0.index())) - .finish() - 
} + /// This stores the param and its index. + pub loc: (TypeOrConstParamId, u32), } -#[salsa_macros::interned(no_lifetime, revisions = usize::MAX)] +#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)] #[derive(PartialOrd, Ord)] pub struct InternedLifetimeParamId { - pub loc: LifetimeParamId, -} -impl ::std::fmt::Debug for InternedLifetimeParamId { - fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { - f.debug_tuple(stringify!(InternedLifetimeParamId)) - .field(&format_args!("{:04x}", self.0.index())) - .finish() - } + /// This stores the param and its index. + pub loc: (LifetimeParamId, u32), } -impl_intern_key!(InternedConstParamId, ConstParamId); +#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)] +#[derive(PartialOrd, Ord)] +pub struct InternedConstParamId { + pub loc: ConstParamId, +} -impl_intern_key!(InternedOpaqueTyId, ImplTraitId); +#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)] +#[derive(PartialOrd, Ord)] +pub struct InternedOpaqueTyId { + pub loc: ImplTraitId, +} #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct InternedClosure(pub DefWithBodyId, pub ExprId); -impl_intern_key!(InternedClosureId, InternedClosure); + +#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)] +#[derive(PartialOrd, Ord)] +pub struct InternedClosureId { + pub loc: InternedClosure, +} #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct InternedCoroutine(pub DefWithBodyId, pub ExprId); -impl_intern_key!(InternedCoroutineId, InternedCoroutine); + +#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)] +#[derive(PartialOrd, Ord)] +pub struct InternedCoroutineId { + pub loc: InternedCoroutine, +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs index b26bd2b8fa9c4..403ea05a4f53c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs @@ -5,7 +5,6 @@ use std::fmt; use base_db::Crate; -use chalk_solve::rust_ir::AdtKind; use either::Either; use hir_def::{ AdtId, AssocItemId, DefWithBodyId, HasModule, ItemContainerId, Lookup, @@ -300,11 +299,7 @@ impl ExprValidator { value_or_partial.is_none_or(|v| !matches!(v, ValueNs::StaticId(_))) } Expr::Field { expr, .. } => match self.infer.type_of_expr[*expr].kind(Interner) { - TyKind::Adt(adt, ..) - if db.adt_datum(self.owner.krate(db), *adt).kind == AdtKind::Union => - { - false - } + TyKind::Adt(adt, ..) if matches!(adt.0, AdtId::UnionId(_)) => false, _ => self.is_known_valid_scrutinee(*expr, db), }, Expr::Index { base, .. 
} => self.is_known_valid_scrutinee(*base, db), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs index 827585e50693a..3f04b72c2fc68 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs @@ -14,9 +14,11 @@ use hir_def::{ }; use span::Edition; +use crate::utils::TargetFeatureIsSafeInTarget; use crate::{ - InferenceResult, Interner, TargetFeatures, TyExt, TyKind, db::HirDatabase, - utils::is_fn_unsafe_to_call, + InferenceResult, Interner, TargetFeatures, TyExt, TyKind, + db::HirDatabase, + utils::{is_fn_unsafe_to_call, target_feature_is_safe_in_target}, }; #[derive(Debug, Default)] @@ -119,11 +121,11 @@ pub fn unsafe_operations( def: DefWithBodyId, body: &Body, current: ExprId, - callback: &mut dyn FnMut(InsideUnsafeBlock), + callback: &mut dyn FnMut(ExprOrPatId, InsideUnsafeBlock), ) { let mut visitor_callback = |diag| { - if let UnsafeDiagnostic::UnsafeOperation { inside_unsafe_block, .. } = diag { - callback(inside_unsafe_block); + if let UnsafeDiagnostic::UnsafeOperation { inside_unsafe_block, node, .. } = diag { + callback(node, inside_unsafe_block); } }; let mut visitor = UnsafeVisitor::new(db, infer, body, def, &mut visitor_callback); @@ -144,6 +146,9 @@ struct UnsafeVisitor<'db> { def_target_features: TargetFeatures, // FIXME: This needs to be the edition of the span of each call. edition: Edition, + /// On some targets (WASM), calling safe functions with `#[target_feature]` is always safe, even when + /// the target feature is not enabled. This flag encodes that. + target_feature_is_safe: TargetFeatureIsSafeInTarget, } impl<'db> UnsafeVisitor<'db> { @@ -159,7 +164,12 @@ impl<'db> UnsafeVisitor<'db> { DefWithBodyId::FunctionId(func) => TargetFeatures::from_attrs(&db.attrs(func.into())), _ => TargetFeatures::default(), }; - let edition = resolver.module().krate().data(db).edition; + let krate = resolver.module().krate(); + let edition = krate.data(db).edition; + let target_feature_is_safe = match &krate.workspace_data(db).target { + Ok(target) => target_feature_is_safe_in_target(target), + Err(_) => TargetFeatureIsSafeInTarget::No, + }; Self { db, infer, @@ -172,6 +182,7 @@ impl<'db> UnsafeVisitor<'db> { callback: unsafe_expr_cb, def_target_features, edition, + target_feature_is_safe, } } @@ -184,7 +195,13 @@ impl<'db> UnsafeVisitor<'db> { } fn check_call(&mut self, node: ExprId, func: FunctionId) { - let unsafety = is_fn_unsafe_to_call(self.db, func, &self.def_target_features, self.edition); + let unsafety = is_fn_unsafe_to_call( + self.db, + func, + &self.def_target_features, + self.edition, + self.target_feature_is_safe, + ); match unsafety { crate::utils::Unsafety::Safe => {} crate::utils::Unsafety::Unsafe => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index 8f35a3c214551..519e4b59237f4 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -11,7 +11,7 @@ use base_db::Crate; use chalk_ir::{BoundVar, Safety, TyKind}; use either::Either; use hir_def::{ - GenericDefId, HasModule, ImportPathConfig, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, + FindPathConfig, GeneralConstId, GenericDefId, HasModule, LocalFieldId, Lookup, ModuleDefId, ModuleId, TraitId, db::DefDatabase, expr_store::{ExpressionStore, path::Path}, @@ -37,26 
+37,36 @@ use rustc_apfloat::{ ieee::{Half as f16, Quad as f128}, }; use rustc_hash::FxHashSet; +use rustc_type_ir::{ + AliasTyKind, CoroutineArgsParts, RegionKind, + inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike}, +}; use smallvec::SmallVec; use span::Edition; use stdx::never; use triomphe::Arc; +use crate::next_solver::infer::DbInternerInferExt; +use crate::next_solver::infer::traits::ObligationCause; use crate::{ - AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, ConcreteConst, Const, - ConstScalar, ConstValue, DomainGoal, FnAbi, GenericArg, ImplTraitId, Interner, Lifetime, - LifetimeData, LifetimeOutlives, MemoryMap, Mutability, OpaqueTy, ProjectionTy, ProjectionTyExt, - QuantifiedWhereClause, Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, - TyExt, WhereClause, - consteval::try_const_usize, + AliasEq, AliasTy, Binders, CallableDefId, CallableSig, ConcreteConst, Const, ConstScalar, + ConstValue, DomainGoal, FnAbi, GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData, + LifetimeOutlives, MemoryMap, OpaqueTy, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, + TraitEnvironment, TraitRef, TraitRefExt, Ty, TyExt, WhereClause, consteval_nextsolver, db::{HirDatabase, InternedClosure}, - from_assoc_type_id, from_foreign_def_id, from_placeholder_idx, + from_assoc_type_id, from_placeholder_idx, generics::generics, infer::normalize, layout::Layout, lt_from_placeholder_idx, - mapping::from_chalk, mir::pad16, + next_solver::{ + BoundExistentialPredicate, DbInterner, GenericArgs, SolverDefId, + mapping::{ + ChalkToNextSolver, convert_args_for_result, convert_const_for_result, + convert_region_for_result, convert_ty_for_result, + }, + }, primitive, to_assoc_type_id, utils::{self, ClosureSubst, detect_variant_from_bytes}, }; @@ -185,6 +195,29 @@ impl HirFormatter<'_> { DisplayLifetime::Never => false, } } + + fn render_region(&self, lifetime: crate::next_solver::Region<'_>) -> bool { + match self.display_lifetimes { + DisplayLifetime::Always => true, + DisplayLifetime::OnlyStatic => { + matches!(lifetime.kind(), rustc_type_ir::RegionKind::ReStatic) + } + DisplayLifetime::OnlyNamed => { + matches!( + lifetime.kind(), + rustc_type_ir::RegionKind::RePlaceholder(_) + | rustc_type_ir::RegionKind::ReEarlyParam(_) + ) + } + DisplayLifetime::OnlyNamedOrStatic => matches!( + lifetime.kind(), + rustc_type_ir::RegionKind::ReStatic + | rustc_type_ir::RegionKind::RePlaceholder(_) + | rustc_type_ir::RegionKind::ReEarlyParam(_) + ), + DisplayLifetime::Never => false, + } + } } pub trait HirDisplay { @@ -476,10 +509,6 @@ impl DisplayKind { matches!(self, Self::SourceCode { .. }) } - fn is_test(self) -> bool { - matches!(self, Self::Test) - } - fn allows_opaque(self) -> bool { match self { Self::SourceCode { allow_opaque, .. 
} => allow_opaque, @@ -613,7 +642,7 @@ impl HirDisplay for ProjectionTy { && !f.bounds_formatting_ctx.contains(self) { let db = f.db; - let id = from_placeholder_idx(db, *idx); + let id = from_placeholder_idx(db, *idx).0; let generics = generics(db, id.parent); let substs = generics.placeholder_subst(db); @@ -688,28 +717,55 @@ impl HirDisplay for GenericArg { } } +impl<'db> HirDisplay for crate::next_solver::GenericArg<'db> { + fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { + match self.kind() { + rustc_type_ir::GenericArgKind::Type(ty) => ty.hir_fmt(f), + rustc_type_ir::GenericArgKind::Lifetime(lt) => lt.hir_fmt(f), + rustc_type_ir::GenericArgKind::Const(c) => c.hir_fmt(f), + } + } +} + impl HirDisplay for Const { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - let data = self.interned(); - match &data.value { - ConstValue::BoundVar(idx) => idx.hir_fmt(f), - ConstValue::InferenceVar(..) => write!(f, "#c#"), - ConstValue::Placeholder(idx) => { - let id = from_placeholder_idx(f.db, *idx); - let generics = generics(f.db, id.parent); - let param_data = &generics[id.local_id]; + let c = self.to_nextsolver(DbInterner::new_with(f.db, None, None)); + c.hir_fmt(f) + } +} + +impl<'db> HirDisplay for crate::next_solver::Const<'db> { + fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { + match self.kind() { + rustc_type_ir::ConstKind::Placeholder(_) => write!(f, ""), + rustc_type_ir::ConstKind::Bound(db, bound_const) => { + write!(f, "?{}.{}", db.as_u32(), bound_const.var.as_u32()) + } + rustc_type_ir::ConstKind::Infer(..) => write!(f, "#c#"), + rustc_type_ir::ConstKind::Param(param) => { + let generics = generics(f.db, param.id.parent()); + let param_data = &generics[param.id.local_id()]; write!(f, "{}", param_data.name().unwrap().display(f.db, f.edition()))?; Ok(()) } - ConstValue::Concrete(c) => match &c.interned { - ConstScalar::Bytes(b, m) => render_const_scalar(f, b, m, &data.ty), - ConstScalar::UnevaluatedConst(c, parameters) => { - write!(f, "{}", c.name(f.db))?; - hir_fmt_generics(f, parameters.as_slice(Interner), c.generic_def(f.db), None)?; - Ok(()) - } - ConstScalar::Unknown => f.write_char('_'), - }, + rustc_type_ir::ConstKind::Value(const_bytes) => render_const_scalar_ns( + f, + &const_bytes.value.inner().0, + &const_bytes.value.inner().1, + const_bytes.ty, + ), + rustc_type_ir::ConstKind::Unevaluated(unev) => { + let c = match unev.def { + SolverDefId::ConstId(id) => GeneralConstId::ConstId(id), + SolverDefId::StaticId(id) => GeneralConstId::StaticId(id), + _ => unreachable!(), + }; + write!(f, "{}", c.name(f.db))?; + hir_fmt_generics_ns(f, unev.args.as_slice(), c.generic_def(f.db), None)?; + Ok(()) + } + rustc_type_ir::ConstKind::Error(..) => f.write_char('_'), + rustc_type_ir::ConstKind::Expr(..) 
=> write!(f, ""), } } } @@ -717,63 +773,90 @@ impl HirDisplay for Const { fn render_const_scalar( f: &mut HirFormatter<'_>, b: &[u8], - memory_map: &MemoryMap, + memory_map: &MemoryMap<'_>, ty: &Ty, ) -> Result<(), HirDisplayError> { let trait_env = TraitEnvironment::empty(f.krate()); + let interner = DbInterner::new_with(f.db, Some(trait_env.krate), trait_env.block); let ty = normalize(f.db, trait_env.clone(), ty.clone()); - match ty.kind(Interner) { - TyKind::Scalar(s) => match s { - Scalar::Bool => write!(f, "{}", b[0] != 0), - Scalar::Char => { - let it = u128::from_le_bytes(pad16(b, false)) as u32; - let Ok(c) = char::try_from(it) else { - return f.write_str(""); - }; - write!(f, "{c:?}") - } - Scalar::Int(_) => { - let it = i128::from_le_bytes(pad16(b, true)); - write!(f, "{it}") - } - Scalar::Uint(_) => { - let it = u128::from_le_bytes(pad16(b, false)); - write!(f, "{it}") - } - Scalar::Float(fl) => match fl { - chalk_ir::FloatTy::F16 => { - // FIXME(#17451): Replace with builtins once they are stabilised. - let it = f16::from_bits(u16::from_le_bytes(b.try_into().unwrap()).into()); - let s = it.to_string(); - if s.strip_prefix('-').unwrap_or(&s).chars().all(|c| c.is_ascii_digit()) { - // Match Rust debug formatting - write!(f, "{s}.0") - } else { - write!(f, "{s}") - } - } - chalk_ir::FloatTy::F32 => { - let it = f32::from_le_bytes(b.try_into().unwrap()); - write!(f, "{it:?}") - } - chalk_ir::FloatTy::F64 => { - let it = f64::from_le_bytes(b.try_into().unwrap()); - write!(f, "{it:?}") + let ty = ty.to_nextsolver(interner); + render_const_scalar_inner(f, b, memory_map, ty, trait_env) +} + +fn render_const_scalar_ns( + f: &mut HirFormatter<'_>, + b: &[u8], + memory_map: &MemoryMap<'_>, + ty: crate::next_solver::Ty<'_>, +) -> Result<(), HirDisplayError> { + let trait_env = TraitEnvironment::empty(f.krate()); + let interner = DbInterner::new_with(f.db, Some(trait_env.krate), trait_env.block); + let infcx = interner.infer_ctxt().build(rustc_type_ir::TypingMode::PostAnalysis); + let ty = infcx + .at(&ObligationCause::new(), trait_env.env.to_nextsolver(interner)) + .deeply_normalize(ty) + .unwrap_or(ty); + render_const_scalar_inner(f, b, memory_map, ty, trait_env) +} + +fn render_const_scalar_inner( + f: &mut HirFormatter<'_>, + b: &[u8], + memory_map: &MemoryMap<'_>, + ty: crate::next_solver::Ty<'_>, + trait_env: Arc, +) -> Result<(), HirDisplayError> { + use rustc_type_ir::TyKind; + match ty.kind() { + TyKind::Bool => write!(f, "{}", b[0] != 0), + TyKind::Char => { + let it = u128::from_le_bytes(pad16(b, false)) as u32; + let Ok(c) = char::try_from(it) else { + return f.write_str(""); + }; + write!(f, "{c:?}") + } + TyKind::Int(_) => { + let it = i128::from_le_bytes(pad16(b, true)); + write!(f, "{it}") + } + TyKind::Uint(_) => { + let it = u128::from_le_bytes(pad16(b, false)); + write!(f, "{it}") + } + TyKind::Float(fl) => match fl { + rustc_type_ir::FloatTy::F16 => { + // FIXME(#17451): Replace with builtins once they are stabilised. + let it = f16::from_bits(u16::from_le_bytes(b.try_into().unwrap()).into()); + let s = it.to_string(); + if s.strip_prefix('-').unwrap_or(&s).chars().all(|c| c.is_ascii_digit()) { + // Match Rust debug formatting + write!(f, "{s}.0") + } else { + write!(f, "{s}") } - chalk_ir::FloatTy::F128 => { - // FIXME(#17451): Replace with builtins once they are stabilised. 
- let it = f128::from_bits(u128::from_le_bytes(b.try_into().unwrap())); - let s = it.to_string(); - if s.strip_prefix('-').unwrap_or(&s).chars().all(|c| c.is_ascii_digit()) { - // Match Rust debug formatting - write!(f, "{s}.0") - } else { - write!(f, "{s}") - } + } + rustc_type_ir::FloatTy::F32 => { + let it = f32::from_le_bytes(b.try_into().unwrap()); + write!(f, "{it:?}") + } + rustc_type_ir::FloatTy::F64 => { + let it = f64::from_le_bytes(b.try_into().unwrap()); + write!(f, "{it:?}") + } + rustc_type_ir::FloatTy::F128 => { + // FIXME(#17451): Replace with builtins once they are stabilised. + let it = f128::from_bits(u128::from_le_bytes(b.try_into().unwrap())); + let s = it.to_string(); + if s.strip_prefix('-').unwrap_or(&s).chars().all(|c| c.is_ascii_digit()) { + // Match Rust debug formatting + write!(f, "{s}.0") + } else { + write!(f, "{s}") } - }, + } }, - TyKind::Ref(_, _, t) => match t.kind(Interner) { + TyKind::Ref(_, t, _) => match t.kind() { TyKind::Str => { let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap()); let size = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap()); @@ -786,7 +869,7 @@ fn render_const_scalar( TyKind::Slice(ty) => { let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap()); let count = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap()); - let Ok(layout) = f.db.layout_of_ty(ty.clone(), trait_env) else { + let Ok(layout) = f.db.layout_of_ty(ty, trait_env) else { return f.write_str(""); }; let size_one = layout.size.bytes_usize(); @@ -810,17 +893,17 @@ fn render_const_scalar( f.write_str(", ")?; } let offset = size_one * i; - render_const_scalar(f, &bytes[offset..offset + size_one], memory_map, ty)?; + render_const_scalar_ns(f, &bytes[offset..offset + size_one], memory_map, ty)?; } f.write_str("]") } - TyKind::Dyn(_) => { + TyKind::Dynamic(_, _) => { let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap()); let ty_id = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap()); let Ok(t) = memory_map.vtable_ty(ty_id) else { return f.write_str(""); }; - let Ok(layout) = f.db.layout_of_ty(t.clone(), trait_env) else { + let Ok(layout) = f.db.layout_of_ty(t, trait_env) else { return f.write_str(""); }; let size = layout.size.bytes_usize(); @@ -828,9 +911,9 @@ fn render_const_scalar( return f.write_str(""); }; f.write_str("&")?; - render_const_scalar(f, bytes, memory_map, t) + render_const_scalar_ns(f, bytes, memory_map, t) } - TyKind::Adt(adt, _) if b.len() == 2 * size_of::() => match adt.0 { + TyKind::Adt(adt, _) if b.len() == 2 * size_of::() => match adt.def_id().0 { hir_def::AdtId::StructId(s) => { let data = f.db.struct_signature(s); write!(f, "&{}", data.name.display(f.db, f.edition()))?; @@ -850,7 +933,7 @@ fn render_const_scalar( return f.write_str(""); } }); - let Ok(layout) = f.db.layout_of_ty(t.clone(), trait_env) else { + let Ok(layout) = f.db.layout_of_ty(t, trait_env) else { return f.write_str(""); }; let size = layout.size.bytes_usize(); @@ -858,37 +941,37 @@ fn render_const_scalar( return f.write_str(""); }; f.write_str("&")?; - render_const_scalar(f, bytes, memory_map, t) + render_const_scalar_ns(f, bytes, memory_map, t) } }, - TyKind::Tuple(_, subst) => { - let Ok(layout) = f.db.layout_of_ty(ty.clone(), trait_env.clone()) else { + TyKind::Tuple(tys) => { + let Ok(layout) = f.db.layout_of_ty(ty, trait_env.clone()) else { return f.write_str(""); }; f.write_str("(")?; let mut first = true; - for (id, ty) in subst.iter(Interner).enumerate() { + for (id, ty) in tys.iter().enumerate() { if 
first { first = false; } else { f.write_str(", ")?; } - let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument let offset = layout.fields.offset(id).bytes_usize(); - let Ok(layout) = f.db.layout_of_ty(ty.clone(), trait_env.clone()) else { + let Ok(layout) = f.db.layout_of_ty(ty, trait_env.clone()) else { f.write_str("")?; continue; }; let size = layout.size.bytes_usize(); - render_const_scalar(f, &b[offset..offset + size], memory_map, ty)?; + render_const_scalar_ns(f, &b[offset..offset + size], memory_map, ty)?; } f.write_str(")") } - TyKind::Adt(adt, subst) => { - let Ok(layout) = f.db.layout_of_adt(adt.0, subst.clone(), trait_env.clone()) else { + TyKind::Adt(def, args) => { + let def = def.def_id().0; + let Ok(layout) = f.db.layout_of_adt(def, args, trait_env.clone()) else { return f.write_str(""); }; - match adt.0 { + match def { hir_def::AdtId::StructId(s) => { let data = f.db.struct_signature(s); write!(f, "{}", data.name.display(f.db, f.edition()))?; @@ -897,9 +980,9 @@ fn render_const_scalar( s.fields(f.db), f, &field_types, - f.db.trait_environment(adt.0.into()), + f.db.trait_environment(def.into()), &layout, - subst, + args, b, memory_map, ) @@ -929,9 +1012,9 @@ fn render_const_scalar( var_id.fields(f.db), f, &field_types, - f.db.trait_environment(adt.0.into()), + f.db.trait_environment(def.into()), var_layout, - subst, + args, b, memory_map, ) @@ -939,16 +1022,16 @@ fn render_const_scalar( } } TyKind::FnDef(..) => ty.hir_fmt(f), - TyKind::Function(_) | TyKind::Raw(_, _) => { + TyKind::FnPtr(_, _) | TyKind::RawPtr(_, _) => { let it = u128::from_le_bytes(pad16(b, false)); write!(f, "{it:#X} as ")?; ty.hir_fmt(f) } TyKind::Array(ty, len) => { - let Some(len) = try_const_usize(f.db, len) else { + let Some(len) = consteval_nextsolver::try_const_usize(f.db, len) else { return f.write_str(""); }; - let Ok(layout) = f.db.layout_of_ty(ty.clone(), trait_env) else { + let Ok(layout) = f.db.layout_of_ty(ty, trait_env) else { return f.write_str(""); }; let size_one = layout.size.bytes_usize(); @@ -961,7 +1044,7 @@ fn render_const_scalar( f.write_str(", ")?; } let offset = size_one * i; - render_const_scalar(f, &b[offset..offset + size_one], memory_map, ty)?; + render_const_scalar_ns(f, &b[offset..offset + size_one], memory_map, ty)?; } f.write_str("]") } @@ -969,17 +1052,19 @@ fn render_const_scalar( TyKind::Closure(_, _) => f.write_str(""), TyKind::Coroutine(_, _) => f.write_str(""), TyKind::CoroutineWitness(_, _) => f.write_str(""), + TyKind::CoroutineClosure(_, _) => f.write_str(""), + TyKind::UnsafeBinder(_) => f.write_str(""), // The below arms are unreachable, since const eval will bail out before here. TyKind::Foreign(_) => f.write_str(""), - TyKind::Error + TyKind::Pat(_, _) => f.write_str(""), + TyKind::Error(..) | TyKind::Placeholder(_) - | TyKind::Alias(_) - | TyKind::AssociatedType(_, _) - | TyKind::OpaqueType(_, _) - | TyKind::BoundVar(_) - | TyKind::InferenceVar(_, _) => f.write_str(""), + | TyKind::Alias(_, _) + | TyKind::Param(_) + | TyKind::Bound(_, _) + | TyKind::Infer(_) => f.write_str(""), // The below arms are unreachable, since we handled them in ref case. 
- TyKind::Slice(_) | TyKind::Str | TyKind::Dyn(_) => f.write_str(""), + TyKind::Slice(_) | TyKind::Str | TyKind::Dynamic(_, _) => f.write_str(""), } } @@ -989,16 +1074,20 @@ fn render_variant_after_name( field_types: &ArenaMap>, trait_env: Arc, layout: &Layout, - subst: &Substitution, + args: GenericArgs<'_>, b: &[u8], - memory_map: &MemoryMap, + memory_map: &MemoryMap<'_>, ) -> Result<(), HirDisplayError> { + let interner = DbInterner::new_with(f.db, Some(trait_env.krate), trait_env.block); match data.shape { FieldsShape::Record | FieldsShape::Tuple => { let render_field = |f: &mut HirFormatter<'_>, id: LocalFieldId| { let offset = layout.fields.offset(u32::from(id.into_raw()) as usize).bytes_usize(); - let ty = field_types[id].clone().substitute(Interner, subst); - let Ok(layout) = f.db.layout_of_ty(ty.clone(), trait_env.clone()) else { + let ty = field_types[id] + .clone() + .substitute(Interner, &convert_args_for_result(interner, args.as_slice())); + let Ok(layout) = f.db.layout_of_ty(ty.to_nextsolver(interner), trait_env.clone()) + else { return f.write_str(""); }; let size = layout.size.bytes_usize(); @@ -1045,18 +1134,30 @@ impl HirDisplay for Ty { &self, f @ &mut HirFormatter { db, .. }: &mut HirFormatter<'_>, ) -> Result<(), HirDisplayError> { + let ty = self.to_nextsolver(DbInterner::new_with(db, None, None)); + ty.hir_fmt(f) + } +} + +impl<'db> HirDisplay for crate::next_solver::Ty<'db> { + fn hir_fmt( + &self, + f @ &mut HirFormatter { db, .. }: &mut HirFormatter<'_>, + ) -> Result<(), HirDisplayError> { + let interner = DbInterner::new_with(db, None, None); if f.should_truncate() { return write!(f, "{TYPE_HINT_TRUNCATION}"); } - match self.kind(Interner) { + use rustc_type_ir::TyKind; + match self.kind() { TyKind::Never => write!(f, "!")?, TyKind::Str => write!(f, "str")?, - TyKind::Scalar(Scalar::Bool) => write!(f, "bool")?, - TyKind::Scalar(Scalar::Char) => write!(f, "char")?, - &TyKind::Scalar(Scalar::Float(t)) => write!(f, "{}", primitive::float_ty_to_string(t))?, - &TyKind::Scalar(Scalar::Int(t)) => write!(f, "{}", primitive::int_ty_to_string(t))?, - &TyKind::Scalar(Scalar::Uint(t)) => write!(f, "{}", primitive::uint_ty_to_string(t))?, + TyKind::Bool => write!(f, "bool")?, + TyKind::Char => write!(f, "char")?, + TyKind::Float(t) => write!(f, "{}", primitive::float_ty_to_string_ns(t))?, + TyKind::Int(t) => write!(f, "{}", primitive::int_ty_to_string_ns(t))?, + TyKind::Uint(t) => write!(f, "{}", primitive::uint_ty_to_string_ns(t))?, TyKind::Slice(t) => { write!(f, "[")?; t.hir_fmt(f)?; @@ -1066,27 +1167,27 @@ impl HirDisplay for Ty { write!(f, "[")?; t.hir_fmt(f)?; write!(f, "; ")?; - c.hir_fmt(f)?; + convert_const_for_result(interner, c).hir_fmt(f)?; write!(f, "]")?; } - kind @ (TyKind::Raw(m, t) | TyKind::Ref(m, _, t)) => { - if let TyKind::Ref(_, l, _) = kind { + kind @ (TyKind::RawPtr(t, m) | TyKind::Ref(_, t, m)) => { + if let TyKind::Ref(l, _, _) = kind { f.write_char('&')?; - if f.render_lifetime(l) { - l.hir_fmt(f)?; + if f.render_region(l) { + convert_region_for_result(interner, l).hir_fmt(f)?; f.write_char(' ')?; } match m { - Mutability::Not => (), - Mutability::Mut => f.write_str("mut ")?, + rustc_ast_ir::Mutability::Not => (), + rustc_ast_ir::Mutability::Mut => f.write_str("mut ")?, } } else { write!( f, "*{}", match m { - Mutability::Not => "const ", - Mutability::Mut => "mut ", + rustc_ast_ir::Mutability::Not => "const ", + rustc_ast_ir::Mutability::Mut => "mut ", } )?; } @@ -1102,25 +1203,39 @@ impl HirDisplay for Ty { } }) }; - let (preds_to_print, 
has_impl_fn_pred) = match t.kind(Interner) { - TyKind::Dyn(dyn_ty) => { - let bounds = dyn_ty.bounds.skip_binders().interned(); - let render_lifetime = f.render_lifetime(&dyn_ty.lifetime); - (bounds.len() + render_lifetime as usize, contains_impl_fn(bounds)) + let contains_impl_fn_ns = |bounds: &[BoundExistentialPredicate<'_>]| { + bounds.iter().any(|bound| match bound.skip_binder() { + rustc_type_ir::ExistentialPredicate::Trait(trait_ref) => { + let trait_ = trait_ref.def_id.0; + fn_traits(db, trait_).any(|it| it == trait_) + } + _ => false, + }) + }; + let (preds_to_print, has_impl_fn_pred) = match t.kind() { + TyKind::Dynamic(bounds, region) => { + let render_lifetime = f.render_region(region); + ( + bounds.len() + render_lifetime as usize, + contains_impl_fn_ns(bounds.as_slice()), + ) } - TyKind::Alias(AliasTy::Opaque(OpaqueTy { - opaque_ty_id, - substitution: parameters, - })) - | TyKind::OpaqueType(opaque_ty_id, parameters) => { - let impl_trait_id = db.lookup_intern_impl_trait_id((*opaque_ty_id).into()); + TyKind::Alias(AliasTyKind::Opaque, ty) => { + let opaque_ty_id = match ty.def_id { + SolverDefId::InternedOpaqueTyId(id) => id, + _ => unreachable!(), + }; + let impl_trait_id = db.lookup_intern_impl_trait_id(opaque_ty_id); if let ImplTraitId::ReturnTypeImplTrait(func, idx) = impl_trait_id { let datas = db .return_type_impl_traits(func) .expect("impl trait id without data"); let data = (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); - let bounds = data.substitute(Interner, parameters); + let bounds = data.substitute( + Interner, + &convert_args_for_result(interner, ty.args.as_slice()), + ); let mut len = bounds.skip_binders().len(); // Don't count Sized but count when it absent @@ -1167,24 +1282,26 @@ impl HirDisplay for Ty { t.hir_fmt(f)?; } } - TyKind::Tuple(_, substs) => { - if substs.len(Interner) == 1 { + TyKind::Tuple(tys) => { + if tys.len() == 1 { write!(f, "(")?; - substs.at(Interner, 0).hir_fmt(f)?; + tys.as_slice()[0].hir_fmt(f)?; write!(f, ",)")?; } else { write!(f, "(")?; - f.write_joined(substs.as_slice(Interner), ", ")?; + f.write_joined(tys.as_slice(), ", ")?; write!(f, ")")?; } } - TyKind::Function(fn_ptr) => { - let sig = CallableSig::from_fn_ptr(fn_ptr); + TyKind::FnPtr(sig, header) => { + let sig = CallableSig::from_fn_sig_and_header(interner, sig, header); sig.hir_fmt(f)?; } - TyKind::FnDef(def, parameters) => { - let def = from_chalk(db, *def); - let sig = db.callable_item_signature(def).substitute(Interner, parameters); + TyKind::FnDef(def, args) => { + let def = def.0; + let sig = db + .callable_item_signature(def) + .substitute(Interner, &convert_args_for_result(interner, args.as_slice())); if f.display_kind.is_source_code() { // `FnDef` is anonymous and there's no surface syntax for it. 
Show it as a @@ -1222,6 +1339,7 @@ impl HirDisplay for Ty { }; f.end_location_link(); + let parameters = convert_args_for_result(interner, args.as_slice()); if parameters.len(Interner) > 0 { let generic_def_id = GenericDefId::from_callable(db, def); let generics = generics(db, generic_def_id); @@ -1280,11 +1398,12 @@ impl HirDisplay for Ty { ret.hir_fmt(f)?; } } - TyKind::Adt(AdtId(def_id), parameters) => { - f.start_location_link((*def_id).into()); + TyKind::Adt(def, parameters) => { + let def_id = def.def_id().0; + f.start_location_link(def_id.into()); match f.display_kind { DisplayKind::Diagnostics | DisplayKind::Test => { - let name = match *def_id { + let name = match def_id { hir_def::AdtId::StructId(it) => db.struct_signature(it).name.clone(), hir_def::AdtId::UnionId(it) => db.union_signature(it).name.clone(), hir_def::AdtId::EnumId(it) => db.enum_signature(it).name.clone(), @@ -1294,12 +1413,12 @@ impl HirDisplay for Ty { DisplayKind::SourceCode { target_module_id: module_id, allow_opaque: _ } => { if let Some(path) = find_path::find_path( db, - ItemInNs::Types((*def_id).into()), + ItemInNs::Types(def_id.into()), module_id, PrefixKind::Plain, false, // FIXME: no_std Cfg? - ImportPathConfig { + FindPathConfig { prefer_no_std: false, prefer_prelude: true, prefer_absolute: false, @@ -1316,55 +1435,45 @@ impl HirDisplay for Ty { } f.end_location_link(); - let generic_def = self.as_generic_def(db); - - hir_fmt_generics(f, parameters.as_slice(Interner), generic_def, None)?; + hir_fmt_generics( + f, + convert_args_for_result(interner, parameters.as_slice()).as_slice(Interner), + Some(def.def_id().0.into()), + None, + )?; } - TyKind::AssociatedType(assoc_type_id, parameters) => { - let type_alias = from_assoc_type_id(*assoc_type_id); - let trait_ = match type_alias.lookup(db).container { - ItemContainerId::TraitId(it) => it, - _ => panic!("not an associated type"), + TyKind::Alias(AliasTyKind::Projection, alias_ty) => { + let type_alias = match alias_ty.def_id { + SolverDefId::TypeAliasId(id) => id, + _ => unreachable!(), }; - let trait_data = db.trait_signature(trait_); - let type_alias_data = db.type_alias_signature(type_alias); - - // Use placeholder associated types when the target is test (https://rust-lang.github.io/chalk/book/clauses/type_equality.html#placeholder-associated-types) - if f.display_kind.is_test() { - f.start_location_link(trait_.into()); - write!(f, "{}", trait_data.name.display(f.db, f.edition()))?; - f.end_location_link(); - write!(f, "::")?; + let parameters = convert_args_for_result(interner, alias_ty.args.as_slice()); - f.start_location_link(type_alias.into()); - write!(f, "{}", type_alias_data.name.display(f.db, f.edition()))?; - f.end_location_link(); - // Note that the generic args for the associated type come before those for the - // trait (including the self type). 
- hir_fmt_generics(f, parameters.as_slice(Interner), None, None) - } else { - let projection_ty = ProjectionTy { - associated_ty_id: to_assoc_type_id(type_alias), - substitution: parameters.clone(), - }; + let projection_ty = ProjectionTy { + associated_ty_id: to_assoc_type_id(type_alias), + substitution: parameters.clone(), + }; - projection_ty.hir_fmt(f) - }?; + projection_ty.hir_fmt(f)?; } - TyKind::Foreign(type_alias) => { - let alias = from_foreign_def_id(*type_alias); - let type_alias = db.type_alias_signature(alias); - f.start_location_link(alias.into()); + TyKind::Foreign(alias) => { + let type_alias = db.type_alias_signature(alias.0); + f.start_location_link(alias.0.into()); write!(f, "{}", type_alias.name.display(f.db, f.edition()))?; f.end_location_link(); } - TyKind::OpaqueType(opaque_ty_id, parameters) => { + TyKind::Alias(AliasTyKind::Opaque, alias_ty) => { + let opaque_ty_id = match alias_ty.def_id { + SolverDefId::InternedOpaqueTyId(id) => id, + _ => unreachable!(), + }; + let parameters = convert_args_for_result(interner, alias_ty.args.as_slice()); if !f.display_kind.allows_opaque() { return Err(HirDisplayError::DisplaySourceCodeError( DisplaySourceCodeError::OpaqueType, )); } - let impl_trait_id = db.lookup_intern_impl_trait_id((*opaque_ty_id).into()); + let impl_trait_id = db.lookup_intern_impl_trait_id(opaque_ty_id); match impl_trait_id { ImplTraitId::ReturnTypeImplTrait(func, idx) => { let datas = @@ -1376,7 +1485,7 @@ impl HirDisplay for Ty { write_bounds_like_dyn_trait_with_prefix( f, "impl", - Either::Left(self), + Either::Left(&convert_ty_for_result(interner, *self)), bounds.skip_binders(), SizedByDefault::Sized { anchor: krate }, )?; @@ -1391,7 +1500,7 @@ impl HirDisplay for Ty { write_bounds_like_dyn_trait_with_prefix( f, "impl", - Either::Left(self), + Either::Left(&convert_ty_for_result(interner, *self)), bounds.skip_binders(), SizedByDefault::Sized { anchor: krate }, )?; @@ -1426,6 +1535,8 @@ impl HirDisplay for Ty { } } TyKind::Closure(id, substs) => { + let id = id.0; + let substs = convert_args_for_result(interner, substs.as_slice()); if f.display_kind.is_source_code() { if !f.display_kind.allows_opaque() { return Err(HirDisplayError::DisplaySourceCodeError( @@ -1435,22 +1546,23 @@ impl HirDisplay for Ty { never!("Only `impl Fn` is valid for displaying closures in source code"); } } + let chalk_id: chalk_ir::ClosureId<_> = id.into(); match f.closure_style { ClosureStyle::Hide => return write!(f, "{TYPE_HINT_TRUNCATION}"), ClosureStyle::ClosureWithId => { - return write!(f, "{{closure#{:?}}}", id.0.index()); + return write!(f, "{{closure#{:?}}}", chalk_id.0.index()); } ClosureStyle::ClosureWithSubst => { - write!(f, "{{closure#{:?}}}", id.0.index())?; + write!(f, "{{closure#{:?}}}", chalk_id.0.index())?; return hir_fmt_generics(f, substs.as_slice(Interner), None, None); } _ => (), } - let sig = ClosureSubst(substs).sig_ty().callable_sig(db); + let sig = ClosureSubst(&substs).sig_ty(db).callable_sig(db); if let Some(sig) = sig { - let InternedClosure(def, _) = db.lookup_intern_closure((*id).into()); + let InternedClosure(def, _) = db.lookup_intern_closure(id); let infer = db.infer(def); - let (_, kind) = infer.closure_info(id); + let (_, kind) = infer.closure_info(&chalk_id); match f.closure_style { ClosureStyle::ImplFn => write!(f, "impl {kind:?}(")?, ClosureStyle::RANotation => write!(f, "|")?, @@ -1477,10 +1589,10 @@ impl HirDisplay for Ty { write!(f, "{{closure}}")?; } } - TyKind::Placeholder(idx) => { - let id = from_placeholder_idx(db, *idx); - let 
generics = generics(db, id.parent); - let param_data = &generics[id.local_id]; + TyKind::Placeholder(_) => write!(f, "{{placeholder}}")?, + TyKind::Param(param) => { + let generics = generics(db, param.id.parent()); + let param_data = &generics[param.id.local_id()]; match param_data { TypeOrConstParamData::TypeParamData(p) => match p.provenance { TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => { @@ -1496,27 +1608,33 @@ impl HirDisplay for Ty { TypeParamProvenance::ArgumentImplTrait => { let substs = generics.placeholder_subst(db); let bounds = db - .generic_predicates(id.parent) + .generic_predicates(param.id.parent()) .iter() .map(|pred| pred.clone().substitute(Interner, &substs)) .filter(|wc| match wc.skip_binders() { WhereClause::Implemented(tr) => { - tr.self_type_parameter(Interner) == *self + tr.self_type_parameter(Interner) + == convert_ty_for_result(interner, *self) } WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(proj), ty: _, - }) => proj.self_type_parameter(db) == *self, + }) => { + proj.self_type_parameter(db) + == convert_ty_for_result(interner, *self) + } WhereClause::AliasEq(_) => false, - WhereClause::TypeOutlives(to) => to.ty == *self, + WhereClause::TypeOutlives(to) => { + to.ty == convert_ty_for_result(interner, *self) + } WhereClause::LifetimeOutlives(_) => false, }) .collect::>(); - let krate = id.parent.module(db).krate(); + let krate = param.id.parent().module(db).krate(); write_bounds_like_dyn_trait_with_prefix( f, "impl", - Either::Left(self), + Either::Left(&convert_ty_for_result(interner, *self)), &bounds, SizedByDefault::Sized { anchor: krate }, )?; @@ -1527,8 +1645,16 @@ impl HirDisplay for Ty { } } } - TyKind::BoundVar(idx) => idx.hir_fmt(f)?, - TyKind::Dyn(dyn_ty) => { + TyKind::Bound(debruijn_index, ty) => { + let idx = chalk_ir::BoundVar { + debruijn: chalk_ir::DebruijnIndex::new(debruijn_index.as_u32()), + index: ty.var.as_usize(), + }; + idx.hir_fmt(f)? + } + TyKind::Dynamic(..) => { + let ty = convert_ty_for_result(interner, *self); + let chalk_ir::TyKind::Dyn(dyn_ty) = ty.kind(Interner) else { unreachable!() }; // Reorder bounds to satisfy `write_bounds_like_dyn_trait()`'s expectation. // FIXME: `Iterator::partition_in_place()` or `Vec::extract_if()` may make it // more efficient when either of them hits stable. 
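// --- editor's note: illustrative sketch, not part of the patch --------------
// The hunks above port `HirDisplay for Ty` from chalk's `TyKind` to
// `rustc_type_ir::TyKind`, but the overall shape is unchanged: match on a
// type's kind and recurse into its component types while writing into a
// formatter. Below is a minimal, std-only sketch of that pattern; `Ty` and
// its variants are simplified stand-ins, not the real rust-analyzer types.
use std::fmt::{self, Write};

enum Ty {
    Bool,
    Str,
    Slice(Box<Ty>),
    Ref(bool /* mutable */, Box<Ty>),
    Tuple(Vec<Ty>),
}

fn hir_fmt(ty: &Ty, f: &mut dyn Write) -> fmt::Result {
    match ty {
        Ty::Bool => f.write_str("bool"),
        Ty::Str => f.write_str("str"),
        Ty::Slice(elem) => {
            f.write_char('[')?;
            hir_fmt(elem, f)?;
            f.write_char(']')
        }
        Ty::Ref(mutable, pointee) => {
            f.write_char('&')?;
            if *mutable {
                f.write_str("mut ")?;
            }
            hir_fmt(pointee, f)
        }
        // One-element tuples get a trailing comma, like in the real code.
        Ty::Tuple(elems) if elems.len() == 1 => {
            f.write_char('(')?;
            hir_fmt(&elems[0], f)?;
            f.write_str(",)")
        }
        Ty::Tuple(elems) => {
            f.write_char('(')?;
            for (i, elem) in elems.iter().enumerate() {
                if i != 0 {
                    f.write_str(", ")?;
                }
                hir_fmt(elem, f)?;
            }
            f.write_char(')')
        }
    }
}

fn main() {
    let ty = Ty::Ref(false, Box::new(Ty::Slice(Box::new(Ty::Bool))));
    let mut out = String::new();
    hir_fmt(&ty, &mut out).unwrap();
    assert_eq!(out, "&[bool]");
}
// --- end of editor's sketch --------------------------------------------------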
@@ -1544,7 +1670,7 @@ impl HirDisplay for Ty { bounds.push(Binders::empty( Interner, chalk_ir::WhereClause::TypeOutlives(chalk_ir::TypeOutlives { - ty: self.clone(), + ty: ty.clone(), lifetime: dyn_ty.lifetime.clone(), }), )); @@ -1553,90 +1679,42 @@ impl HirDisplay for Ty { write_bounds_like_dyn_trait_with_prefix( f, "dyn", - Either::Left(self), + Either::Left(&ty), &bounds, SizedByDefault::NotSized, )?; } - TyKind::Alias(AliasTy::Projection(p_ty)) => p_ty.hir_fmt(f)?, - TyKind::Alias(AliasTy::Opaque(opaque_ty)) => { - if !f.display_kind.allows_opaque() { - return Err(HirDisplayError::DisplaySourceCodeError( - DisplaySourceCodeError::OpaqueType, - )); - } - let impl_trait_id = db.lookup_intern_impl_trait_id(opaque_ty.opaque_ty_id.into()); - match impl_trait_id { - ImplTraitId::ReturnTypeImplTrait(func, idx) => { - let datas = - db.return_type_impl_traits(func).expect("impl trait id without data"); - let data = - (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); - let bounds = data.substitute(Interner, &opaque_ty.substitution); - let krate = func.krate(db); - write_bounds_like_dyn_trait_with_prefix( - f, - "impl", - Either::Left(self), - bounds.skip_binders(), - SizedByDefault::Sized { anchor: krate }, - )?; - } - ImplTraitId::TypeAliasImplTrait(alias, idx) => { - let datas = - db.type_alias_impl_traits(alias).expect("impl trait id without data"); - let data = - (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); - let bounds = data.substitute(Interner, &opaque_ty.substitution); - let krate = alias.krate(db); - write_bounds_like_dyn_trait_with_prefix( - f, - "impl", - Either::Left(self), - bounds.skip_binders(), - SizedByDefault::Sized { anchor: krate }, - )?; - } - ImplTraitId::AsyncBlockTypeImplTrait(..) => { - write!(f, "{{async block}}")?; - } - }; - } - TyKind::Error => { + TyKind::Error(_) => { if f.display_kind.is_source_code() { f.write_char('_')?; } else { write!(f, "{{unknown}}")?; } } - TyKind::InferenceVar(..) => write!(f, "_")?, + TyKind::Infer(..) => write!(f, "_")?, TyKind::Coroutine(_, subst) => { if f.display_kind.is_source_code() { return Err(HirDisplayError::DisplaySourceCodeError( DisplaySourceCodeError::Coroutine, )); } - let subst = subst.as_slice(Interner); - let a: Option> = subst - .get(subst.len() - 3..) - .and_then(|args| args.iter().map(|arg| arg.ty(Interner)).collect()); + let CoroutineArgsParts { resume_ty, yield_ty, return_ty, .. } = + subst.split_coroutine_args(); + write!(f, "|")?; + resume_ty.hir_fmt(f)?; + write!(f, "|")?; - if let Some([resume_ty, yield_ty, ret_ty]) = a.as_deref() { - write!(f, "|")?; - resume_ty.hir_fmt(f)?; - write!(f, "|")?; + write!(f, " yields ")?; + yield_ty.hir_fmt(f)?; - write!(f, " yields ")?; - yield_ty.hir_fmt(f)?; - - write!(f, " -> ")?; - ret_ty.hir_fmt(f)?; - } else { - // This *should* be unreachable, but fallback just in case. - write!(f, "{{coroutine}}")?; - } + write!(f, " -> ")?; + return_ty.hir_fmt(f)?; } TyKind::CoroutineWitness(..) 
=> write!(f, "{{coroutine witness}}")?, + TyKind::Pat(_, _) => write!(f, "{{pat}}")?, + TyKind::UnsafeBinder(_) => write!(f, "{{unsafe binder}}")?, + TyKind::CoroutineClosure(_, _) => write!(f, "{{coroutine closure}}")?, + TyKind::Alias(_, _) => write!(f, "{{alias}}")?, } Ok(()) } @@ -1663,6 +1741,27 @@ fn hir_fmt_generics( Ok(()) } +fn hir_fmt_generics_ns<'db>( + f: &mut HirFormatter<'_>, + parameters: &[crate::next_solver::GenericArg<'db>], + generic_def: Option, + self_: Option>, +) -> Result<(), HirDisplayError> { + if parameters.is_empty() { + return Ok(()); + } + + let parameters_to_write = generic_args_sans_defaults_ns(f, generic_def, parameters); + + if !parameters_to_write.is_empty() { + write!(f, "<")?; + hir_fmt_generic_arguments_ns(f, parameters_to_write, self_)?; + write!(f, ">")?; + } + + Ok(()) +} + fn generic_args_sans_defaults<'ga>( f: &mut HirFormatter<'_>, generic_def: Option, @@ -1718,6 +1817,87 @@ fn generic_args_sans_defaults<'ga>( } } +fn hir_fmt_generic_args<'db>( + f: &mut HirFormatter<'_>, + parameters: &[crate::next_solver::GenericArg<'db>], + generic_def: Option, + self_: Option>, +) -> Result<(), HirDisplayError> { + if parameters.is_empty() { + return Ok(()); + } + + let parameters_to_write = generic_args_sans_defaults_ns(f, generic_def, parameters); + + if !parameters_to_write.is_empty() { + write!(f, "<")?; + hir_fmt_generic_arguments_ns(f, parameters_to_write, self_)?; + write!(f, ">")?; + } + + Ok(()) +} + +fn generic_args_sans_defaults_ns<'ga, 'db>( + f: &mut HirFormatter<'_>, + generic_def: Option, + parameters: &'ga [crate::next_solver::GenericArg<'db>], +) -> &'ga [crate::next_solver::GenericArg<'db>] { + let interner = DbInterner::new_with(f.db, Some(f.krate()), None); + if f.display_kind.is_source_code() || f.omit_verbose_types() { + match generic_def + .map(|generic_def_id| f.db.generic_defaults(generic_def_id)) + .filter(|it| !it.is_empty()) + { + None => parameters, + Some(default_parameters) => { + let should_show = |arg: &crate::next_solver::GenericArg<'db>, i: usize| { + let is_err = |arg: &crate::next_solver::GenericArg<'db>| match arg.kind() { + rustc_type_ir::GenericArgKind::Lifetime(it) => { + matches!(it.kind(), RegionKind::ReError(..)) + } + rustc_type_ir::GenericArgKind::Type(it) => { + matches!(it.kind(), rustc_type_ir::TyKind::Error(..)) + } + rustc_type_ir::GenericArgKind::Const(it) => { + matches!(it.kind(), rustc_type_ir::ConstKind::Error(..),) + } + }; + // if the arg is error like, render it to inform the user + if is_err(arg) { + return true; + } + // otherwise, if the arg is equal to the param default, hide it (unless the + // default is an error which can happen for the trait Self type) + match default_parameters.get(i) { + None => true, + Some(default_parameter) => { + // !is_err(default_parameter.skip_binders()) + // && + arg != &default_parameter + .clone() + .substitute( + Interner, + &convert_args_for_result(interner, ¶meters[..i]), + ) + .to_nextsolver(interner) + } + } + }; + let mut default_from = 0; + for (i, parameter) in parameters.iter().enumerate() { + if should_show(parameter, i) { + default_from = i + 1; + } + } + ¶meters[0..default_from] + } + } + } else { + parameters + } +} + fn hir_fmt_generic_arguments( f: &mut HirFormatter<'_>, parameters: &[GenericArg], @@ -1742,6 +1922,30 @@ fn hir_fmt_generic_arguments( Ok(()) } +fn hir_fmt_generic_arguments_ns<'db>( + f: &mut HirFormatter<'_>, + parameters: &[crate::next_solver::GenericArg<'db>], + self_: Option>, +) -> Result<(), HirDisplayError> { + let mut first = 
true; + let lifetime_offset = parameters.iter().position(|arg| arg.region().is_some()); + + let (ty_or_const, lifetimes) = match lifetime_offset { + Some(offset) => parameters.split_at(offset), + None => (parameters, &[][..]), + }; + for generic_arg in lifetimes.iter().chain(ty_or_const) { + if !mem::take(&mut first) { + write!(f, ", ")?; + } + match self_ { + self_ @ Some(_) if generic_arg.ty() == self_ => write!(f, "Self")?, + _ => generic_arg.hir_fmt(f)?, + } + } + Ok(()) +} + impl HirDisplay for CallableSig { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { let CallableSig { params_and_return: _, is_varargs, safety, abi: _ } = *self; @@ -1982,6 +2186,17 @@ impl HirDisplay for TraitRef { } } +impl<'db> HirDisplay for crate::next_solver::TraitRef<'db> { + fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { + let trait_ = self.def_id.0; + f.start_location_link(trait_.into()); + write!(f, "{}", f.db.trait_signature(trait_).name.display(f.db, f.edition()))?; + f.end_location_link(); + let substs = self.args.as_slice(); + hir_fmt_generic_args(f, &substs[1..], None, substs[0].ty()) + } +} + impl HirDisplay for WhereClause { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { if f.should_truncate() { @@ -2040,7 +2255,7 @@ impl HirDisplay for LifetimeData { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { match self { LifetimeData::Placeholder(idx) => { - let id = lt_from_placeholder_idx(f.db, *idx); + let id = lt_from_placeholder_idx(f.db, *idx).0; let generics = generics(f.db, id.parent); let param_data = &generics[id.local_id]; write!(f, "{}", param_data.name.display(f.db, f.edition()))?; @@ -2062,6 +2277,34 @@ impl HirDisplay for LifetimeData { } } +impl<'db> HirDisplay for crate::next_solver::Region<'db> { + fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { + match self.kind() { + rustc_type_ir::RegionKind::ReEarlyParam(param) => { + let generics = generics(f.db, param.id.parent); + let param_data = &generics[param.id.local_id]; + write!(f, "{}", param_data.name.display(f.db, f.edition()))?; + Ok(()) + } + rustc_type_ir::RegionKind::ReBound(db, idx) => { + write!(f, "?{}.{}", db.as_u32(), idx.var.as_u32()) + } + rustc_type_ir::RegionKind::ReVar(_) => write!(f, "_"), + rustc_type_ir::RegionKind::ReStatic => write!(f, "'static"), + rustc_type_ir::RegionKind::ReError(..) 
=> { + if cfg!(test) { + write!(f, "'?") + } else { + write!(f, "'_") + } + } + rustc_type_ir::RegionKind::ReErased => write!(f, "'"), + rustc_type_ir::RegionKind::RePlaceholder(_) => write!(f, ""), + rustc_type_ir::RegionKind::ReLateParam(_) => write!(f, ""), + } + } +} + impl HirDisplay for DomainGoal { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { match self { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs b/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs index 5577be890da34..f5c2f41069ea0 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs @@ -120,7 +120,7 @@ pub(crate) fn has_drop_glue(db: &dyn HirDatabase, ty: Ty, env: Arc) -> bool { value: InEnvironment::new(&env.env, trait_ref.cast(Interner)), binders: CanonicalVarKinds::empty(Interner), }; - db.trait_solve(env.krate, env.block, goal).is_some() + db.trait_solve(env.krate, env.block, goal).certain() } pub(crate) fn has_drop_glue_cycle_result( diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs index 6294d683e6c02..b87c998217741 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs @@ -2,27 +2,29 @@ use std::ops::ControlFlow; -use chalk_ir::{ - DebruijnIndex, - cast::Cast, - visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor}, -}; -use chalk_solve::rust_ir::InlineBound; use hir_def::{ AssocItemId, ConstId, CrateRootModuleId, FunctionId, GenericDefId, HasModule, TraitId, - TypeAliasId, lang_item::LangItem, signatures::TraitFlags, + TypeAliasId, TypeOrConstParamId, TypeParamId, hir::generics::LocalTypeOrConstParamId, + lang_item::LangItem, signatures::TraitFlags, }; +use intern::Symbol; use rustc_hash::FxHashSet; +use rustc_type_ir::{ + AliasTyKind, ClauseKind, PredicatePolarity, TypeSuperVisitable as _, TypeVisitable as _, + Upcast, elaborate, + inherent::{IntoKind, SliceLike}, +}; use smallvec::SmallVec; use crate::{ - AliasEq, AliasTy, Binders, BoundVar, CallableSig, GoalData, ImplTraitId, Interner, OpaqueTyId, - ProjectionTyExt, Solution, Substitution, TraitRef, Ty, TyKind, WhereClause, all_super_traits, - db::HirDatabase, - from_assoc_type_id, from_chalk_trait_id, - generics::{generics, trait_self_param_idx}, - to_chalk_trait_id, - utils::elaborate_clause_supertraits, + ImplTraitId, + db::{HirDatabase, InternedOpaqueTyId}, + lower_nextsolver::associated_ty_item_bounds, + next_solver::{ + Clause, Clauses, DbInterner, GenericArgs, ParamEnv, SolverDefId, TraitPredicate, TraitRef, + TypingMode, infer::DbInternerInferExt, mk_param, + }, + traits::next_trait_solve_in_ctxt, }; #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -52,13 +54,18 @@ pub fn dyn_compatibility( db: &dyn HirDatabase, trait_: TraitId, ) -> Option { - for super_trait in all_super_traits(db, trait_).into_iter().skip(1).rev() { - if db.dyn_compatibility_of_trait(super_trait).is_some() { - return Some(DynCompatibilityViolation::HasNonCompatibleSuperTrait(super_trait)); + let interner = DbInterner::new_with(db, Some(trait_.krate(db)), None); + for super_trait in elaborate::supertrait_def_ids(interner, trait_.into()) { + if let Some(v) = db.dyn_compatibility_of_trait(super_trait.0) { + return if super_trait.0 == trait_ { + Some(v) + } else { + Some(DynCompatibilityViolation::HasNonCompatibleSuperTrait(super_trait.0)) + }; } } - db.dyn_compatibility_of_trait(trait_) + None } pub 
fn dyn_compatibility_with_callback( @@ -69,8 +76,9 @@ pub fn dyn_compatibility_with_callback( where F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>, { - for super_trait in all_super_traits(db, trait_).into_iter().skip(1).rev() { - if db.dyn_compatibility_of_trait(super_trait).is_some() { + let interner = DbInterner::new_with(db, Some(trait_.krate(db)), None); + for super_trait in elaborate::supertrait_def_ids(interner, trait_.into()).skip(1) { + if db.dyn_compatibility_of_trait(super_trait.0).is_some() { cb(DynCompatibilityViolation::HasNonCompatibleSuperTrait(trait_))?; } } @@ -128,27 +136,26 @@ pub fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> b return false; }; - let Some(trait_self_param_idx) = trait_self_param_idx(db, def) else { - return false; - }; - - let predicates = &*db.generic_predicates(def); - let predicates = predicates.iter().map(|p| p.skip_binders().skip_binders().clone()); - elaborate_clause_supertraits(db, predicates).any(|pred| match pred { - WhereClause::Implemented(trait_ref) => { - if from_chalk_trait_id(trait_ref.trait_id) == sized - && let TyKind::BoundVar(it) = - *trait_ref.self_type_parameter(Interner).kind(Interner) - { - // Since `generic_predicates` is `Binder>`, the `DebrujinIndex` of - // self-parameter is `1` - return it - .index_if_bound_at(DebruijnIndex::ONE) - .is_some_and(|idx| idx == trait_self_param_idx); + let interner = DbInterner::new_with(db, Some(krate), None); + let predicates = db.generic_predicates_ns(def); + // FIXME: We should use `explicit_predicates_of` here, which hasn't been implemented to + // rust-analyzer yet + // https://github.com/rust-lang/rust/blob/ddaf12390d3ffb7d5ba74491a48f3cd528e5d777/compiler/rustc_hir_analysis/src/collect/predicates_of.rs#L490 + elaborate::elaborate(interner, predicates.iter().copied()).any(|pred| { + match pred.kind().skip_binder() { + ClauseKind::Trait(trait_pred) => { + if sized == trait_pred.def_id().0 + && let rustc_type_ir::TyKind::Param(param_ty) = + trait_pred.trait_ref.self_ty().kind() + && param_ty.index == 0 + { + true + } else { + false + } } - false + _ => false, } - _ => false, }) } @@ -156,7 +163,7 @@ pub fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> b // but we don't have good way to render such locations. 
// So, just return single boolean value for existence of such `Self` reference fn predicates_reference_self(db: &dyn HirDatabase, trait_: TraitId) -> bool { - db.generic_predicates(trait_.into()) + db.generic_predicates_ns(trait_.into()) .iter() .any(|pred| predicate_references_self(db, trait_, pred, AllowSelfProjection::No)) } @@ -168,37 +175,18 @@ fn bounds_reference_self(db: &dyn HirDatabase, trait_: TraitId) -> bool { .items .iter() .filter_map(|(_, it)| match *it { - AssocItemId::TypeAliasId(id) => { - let assoc_ty_data = db.associated_ty_data(id); - Some(assoc_ty_data) - } + AssocItemId::TypeAliasId(id) => Some(associated_ty_item_bounds(db, id)), _ => None, }) - .any(|assoc_ty_data| { - assoc_ty_data.binders.skip_binders().bounds.iter().any(|bound| { - let def = from_assoc_type_id(assoc_ty_data.id).into(); - match bound.skip_binders() { - InlineBound::TraitBound(it) => it.args_no_self.iter().any(|arg| { - contains_illegal_self_type_reference( - db, - def, - trait_, - arg, - DebruijnIndex::ONE, - AllowSelfProjection::Yes, - ) - }), - InlineBound::AliasEqBound(it) => it.parameters.iter().any(|arg| { - contains_illegal_self_type_reference( - db, - def, - trait_, - arg, - DebruijnIndex::ONE, - AllowSelfProjection::Yes, - ) - }), - } + .any(|bounds| { + bounds.skip_binder().iter().any(|pred| match pred.skip_binder() { + rustc_type_ir::ExistentialPredicate::Trait(it) => it.args.iter().any(|arg| { + contains_illegal_self_type_reference(db, trait_, &arg, AllowSelfProjection::Yes) + }), + rustc_type_ir::ExistentialPredicate::Projection(it) => it.args.iter().any(|arg| { + contains_illegal_self_type_reference(db, trait_, &arg, AllowSelfProjection::Yes) + }), + rustc_type_ir::ExistentialPredicate::AutoTrait(_) => false, }) }) } @@ -209,114 +197,80 @@ enum AllowSelfProjection { No, } -fn predicate_references_self( - db: &dyn HirDatabase, +fn predicate_references_self<'db>( + db: &'db dyn HirDatabase, trait_: TraitId, - predicate: &Binders>, + predicate: &Clause<'db>, allow_self_projection: AllowSelfProjection, ) -> bool { - match predicate.skip_binders().skip_binders() { - WhereClause::Implemented(trait_ref) => { - trait_ref.substitution.iter(Interner).skip(1).any(|arg| { - contains_illegal_self_type_reference( - db, - trait_.into(), - trait_, - arg, - DebruijnIndex::ONE, - allow_self_projection, - ) - }) - } - WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(proj), .. 
}) => { - proj.substitution.iter(Interner).skip(1).any(|arg| { - contains_illegal_self_type_reference( - db, - trait_.into(), - trait_, - arg, - DebruijnIndex::ONE, - allow_self_projection, - ) + match predicate.kind().skip_binder() { + ClauseKind::Trait(trait_pred) => trait_pred.trait_ref.args.iter().skip(1).any(|arg| { + contains_illegal_self_type_reference(db, trait_, &arg, allow_self_projection) + }), + ClauseKind::Projection(proj_pred) => { + proj_pred.projection_term.args.iter().skip(1).any(|arg| { + contains_illegal_self_type_reference(db, trait_, &arg, allow_self_projection) }) } _ => false, } } -fn contains_illegal_self_type_reference>( - db: &dyn HirDatabase, - def: GenericDefId, +fn contains_illegal_self_type_reference<'db, T: rustc_type_ir::TypeVisitable>>( + db: &'db dyn HirDatabase, trait_: TraitId, t: &T, - outer_binder: DebruijnIndex, allow_self_projection: AllowSelfProjection, ) -> bool { - let Some(trait_self_param_idx) = trait_self_param_idx(db, def) else { - return false; - }; - struct IllegalSelfTypeVisitor<'a> { - db: &'a dyn HirDatabase, + struct IllegalSelfTypeVisitor<'db> { + db: &'db dyn HirDatabase, trait_: TraitId, super_traits: Option>, - trait_self_param_idx: usize, allow_self_projection: AllowSelfProjection, } - impl TypeVisitor for IllegalSelfTypeVisitor<'_> { - type BreakTy = (); + impl<'db> rustc_type_ir::TypeVisitor> for IllegalSelfTypeVisitor<'db> { + type Result = ControlFlow<()>; - fn as_dyn(&mut self) -> &mut dyn TypeVisitor { - self - } - - fn interner(&self) -> Interner { - Interner - } - - fn visit_ty(&mut self, ty: &Ty, outer_binder: DebruijnIndex) -> ControlFlow { - match ty.kind(Interner) { - TyKind::BoundVar(BoundVar { debruijn, index }) => { - if *debruijn == outer_binder && *index == self.trait_self_param_idx { - ControlFlow::Break(()) - } else { - ty.super_visit_with(self.as_dyn(), outer_binder) - } - } - TyKind::Alias(AliasTy::Projection(proj)) => match self.allow_self_projection { + fn visit_ty( + &mut self, + ty: as rustc_type_ir::Interner>::Ty, + ) -> Self::Result { + let interner = DbInterner::new_with(self.db, None, None); + match ty.kind() { + rustc_type_ir::TyKind::Param(param) if param.index == 0 => ControlFlow::Break(()), + rustc_type_ir::TyKind::Param(_) => ControlFlow::Continue(()), + rustc_type_ir::TyKind::Alias(AliasTyKind::Projection, proj) => match self + .allow_self_projection + { AllowSelfProjection::Yes => { - let trait_ = proj.trait_(self.db); + let trait_ = proj.trait_def_id(DbInterner::new_with(self.db, None, None)); + let trait_ = match trait_ { + SolverDefId::TraitId(id) => id, + _ => unreachable!(), + }; if self.super_traits.is_none() { - self.super_traits = Some(all_super_traits(self.db, self.trait_)); + self.super_traits = Some( + elaborate::supertrait_def_ids(interner, self.trait_.into()) + .map(|super_trait| super_trait.0) + .collect(), + ) } if self.super_traits.as_ref().is_some_and(|s| s.contains(&trait_)) { ControlFlow::Continue(()) } else { - ty.super_visit_with(self.as_dyn(), outer_binder) + ty.super_visit_with(self) } } - AllowSelfProjection::No => ty.super_visit_with(self.as_dyn(), outer_binder), + AllowSelfProjection::No => ty.super_visit_with(self), }, - _ => ty.super_visit_with(self.as_dyn(), outer_binder), + _ => ty.super_visit_with(self), } } - - fn visit_const( - &mut self, - constant: &chalk_ir::Const, - outer_binder: DebruijnIndex, - ) -> std::ops::ControlFlow { - constant.data(Interner).ty.super_visit_with(self.as_dyn(), outer_binder) - } } - let mut visitor = IllegalSelfTypeVisitor { - db, - 
trait_, - super_traits: None, - trait_self_param_idx, - allow_self_projection, - }; - t.visit_with(visitor.as_dyn(), outer_binder).is_break() + let mut visitor = + IllegalSelfTypeVisitor { db, trait_, super_traits: None, allow_self_projection }; + t.visit_with(&mut visitor).is_break() } fn dyn_compatibility_violation_for_assoc_item( @@ -375,26 +329,21 @@ where cb(MethodViolationCode::AsyncFn)?; } - let sig = db.callable_item_signature(func.into()); - if sig.skip_binders().params().iter().skip(1).any(|ty| { - contains_illegal_self_type_reference( - db, - func.into(), - trait_, - ty, - DebruijnIndex::INNERMOST, - AllowSelfProjection::Yes, - ) - }) { + let sig = db.callable_item_signature_ns(func.into()); + if sig + .skip_binder() + .inputs() + .iter() + .skip(1) + .any(|ty| contains_illegal_self_type_reference(db, trait_, &ty, AllowSelfProjection::Yes)) + { cb(MethodViolationCode::ReferencesSelfInput)?; } if contains_illegal_self_type_reference( db, - func.into(), trait_, - sig.skip_binders().ret(), - DebruijnIndex::INNERMOST, + &sig.skip_binder().output(), AllowSelfProjection::Yes, ) { cb(MethodViolationCode::ReferencesSelfOutput)?; @@ -415,40 +364,28 @@ where cb(MethodViolationCode::UndispatchableReceiver)?; } - let predicates = &*db.generic_predicates_without_parent(func.into()); - let trait_self_idx = trait_self_param_idx(db, func.into()); + let predicates = &*db.generic_predicates_without_parent_ns(func.into()); for pred in predicates { - let pred = pred.skip_binders().skip_binders(); + let pred = pred.kind().skip_binder(); - if matches!(pred, WhereClause::TypeOutlives(_)) { + if matches!(pred, ClauseKind::TypeOutlives(_)) { continue; } // Allow `impl AutoTrait` predicates - if let WhereClause::Implemented(TraitRef { trait_id, substitution }) = pred { - let trait_data = db.trait_signature(from_chalk_trait_id(*trait_id)); - if trait_data.flags.contains(TraitFlags::AUTO) - && substitution - .as_slice(Interner) - .first() - .and_then(|arg| arg.ty(Interner)) - .and_then(|ty| ty.bound_var(Interner)) - .is_some_and(|b| { - b.debruijn == DebruijnIndex::ONE && Some(b.index) == trait_self_idx - }) - { - continue; - } + if let ClauseKind::Trait(TraitPredicate { + trait_ref: pred_trait_ref, + polarity: PredicatePolarity::Positive, + }) = pred + && let trait_data = db.trait_signature(pred_trait_ref.def_id.0) + && trait_data.flags.contains(TraitFlags::AUTO) + && let rustc_type_ir::TyKind::Param(crate::next_solver::ParamTy { index: 0, .. 
}) = + pred_trait_ref.self_ty().kind() + { + continue; } - if contains_illegal_self_type_reference( - db, - func.into(), - trait_, - pred, - DebruijnIndex::ONE, - AllowSelfProjection::Yes, - ) { + if contains_illegal_self_type_reference(db, trait_, &pred, AllowSelfProjection::Yes) { cb(MethodViolationCode::WhereClauseReferencesSelf)?; break; } @@ -457,34 +394,34 @@ where ControlFlow::Continue(()) } -fn receiver_is_dispatchable( +fn receiver_is_dispatchable<'db>( db: &dyn HirDatabase, trait_: TraitId, func: FunctionId, - sig: &Binders, + sig: &crate::next_solver::EarlyBinder< + 'db, + crate::next_solver::Binder<'db, rustc_type_ir::FnSig>>, + >, ) -> bool { - let Some(trait_self_idx) = trait_self_param_idx(db, func.into()) else { - return false; - }; + let sig = sig.instantiate_identity(); + + let interner: DbInterner<'_> = DbInterner::new_with(db, Some(trait_.krate(db)), None); + let self_param_id = TypeParamId::from_unchecked(TypeOrConstParamId { + parent: trait_.into(), + local_id: LocalTypeOrConstParamId::from_raw(la_arena::RawIdx::from_u32(0)), + }); + let self_param_ty = crate::next_solver::Ty::new( + interner, + rustc_type_ir::TyKind::Param(crate::next_solver::ParamTy { index: 0, id: self_param_id }), + ); // `self: Self` can't be dispatched on, but this is already considered dyn-compatible // See rustc's comment on https://github.com/rust-lang/rust/blob/3f121b9461cce02a703a0e7e450568849dfaa074/compiler/rustc_trait_selection/src/traits/object_safety.rs#L433-L437 - if sig - .skip_binders() - .params() - .first() - .and_then(|receiver| receiver.bound_var(Interner)) - .is_some_and(|b| { - b == BoundVar { debruijn: DebruijnIndex::INNERMOST, index: trait_self_idx } - }) - { + if sig.inputs().iter().next().is_some_and(|p| p.skip_binder() == self_param_ty) { return true; } - let placeholder_subst = generics(db, func.into()).placeholder_subst(db); - - let substituted_sig = sig.clone().substitute(Interner, &placeholder_subst); - let Some(receiver_ty) = substituted_sig.params().first() else { + let Some(&receiver_ty) = sig.inputs().skip_binder().as_slice().first() else { return false; }; @@ -497,103 +434,109 @@ fn receiver_is_dispatchable( return false; }; + let meta_sized_did = LangItem::MetaSized.resolve_trait(db, krate); + let Some(meta_sized_did) = meta_sized_did else { + return false; + }; + // Type `U` + // FIXME: That seems problematic to fake a generic param like that? 
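// --- editor's note: illustrative example, not part of the patch --------------
// At the language level, `receiver_is_dispatchable` answers the question
// "can this method be called through `dyn Trait`?". Receivers such as
// `&self`, `&mut self`, `Box<Self>`, `Rc<Self>` and `Pin<&mut Self>` are
// dispatchable; by-value `self` is not, and is usually excluded from the
// vtable with a `where Self: Sized` bound instead. A small surface-level
// demonstration (names are made up for the example):
use std::rc::Rc;

trait Speak {
    fn by_ref(&self) -> &'static str;
    fn by_box(self: Box<Self>) -> &'static str;
    fn by_rc(self: Rc<Self>) -> &'static str;
    // A by-value receiver would make the trait not dyn-compatible unless the
    // method is restricted to sized receivers:
    fn by_value(self) -> &'static str
    where
        Self: Sized;
}

struct Dog;

impl Speak for Dog {
    fn by_ref(&self) -> &'static str { "woof (&self)" }
    fn by_box(self: Box<Self>) -> &'static str { "woof (Box<Self>)" }
    fn by_rc(self: Rc<Self>) -> &'static str { "woof (Rc<Self>)" }
    fn by_value(self) -> &'static str
    where
        Self: Sized,
    { "woof (self)" }
}

fn main() {
    let dog: Box<dyn Speak> = Box::new(Dog);
    assert_eq!(dog.by_ref(), "woof (&self)");
    assert_eq!(dog.by_box(), "woof (Box<Self>)");

    let dog: Rc<dyn Speak> = Rc::new(Dog);
    assert_eq!(dog.by_rc(), "woof (Rc<Self>)");
}
// --- end of editor's example -------------------------------------------------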
let unsized_self_ty = - TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U32)).intern(Interner); + crate::next_solver::Ty::new_param(interner, self_param_id, u32::MAX, Symbol::empty()); // `Receiver[Self => U]` - let Some(unsized_receiver_ty) = receiver_for_self_ty(db, func, unsized_self_ty.clone()) else { - return false; - }; + let unsized_receiver_ty = receiver_for_self_ty(interner, func, receiver_ty, unsized_self_ty); - let self_ty = placeholder_subst.as_slice(Interner)[trait_self_idx].assert_ty_ref(Interner); - let unsized_predicate = WhereClause::Implemented(TraitRef { - trait_id: to_chalk_trait_id(unsize_did), - substitution: Substitution::from_iter(Interner, [self_ty.clone(), unsized_self_ty.clone()]), - }); - let trait_predicate = WhereClause::Implemented(TraitRef { - trait_id: to_chalk_trait_id(trait_), - substitution: Substitution::from_iter( - Interner, - std::iter::once(unsized_self_ty.cast(Interner)) - .chain(placeholder_subst.iter(Interner).skip(1).cloned()), - ), - }); + let param_env = { + let generic_predicates = &*db.generic_predicates_ns(func.into()); - let generic_predicates = &*db.generic_predicates(func.into()); + // Self: Unsize + let unsize_predicate = + TraitRef::new(interner, unsize_did.into(), [self_param_ty, unsized_self_ty]); - let clauses = std::iter::once(unsized_predicate) - .chain(std::iter::once(trait_predicate)) - .chain(generic_predicates.iter().map(|pred| { - pred.clone().substitute(Interner, &placeholder_subst).into_value_and_skipped_binders().0 - })) - .map(|pred| { - pred.cast::>(Interner).into_from_env_clause(Interner) + // U: Trait + let args = GenericArgs::for_item(interner, trait_.into(), |name, index, kind, _| { + if index == 0 { unsized_self_ty.into() } else { mk_param(interner, index, name, kind) } }); - let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses); - - let obligation = WhereClause::Implemented(TraitRef { - trait_id: to_chalk_trait_id(dispatch_from_dyn_did), - substitution: Substitution::from_iter(Interner, [receiver_ty.clone(), unsized_receiver_ty]), - }); - let goal = GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(obligation)).intern(Interner); - - let in_env = chalk_ir::InEnvironment::new(&env, goal); + let trait_predicate = TraitRef::new_from_args(interner, trait_.into(), args); + + let meta_sized_predicate = + TraitRef::new(interner, meta_sized_did.into(), [unsized_self_ty]); + + ParamEnv { + clauses: Clauses::new_from_iter( + interner, + generic_predicates.iter().copied().chain([ + unsize_predicate.upcast(interner), + trait_predicate.upcast(interner), + meta_sized_predicate.upcast(interner), + ]), + ), + } + }; - let mut table = chalk_solve::infer::InferenceTable::::new(); - let canonicalized = table.canonicalize(Interner, in_env); - let solution = db.trait_solve(krate, None, canonicalized.quantified); + // Receiver: DispatchFromDyn U]> + let predicate = + TraitRef::new(interner, dispatch_from_dyn_did.into(), [receiver_ty, unsized_receiver_ty]); + let goal = crate::next_solver::Goal::new(interner, param_env, predicate); - matches!(solution, Some(Solution::Unique(_))) + let infcx = interner.infer_ctxt().build(TypingMode::non_body_analysis()); + // the receiver is dispatchable iff the obligation holds + let res = next_trait_solve_in_ctxt(&infcx, goal); + res.map_or(false, |res| matches!(res.1, rustc_type_ir::solve::Certainty::Yes)) } -fn receiver_for_self_ty(db: &dyn HirDatabase, func: FunctionId, ty: Ty) -> Option { - let generics = generics(db, func.into()); - let trait_self_idx = 
trait_self_param_idx(db, func.into())?; - let subst = generics.placeholder_subst(db); - let subst = Substitution::from_iter( - Interner, - subst.iter(Interner).enumerate().map(|(idx, arg)| { - if idx == trait_self_idx { ty.clone().cast(Interner) } else { arg.clone() } - }), +fn receiver_for_self_ty<'db>( + interner: DbInterner<'db>, + func: FunctionId, + receiver_ty: crate::next_solver::Ty<'db>, + self_ty: crate::next_solver::Ty<'db>, +) -> crate::next_solver::Ty<'db> { + let args = crate::next_solver::GenericArgs::for_item( + interner, + SolverDefId::FunctionId(func), + |name, index, kind, _| { + if index == 0 { self_ty.into() } else { mk_param(interner, index, name, kind) } + }, ); - let sig = db.callable_item_signature(func.into()); - let sig = sig.substitute(Interner, &subst); - sig.params_and_return.first().cloned() + + crate::next_solver::EarlyBinder::bind(receiver_ty).instantiate(interner, args) } -fn contains_illegal_impl_trait_in_trait( - db: &dyn HirDatabase, - sig: &Binders, +fn contains_illegal_impl_trait_in_trait<'db>( + db: &'db dyn HirDatabase, + sig: &crate::next_solver::EarlyBinder< + 'db, + crate::next_solver::Binder<'db, rustc_type_ir::FnSig>>, + >, ) -> Option { - struct OpaqueTypeCollector(FxHashSet); - - impl TypeVisitor for OpaqueTypeCollector { - type BreakTy = (); + struct OpaqueTypeCollector(FxHashSet); - fn as_dyn(&mut self) -> &mut dyn TypeVisitor { - self - } - - fn interner(&self) -> Interner { - Interner - } + impl<'db> rustc_type_ir::TypeVisitor> for OpaqueTypeCollector { + type Result = ControlFlow<()>; - fn visit_ty(&mut self, ty: &Ty, outer_binder: DebruijnIndex) -> ControlFlow { - if let TyKind::OpaqueType(opaque_ty_id, _) = ty.kind(Interner) { - self.0.insert(*opaque_ty_id); + fn visit_ty( + &mut self, + ty: as rustc_type_ir::Interner>::Ty, + ) -> Self::Result { + if let rustc_type_ir::TyKind::Alias(AliasTyKind::Opaque, op) = ty.kind() { + let id = match op.def_id { + SolverDefId::InternedOpaqueTyId(id) => id, + _ => unreachable!(), + }; + self.0.insert(id); } - ty.super_visit_with(self.as_dyn(), outer_binder) + ty.super_visit_with(self) } } - let ret = sig.skip_binders().ret(); + let ret = sig.skip_binder().output(); let mut visitor = OpaqueTypeCollector(FxHashSet::default()); - _ = ret.visit_with(visitor.as_dyn(), DebruijnIndex::INNERMOST); + _ = ret.visit_with(&mut visitor); // Since we haven't implemented RPITIT in proper way like rustc yet, // just check whether `ret` contains RPIT for now for opaque_ty in visitor.0 { - let impl_trait_id = db.lookup_intern_impl_trait_id(opaque_ty.into()); + let impl_trait_id = db.lookup_intern_impl_trait_id(opaque_ty); if matches!(impl_trait_id, ImplTraitId::ReturnTypeImplTrait(..)) { return Some(MethodViolationCode::ReferencesImplTraitInTrait); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility/tests.rs index 5078e8cfaa8b9..04a9ba79921ad 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility/tests.rs @@ -56,18 +56,21 @@ fn check_dyn_compatibility<'a>( continue; }; let mut osvs = FxHashSet::default(); - _ = dyn_compatibility_with_callback(&db, trait_id, &mut |osv| { - osvs.insert(match osv { - DynCompatibilityViolation::SizedSelf => SizedSelf, - DynCompatibilityViolation::SelfReferential => SelfReferential, - DynCompatibilityViolation::Method(_, mvc) => Method(mvc), - DynCompatibilityViolation::AssocConst(_) => AssocConst, - 
DynCompatibilityViolation::GAT(_) => GAT, - DynCompatibilityViolation::HasNonCompatibleSuperTrait(_) => { - HasNonCompatibleSuperTrait - } + let db = &db; + salsa::attach(db, || { + _ = dyn_compatibility_with_callback(db, trait_id, &mut |osv| { + osvs.insert(match osv { + DynCompatibilityViolation::SizedSelf => SizedSelf, + DynCompatibilityViolation::SelfReferential => SelfReferential, + DynCompatibilityViolation::Method(_, mvc) => Method(mvc), + DynCompatibilityViolation::AssocConst(_) => AssocConst, + DynCompatibilityViolation::GAT(_) => GAT, + DynCompatibilityViolation::HasNonCompatibleSuperTrait(_) => { + HasNonCompatibleSuperTrait + } + }); + ControlFlow::Continue(()) }); - ControlFlow::Continue(()) }); assert_eq!(osvs, expected, "dyn-compatibility violations for `{name}` do not match;"); } @@ -250,7 +253,8 @@ trait Bar { trait Baz : Bar { } "#, - [("Bar", vec![]), ("Baz", vec![SizedSelf, SelfReferential])], + // FIXME: We should also report `SizedSelf` here + [("Bar", vec![]), ("Baz", vec![SelfReferential])], ); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs index f14872e68c3f5..e179e41b1cbe2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs @@ -165,6 +165,21 @@ impl Generics { (parent_len, self_param, type_params, const_params, impl_trait_params, lifetime_params) } + pub(crate) fn type_or_const_param( + &self, + param: TypeOrConstParamId, + ) -> Option<(usize, TypeOrConstParamData)> { + let idx = self.find_type_or_const_param(param)?; + self.iter().nth(idx).and_then(|p| { + let data = match p.1 { + GenericParamDataRef::TypeParamData(p) => p.clone().into(), + GenericParamDataRef::ConstParamData(p) => p.clone().into(), + _ => return None, + }; + Some((idx, data)) + }) + } + pub fn type_or_const_param_idx(&self, param: TypeOrConstParamId) -> Option { self.find_type_or_const_param(param) } @@ -241,15 +256,15 @@ impl Generics { pub fn placeholder_subst(&self, db: &dyn HirDatabase) -> Substitution { Substitution::from_iter( Interner, - self.iter_id().map(|id| match id { + self.iter_id().enumerate().map(|(index, id)| match id { GenericParamId::TypeParamId(id) => { - to_placeholder_idx(db, id.into()).to_ty(Interner).cast(Interner) + to_placeholder_idx(db, id.into(), index as u32).to_ty(Interner).cast(Interner) } - GenericParamId::ConstParamId(id) => to_placeholder_idx(db, id.into()) + GenericParamId::ConstParamId(id) => to_placeholder_idx(db, id.into(), index as u32) .to_const(Interner, db.const_param_ty(id)) .cast(Interner), GenericParamId::LifetimeParamId(id) => { - lt_to_placeholder_idx(db, id).to_lifetime(Interner).cast(Interner) + lt_to_placeholder_idx(db, id, index as u32).to_lifetime(Interner).cast(Interner) } }), ) @@ -258,7 +273,7 @@ impl Generics { pub(crate) fn trait_self_param_idx(db: &dyn DefDatabase, def: GenericDefId) -> Option { match def { - GenericDefId::TraitId(_) | GenericDefId::TraitAliasId(_) => { + GenericDefId::TraitId(_) => { let params = db.generic_params(def); params.trait_self_param().map(|idx| idx.into_raw().into_u32() as usize) } @@ -272,7 +287,7 @@ pub(crate) fn trait_self_param_idx(db: &dyn DefDatabase, def: GenericDefId) -> O } } -fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option { +pub(crate) fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option { let container = match def { GenericDefId::FunctionId(it) => it.lookup(db).container, GenericDefId::TypeAliasId(it) => 
it.lookup(db).container, @@ -280,8 +295,7 @@ fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option return None, + | GenericDefId::ImplId(_) => return None, }; match container { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 86345b23364d3..017119781a709 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -13,6 +13,7 @@ //! to certain types. To record this, we use the union-find implementation from //! the `ena` crate, which is extracted from rustc. +mod autoderef; pub(crate) mod cast; pub(crate) mod closure; mod coerce; @@ -25,6 +26,7 @@ pub(crate) mod unify; use std::{cell::OnceCell, convert::identity, iter, ops::Index}; +use base_db::Crate; use chalk_ir::{ DebruijnIndex, Mutability, Safety, Scalar, TyKind, TypeFlags, Variance, cast::Cast, @@ -54,25 +56,30 @@ use rustc_hash::{FxHashMap, FxHashSet}; use stdx::{always, never}; use triomphe::Arc; +use crate::db::InternedClosureId; use crate::{ - AliasEq, AliasTy, Binders, ClosureId, Const, DomainGoal, GenericArg, Goal, ImplTraitId, - ImplTraitIdx, InEnvironment, IncorrectGenericsLenKind, Interner, Lifetime, OpaqueTyId, - ParamLoweringMode, PathLoweringDiagnostic, ProjectionTy, Substitution, TraitEnvironment, Ty, + AliasEq, AliasTy, Binders, ClosureId, Const, DomainGoal, GenericArg, ImplTraitId, ImplTraitIdx, + IncorrectGenericsLenKind, Interner, Lifetime, OpaqueTyId, ParamLoweringMode, + PathLoweringDiagnostic, ProjectionTy, Substitution, TargetFeatures, TraitEnvironment, Ty, TyBuilder, TyExt, db::HirDatabase, fold_tys, generics::Generics, infer::{ - coerce::CoerceMany, + coerce::{CoerceMany, DynamicCoerceMany}, diagnostics::{Diagnostics, InferenceTyLoweringContext as TyLoweringContext}, expr::ExprIsRead, unify::InferenceTable, }, lower::{ImplTraitLoweringMode, LifetimeElisionKind, diagnostics::TyLoweringDiagnostic}, mir::MirSpan, + next_solver::{ + self, DbInterner, + mapping::{ChalkToNextSolver, NextSolverToChalk}, + }, static_lifetime, to_assoc_type_id, traits::FnTrait, - utils::UnevaluatedConstEvaluatorFolder, + utils::{TargetFeatureIsSafeInTarget, UnevaluatedConstEvaluatorFolder}, }; // This lint has a false positive here. See the link below for details. @@ -84,7 +91,7 @@ pub use coerce::could_coerce; pub use unify::{could_unify, could_unify_deeply}; use cast::{CastCheck, CastError}; -pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy}; +pub(crate) use closure::analysis::{CaptureKind, CapturedItem, CapturedItemWithoutTy}; /// The entry point of type inference. pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc { @@ -144,6 +151,7 @@ pub(crate) fn infer_cycle_result(_: &dyn HirDatabase, _: DefWithBodyId) -> Arc, ty: Ty) -> Ty { // FIXME: TypeFlags::HAS_CT_PROJECTION is not implemented in chalk, so TypeFlags::HAS_PROJECTION only // works for the type case, so we check array unconditionally. Remove the array part @@ -156,7 +164,7 @@ pub(crate) fn normalize(db: &dyn HirDatabase, trait_env: Arc, let mut table = unify::InferenceTable::new(db, trait_env); let ty_with_vars = table.normalize_associated_types_in(ty); - table.resolve_obligations_as_possible(); + table.select_obligations_where_possible(); table.propagate_diverging_flag(); table.resolve_completely(ty_with_vars) } @@ -180,16 +188,12 @@ impl BindingMode { } } +// FIXME: Remove this `InferOk`, switch all code to the second one, that uses `Obligation` instead of `Goal`. 
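// --- editor's note: illustrative sketch, not part of the patch ---------------
// The `InferOk { value, goals }` shape defined just below follows rustc: an
// inference step returns its result together with the side conditions
// ("goals", i.e. obligations) it produced, and the caller registers those to
// be solved later rather than solving them eagerly. A simplified, std-only
// version of the pattern; all names here are stand-ins:
struct InferOk<T, G> {
    value: T,
    goals: Vec<G>,
}

struct Table<G> {
    pending: Vec<G>,
}

impl<G> Table<G> {
    fn register_ok<T>(&mut self, ok: InferOk<T, G>) -> T {
        // Keep the produced value, queue the side conditions for later.
        self.pending.extend(ok.goals);
        ok.value
    }
}

fn main() {
    let mut table = Table { pending: Vec::new() };
    // Pretend some coercion succeeded and produced two follow-up goals.
    let ok = InferOk { value: "coerced type", goals: vec!["T: Sized", "U: Copy"] };
    let value = table.register_ok(ok);
    assert_eq!(value, "coerced type");
    assert_eq!(table.pending.len(), 2);
}
// --- end of editor's sketch --------------------------------------------------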
#[derive(Debug)] -pub(crate) struct InferOk { +pub(crate) struct InferOk<'db, T> { + #[allow(dead_code)] value: T, - goals: Vec>, -} - -impl InferOk { - fn map(self, f: impl FnOnce(T) -> U) -> InferOk { - InferOk { value: f(self.value), goals: self.goals } - } + goals: Vec>>, } #[derive(Debug, PartialEq, Eq, Clone, Copy)] @@ -202,7 +206,7 @@ pub enum InferenceTyDiagnosticSource { #[derive(Debug)] pub(crate) struct TypeError; -pub(crate) type InferResult = Result, TypeError>; +pub(crate) type InferResult<'db, T> = Result, TypeError>; #[derive(Debug, PartialEq, Eq, Clone)] pub enum InferenceDiagnostic { @@ -375,6 +379,26 @@ impl Adjustment { } } +/// At least for initial deployment, we want to limit two-phase borrows to +/// only a few specific cases. Right now, those are mostly "things that desugar" +/// into method calls: +/// - using `x.some_method()` syntax, where some_method takes `&mut self`, +/// - using `Foo::some_method(&mut x, ...)` syntax, +/// - binary assignment operators (`+=`, `-=`, `*=`, etc.). +/// +/// Anything else should be rejected until generalized two-phase borrow support +/// is implemented. Right now, dataflow can't handle the general case where there +/// is more than one use of a mutable borrow, and we don't want to accept too much +/// new code via two-phase borrows, so we try to limit where we create two-phase +/// capable mutable borrows. +/// See #49434 for tracking. +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub(crate) enum AllowTwoPhase { + // FIXME: We should use this when appropriate. + Yes, + No, +} + #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub enum Adjust { /// Go from ! to any type. @@ -390,8 +414,6 @@ pub enum Adjust { /// call, with the signature `&'a T -> &'a U` or `&'a mut T -> &'a mut U`. /// The target type is `U` in both cases, with the region and mutability /// being those shared by both the receiver and the returned reference. -/// -/// Mutability is `None` when we are not sure. #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct OverloadedDeref(pub Option); @@ -653,6 +675,7 @@ pub(crate) struct InferenceContext<'db> { /// Generally you should not resolve things via this resolver. Instead create a TyLoweringContext /// and resolve the path via its methods. This will ensure proper error reporting. pub(crate) resolver: Resolver<'db>, + target_features: OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>, generic_def: GenericDefId, generics: OnceCell, table: unify::InferenceTable<'db>, @@ -670,11 +693,11 @@ pub(crate) struct InferenceContext<'db> { /// If `Some`, this stores coercion information for returned /// expressions. If `None`, this is in a context where return is /// inappropriate, such as a const expression. - return_coercion: Option, + return_coercion: Option>, /// The resume type and the yield type, respectively, of the coroutine being inferred. resume_yield_tys: Option<(Ty, Ty)>, diverges: Diverges, - breakables: Vec, + breakables: Vec>, /// Whether we are inside the pattern of a destructuring assignment. inside_assignment: bool, @@ -689,21 +712,21 @@ pub(crate) struct InferenceContext<'db> { /// We do that because sometimes we truncate projections (when a closure captures /// both `a.b` and `a.b.c`), and we want to provide accurate spans in this case. current_capture_span_stack: Vec, - current_closure: Option, + current_closure: Option, /// Stores the list of closure ids that need to be analyzed before this closure. 
See the /// comment on `InferenceContext::sort_closures` - closure_dependencies: FxHashMap>, - deferred_closures: FxHashMap, ExprId)>>, + closure_dependencies: FxHashMap>, + deferred_closures: FxHashMap, ExprId)>>, diagnostics: Diagnostics, } #[derive(Clone, Debug)] -struct BreakableContext { +struct BreakableContext<'db> { /// Whether this context contains at least one break expression. may_break: bool, /// The coercion target of the context. - coerce: Option, + coerce: Option>, /// The optional label of the context. label: Option, kind: BreakableKind, @@ -718,10 +741,10 @@ enum BreakableKind { Border, } -fn find_breakable( - ctxs: &mut [BreakableContext], +fn find_breakable<'a, 'db>( + ctxs: &'a mut [BreakableContext<'db>], label: Option, -) -> Option<&mut BreakableContext> { +) -> Option<&'a mut BreakableContext<'db>> { let mut ctxs = ctxs .iter_mut() .rev() @@ -732,10 +755,10 @@ fn find_breakable( } } -fn find_continuable( - ctxs: &mut [BreakableContext], +fn find_continuable<'a, 'db>( + ctxs: &'a mut [BreakableContext<'db>], label: Option, -) -> Option<&mut BreakableContext> { +) -> Option<&'a mut BreakableContext<'db>> { match label { Some(_) => find_breakable(ctxs, label).filter(|it| matches!(it.kind, BreakableKind::Loop)), None => find_breakable(ctxs, label), @@ -756,6 +779,7 @@ impl<'db> InferenceContext<'db> { ) -> Self { let trait_env = db.trait_environment_for_body(owner); InferenceContext { + target_features: OnceCell::new(), generics: OnceCell::new(), result: InferenceResult::default(), table: unify::InferenceTable::new(db, trait_env), @@ -791,18 +815,56 @@ impl<'db> InferenceContext<'db> { self.generics.get_or_init(|| crate::generics::generics(self.db, self.generic_def)) } + #[inline] + fn krate(&self) -> Crate { + self.resolver.krate() + } + + fn target_features<'a>( + db: &dyn HirDatabase, + target_features: &'a OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>, + owner: DefWithBodyId, + krate: Crate, + ) -> (&'a TargetFeatures, TargetFeatureIsSafeInTarget) { + let (target_features, target_feature_is_safe) = target_features.get_or_init(|| { + let target_features = match owner { + DefWithBodyId::FunctionId(id) => TargetFeatures::from_attrs(&db.attrs(id.into())), + _ => TargetFeatures::default(), + }; + let target_feature_is_safe = match &krate.workspace_data(db).target { + Ok(target) => crate::utils::target_feature_is_safe_in_target(target), + Err(_) => TargetFeatureIsSafeInTarget::No, + }; + (target_features, target_feature_is_safe) + }); + (target_features, *target_feature_is_safe) + } + + #[inline] + pub(crate) fn set_tainted_by_errors(&mut self) { + self.result.has_errors = true; + } + // FIXME: This function should be private in module. It is currently only used in the consteval, since we need // `InferenceResult` in the middle of inference. See the fixme comment in `consteval::eval_to_const`. If you // used this function for another workaround, mention it here. If you really need this function and believe that // there is no problem in it being `pub(crate)`, remove this comment. - pub(crate) fn resolve_all(self) -> InferenceResult { + pub(crate) fn resolve_all(mut self) -> InferenceResult { + self.table.select_obligations_where_possible(); + self.table.fallback_if_possible(); + + // Comment from rustc: + // Even though coercion casts provide type hints, we check casts after fallback for + // backwards compatibility. This makes fallback a stronger type hint than a cast coercion. 
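// --- editor's note: illustrative example, not part of the patch --------------
// Loosely related to the rustc comment above about checking casts only after
// fallback: the target type of a plain cast does not constrain the operand's
// inference variable, so an otherwise unconstrained integer literal falls
// back to `i32` before the cast is checked, and the cast is then an ordinary
// `i32 as u8` conversion rather than the cast target steering the literal.
fn main() {
    let x = 300;     // unconstrained {integer} -> falls back to i32
    let y = x as u8; // checked after fallback: i32 as u8, wraps to 44
    assert_eq!(y, 44);
    assert_eq!(std::mem::size_of_val(&x), 4); // x really is i32
}
// --- end of editor's example -------------------------------------------------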
+ let cast_checks = std::mem::take(&mut self.deferred_cast_checks); + for mut cast in cast_checks.into_iter() { + if let Err(diag) = cast.check(&mut self) { + self.diagnostics.push(diag); + } + } + let InferenceContext { - mut table, - mut result, - mut deferred_cast_checks, - tuple_field_accesses_rev, - diagnostics, - .. + mut table, mut result, tuple_field_accesses_rev, diagnostics, .. } = self; let mut diagnostics = diagnostics.finish(); // Destructure every single field so whenever new fields are added to `InferenceResult` we @@ -828,30 +890,12 @@ impl<'db> InferenceContext<'db> { closure_info: _, mutated_bindings_in_closure: _, tuple_field_access_types: _, - coercion_casts, + coercion_casts: _, diagnostics: _, } = &mut result; - table.fallback_if_possible(); - - // Comment from rustc: - // Even though coercion casts provide type hints, we check casts after fallback for - // backwards compatibility. This makes fallback a stronger type hint than a cast coercion. - let mut apply_adjustments = |expr, adj: Vec<_>| { - expr_adjustments.insert(expr, adj.into_boxed_slice()); - }; - let mut set_coercion_cast = |expr| { - coercion_casts.insert(expr); - }; - for cast in deferred_cast_checks.iter_mut() { - if let Err(diag) = - cast.check(&mut table, &mut apply_adjustments, &mut set_coercion_cast) - { - diagnostics.push(diag); - } - } // FIXME resolve obligations as well (use Guidance if necessary) - table.resolve_obligations_as_possible(); + table.select_obligations_where_possible(); // make sure diverging type variables are marked as such table.propagate_diverging_flag(); @@ -920,13 +964,15 @@ impl<'db> InferenceContext<'db> { }); diagnostics.shrink_to_fit(); for (_, subst) in method_resolutions.values_mut() { - *subst = table.resolve_completely(subst.clone()); + *subst = + table.resolve_completely::<_, crate::next_solver::GenericArgs<'db>>(subst.clone()); *has_errors = *has_errors || subst.type_parameters(Interner).any(|ty| ty.contains_unknown()); } method_resolutions.shrink_to_fit(); for (_, subst) in assoc_resolutions.values_mut() { - *subst = table.resolve_completely(subst.clone()); + *subst = + table.resolve_completely::<_, crate::next_solver::GenericArgs<'db>>(subst.clone()); *has_errors = *has_errors || subst.type_parameters(Interner).any(|ty| ty.contains_unknown()); } @@ -944,7 +990,12 @@ impl<'db> InferenceContext<'db> { result.tuple_field_access_types = tuple_field_accesses_rev .into_iter() .enumerate() - .map(|(idx, subst)| (TupleId(idx as u32), table.resolve_completely(subst))) + .map(|(idx, subst)| { + ( + TupleId(idx as u32), + table.resolve_completely::<_, crate::next_solver::GenericArgs<'db>>(subst), + ) + }) .inspect(|(_, subst)| { *has_errors = *has_errors || subst.type_parameters(Interner).any(|ty| ty.contains_unknown()); @@ -1013,14 +1064,12 @@ impl<'db> InferenceContext<'db> { if let Some(self_param) = self.body.self_param && let Some(ty) = param_tys.next() { - let ty = self.insert_type_vars(ty); - let ty = self.normalize_associated_types_in(ty); + let ty = self.process_user_written_ty(ty); self.write_binding_ty(self_param, ty); } let mut tait_candidates = FxHashSet::default(); for (ty, pat) in param_tys.zip(&*self.body.params) { - let ty = self.insert_type_vars(ty); - let ty = self.normalize_associated_types_in(ty); + let ty = self.process_user_written_ty(ty); self.infer_top_pat(*pat, &ty, None); if ty @@ -1071,20 +1120,29 @@ impl<'db> InferenceContext<'db> { None => self.result.standard_types.unit.clone(), }; - self.return_ty = 
self.normalize_associated_types_in(return_ty); - self.return_coercion = Some(CoerceMany::new(self.return_ty.clone())); + self.return_ty = self.process_user_written_ty(return_ty); + self.return_coercion = + Some(CoerceMany::new(self.return_ty.to_nextsolver(self.table.interner))); // Functions might be defining usage sites of TAITs. // To define an TAITs, that TAIT must appear in the function's signatures. // So, it suffices to check for params and return types. - if self - .return_ty - .data(Interner) - .flags - .intersects(TypeFlags::HAS_TY_OPAQUE.union(TypeFlags::HAS_TY_INFER)) - { - tait_candidates.insert(self.return_ty.clone()); - } + fold_tys( + self.return_ty.clone(), + |ty, _| { + match ty.kind(Interner) { + TyKind::OpaqueType(..) + | TyKind::Alias(AliasTy::Opaque(..)) + | TyKind::InferenceVar(..) => { + tait_candidates.insert(self.return_ty.clone()); + } + _ => {} + } + ty + }, + DebruijnIndex::INNERMOST, + ); + self.make_tait_coercion_table(tait_candidates.iter()); } @@ -1100,8 +1158,12 @@ impl<'db> InferenceContext<'db> { fold_tys( t, |ty, _| { + let ty = self.table.structurally_resolve_type(&ty); let opaque_ty_id = match ty.kind(Interner) { - TyKind::OpaqueType(opaque_ty_id, _) => *opaque_ty_id, + TyKind::OpaqueType(opaque_ty_id, _) + | TyKind::Alias(AliasTy::Opaque(crate::OpaqueTy { opaque_ty_id, .. })) => { + *opaque_ty_id + } _ => return ty, }; let (impl_traits, idx) = @@ -1197,9 +1259,11 @@ impl<'db> InferenceContext<'db> { ty: &chalk_ir::Ty, outer_binder: DebruijnIndex, ) -> std::ops::ControlFlow { - let ty = self.table.resolve_ty_shallow(ty); + let ty = self.table.structurally_resolve_type(ty); - if let TyKind::OpaqueType(id, _) = ty.kind(Interner) + if let TyKind::OpaqueType(id, _) + | TyKind::Alias(AliasTy::Opaque(crate::OpaqueTy { opaque_ty_id: id, .. })) = + ty.kind(Interner) && let ImplTraitId::TypeAliasImplTrait(alias_id, _) = self.db.lookup_intern_impl_trait_id((*id).into()) { @@ -1344,6 +1408,13 @@ impl<'db> InferenceContext<'db> { } } + fn write_pat_adj(&mut self, pat: PatId, adjustments: Box<[Ty]>) { + if adjustments.is_empty() { + return; + } + self.result.pat_adjustments.entry(pat).or_default().extend(adjustments); + } + fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId, subst: Substitution) { self.result.method_resolutions.insert(expr, (func, subst)); } @@ -1405,8 +1476,7 @@ impl<'db> InferenceContext<'db> { ) -> Ty { let ty = self .with_ty_lowering(store, type_source, lifetime_elision, |ctx| ctx.lower_ty(type_ref)); - let ty = self.insert_type_vars(ty); - self.normalize_associated_types_in(ty) + self.process_user_written_ty(ty) } fn make_body_ty(&mut self, type_ref: TypeRefId) -> Ty { @@ -1471,7 +1541,8 @@ impl<'db> InferenceContext<'db> { } fn push_obligation(&mut self, o: DomainGoal) { - self.table.register_obligation(o.cast(Interner)); + let goal: crate::Goal = o.cast(Interner); + self.table.register_obligation(goal.to_nextsolver(self.table.interner)); } fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool { @@ -1551,23 +1622,33 @@ impl<'db> InferenceContext<'db> { ty } - /// Recurses through the given type, normalizing associated types mentioned - /// in it by replacing them by type variables and registering obligations to - /// resolve later. This should be done once for every type we get from some - /// type annotation (e.g. from a let type annotation, field type or function - /// call). `make_ty` handles this already, but e.g. for field types we need - /// to do it as well. 
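Much of the hunk above collects candidate defining uses of a type-alias impl Trait (TAIT) from the function signature. A nightly-only sketch of the user code it is looking for; the feature is unstable and its defining-scope rules (for example the newer `#[define_opaque]` attribute) have shifted between toolchains, so treat this as an illustration only.

```rust
#![feature(type_alias_impl_trait)]

type Ints = impl Iterator<Item = u32>;

// `Ints` appears in this function's return type, so the function is a
// candidate defining use: while checking the body, inference must settle on
// a concrete hidden type (here `std::ops::Range<u32>`).
fn ints() -> Ints {
    0..10
}

fn main() {
    assert_eq!(ints().sum::<u32>(), 45);
}
```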
- fn normalize_associated_types_in(&mut self, ty: T) -> T + /// Whenever you lower a user-written type, you should call this. + fn process_user_written_ty(&mut self, ty: T) -> T where - T: HasInterner + TypeFoldable, + T: HasInterner + TypeFoldable + ChalkToNextSolver<'db, U>, + U: NextSolverToChalk<'db, T> + rustc_type_ir::TypeFoldable>, { - self.table.normalize_associated_types_in(ty) + self.table.process_user_written_ty(ty) + } + + /// The difference of this method from `process_user_written_ty()` is that this method doesn't register a well-formed obligation, + /// while `process_user_written_ty()` should (but doesn't currently). + fn process_remote_user_written_ty(&mut self, ty: T) -> T + where + T: HasInterner + TypeFoldable + ChalkToNextSolver<'db, U>, + U: NextSolverToChalk<'db, T> + rustc_type_ir::TypeFoldable>, + { + self.table.process_remote_user_written_ty(ty) } fn resolve_ty_shallow(&mut self, ty: &Ty) -> Ty { self.table.resolve_ty_shallow(ty) } + fn shallow_resolve(&self, ty: crate::next_solver::Ty<'db>) -> crate::next_solver::Ty<'db> { + self.table.shallow_resolve(ty) + } + fn resolve_associated_type(&mut self, inner_ty: Ty, assoc_ty: Option) -> Ty { self.resolve_associated_type_with_params(inner_ty, assoc_ty, &[]) } @@ -1737,7 +1818,7 @@ impl<'db> InferenceContext<'db> { ty = self.table.insert_type_vars(ty); ty = self.table.normalize_associated_types_in(ty); - ty = self.table.resolve_ty_shallow(&ty); + ty = self.table.structurally_resolve_type(&ty); if ty.is_unknown() { return (self.err_ty(), None); } @@ -1779,7 +1860,6 @@ impl<'db> InferenceContext<'db> { TypeNs::AdtId(AdtId::EnumId(_)) | TypeNs::BuiltinType(_) | TypeNs::TraitId(_) - | TypeNs::TraitAliasId(_) | TypeNs::ModuleId(_) => { // FIXME diagnostic (self.err_ty(), None) @@ -1809,7 +1889,7 @@ impl<'db> InferenceContext<'db> { let ty = match ty.kind(Interner) { TyKind::Alias(AliasTy::Projection(proj_ty)) => { let ty = self.table.normalize_projection_ty(proj_ty.clone()); - self.table.resolve_ty_shallow(&ty) + self.table.structurally_resolve_type(&ty) } _ => ty, }; @@ -2039,7 +2119,7 @@ impl Expectation { fn adjust_for_branches(&self, table: &mut unify::InferenceTable<'_>) -> Expectation { match self { Expectation::HasType(ety) => { - let ety = table.resolve_ty_shallow(ety); + let ety = table.structurally_resolve_type(ety); if ety.is_ty_var() { Expectation::None } else { Expectation::HasType(ety) } } Expectation::RValueLikeUnsized(ety) => Expectation::RValueLikeUnsized(ety.clone()), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/autoderef.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/autoderef.rs new file mode 100644 index 0000000000000..77b1ae6a94a46 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/autoderef.rs @@ -0,0 +1,54 @@ +//! Autoderef helpers for inference. + +use std::iter; + +use crate::{ + Adjust, Adjustment, OverloadedDeref, + autoderef::{Autoderef, AutoderefKind}, + infer::unify::InferenceTable, + next_solver::{ + Ty, + infer::{InferOk, traits::PredicateObligations}, + mapping::NextSolverToChalk, + }, +}; + +impl<'db> InferenceTable<'db> { + pub(crate) fn autoderef(&mut self, base_ty: Ty<'db>) -> Autoderef<'_, 'db> { + Autoderef::new(self, base_ty) + } +} + +impl<'db> Autoderef<'_, 'db> { + /// Returns the adjustment steps. 
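As a stable illustration of the adjustment steps computed by the helper that begins below (identifiers are ours): one built-in deref plus one overloaded deref through `Box`.

```rust
fn count(v: &Box<Vec<u8>>) -> usize {
    // Method lookup autoderefs `&Box<Vec<u8>>` -> `Box<Vec<u8>>` (built-in)
    // -> `Vec<u8>` (overloaded, through `Deref for Box`), then re-borrows for
    // `Vec::len`'s `&self` receiver. The deref steps are recorded as
    // `Adjust::Deref` adjustments, the overloaded one carrying an
    // `OverloadedDeref`.
    v.len()
}

fn main() {
    assert_eq!(count(&Box::new(vec![1, 2, 3])), 3);
}
```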
+ pub(crate) fn adjust_steps(mut self) -> Vec { + let infer_ok = self.adjust_steps_as_infer_ok(); + self.table.register_infer_ok(infer_ok) + } + + pub(crate) fn adjust_steps_as_infer_ok(&mut self) -> InferOk<'db, Vec> { + let steps = self.steps(); + if steps.is_empty() { + return InferOk { obligations: PredicateObligations::new(), value: vec![] }; + } + + let targets = steps.iter().skip(1).map(|&(ty, _)| ty).chain(iter::once(self.final_ty())); + let steps: Vec<_> = steps + .iter() + .map(|&(_source, kind)| { + if let AutoderefKind::Overloaded = kind { + Some(OverloadedDeref(Some(chalk_ir::Mutability::Not))) + } else { + None + } + }) + .zip(targets) + .map(|(autoderef, target)| Adjustment { + kind: Adjust::Deref(autoderef), + target: target.to_chalk(self.table.interner), + }) + .collect(); + + InferOk { obligations: self.take_obligations(), value: steps } + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs index f0a4167f8e250..4cd6144a14cba 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs @@ -1,13 +1,17 @@ //! Type cast logic. Basically coercion + additional casts. use chalk_ir::{Mutability, Scalar, TyVariableKind, UintTy}; -use hir_def::{AdtId, hir::ExprId}; +use hir_def::{AdtId, hir::ExprId, signatures::TraitFlags}; use stdx::never; +use crate::infer::coerce::CoerceNever; use crate::{ - Adjustment, Binders, DynTy, InferenceDiagnostic, Interner, PlaceholderIndex, - QuantifiedWhereClauses, Ty, TyExt, TyKind, TypeFlags, WhereClause, - infer::{coerce::CoerceNever, unify::InferenceTable}, + Binders, DynTy, InferenceDiagnostic, Interner, PlaceholderIndex, QuantifiedWhereClauses, Ty, + TyExt, TyKind, TypeFlags, WhereClause, + db::HirDatabase, + from_chalk_trait_id, + infer::{AllowTwoPhase, InferenceContext}, + next_solver::mapping::ChalkToNextSolver, }; #[derive(Debug)] @@ -30,7 +34,7 @@ pub(crate) enum CastTy { } impl CastTy { - pub(crate) fn from_ty(table: &mut InferenceTable<'_>, t: &Ty) -> Option { + pub(crate) fn from_ty(db: &dyn HirDatabase, t: &Ty) -> Option { match t.kind(Interner) { TyKind::Scalar(Scalar::Bool) => Some(Self::Int(Int::Bool)), TyKind::Scalar(Scalar::Char) => Some(Self::Int(Int::Char)), @@ -43,8 +47,8 @@ impl CastTy { let (AdtId::EnumId(id), _) = t.as_adt()? 
else { return None; }; - let enum_data = id.enum_variants(table.db); - if enum_data.is_payload_free(table.db) { Some(Self::Int(Int::CEnum)) } else { None } + let enum_data = id.enum_variants(db); + if enum_data.is_payload_free(db) { Some(Self::Int(Int::CEnum)) } else { None } } TyKind::Raw(m, ty) => Some(Self::Ptr(ty.clone(), *m)), TyKind::Function(_) => Some(Self::FnPtr), @@ -91,25 +95,34 @@ impl CastCheck { Self { expr, source_expr, expr_ty, cast_ty } } - pub(super) fn check( + pub(super) fn check( &mut self, - table: &mut InferenceTable<'_>, - apply_adjustments: &mut F, - set_coercion_cast: &mut G, - ) -> Result<(), InferenceDiagnostic> - where - F: FnMut(ExprId, Vec), - G: FnMut(ExprId), - { - self.expr_ty = table.eagerly_normalize_and_resolve_shallow_in(self.expr_ty.clone()); - self.cast_ty = table.eagerly_normalize_and_resolve_shallow_in(self.cast_ty.clone()); + ctx: &mut InferenceContext<'_>, + ) -> Result<(), InferenceDiagnostic> { + self.expr_ty = ctx.table.eagerly_normalize_and_resolve_shallow_in(self.expr_ty.clone()); + self.cast_ty = ctx.table.eagerly_normalize_and_resolve_shallow_in(self.cast_ty.clone()); + + // This should always come first so that we apply the coercion, which impacts infer vars. + if ctx + .coerce( + self.source_expr.into(), + self.expr_ty.to_nextsolver(ctx.table.interner), + self.cast_ty.to_nextsolver(ctx.table.interner), + AllowTwoPhase::No, + CoerceNever::Yes, + ) + .is_ok() + { + ctx.result.coercion_casts.insert(self.source_expr); + return Ok(()); + } if self.expr_ty.contains_unknown() || self.cast_ty.contains_unknown() { return Ok(()); } if !self.cast_ty.data(Interner).flags.contains(TypeFlags::HAS_TY_INFER) - && !table.is_sized(&self.cast_ty) + && !ctx.table.is_sized(&self.cast_ty) { return Err(InferenceDiagnostic::CastToUnsized { expr: self.expr, @@ -124,76 +137,63 @@ impl CastCheck { return Ok(()); } - if let Ok((adj, _)) = table.coerce(&self.expr_ty, &self.cast_ty, CoerceNever::Yes) { - apply_adjustments(self.source_expr, adj); - set_coercion_cast(self.source_expr); - return Ok(()); - } - - self.do_check(table, apply_adjustments) + self.do_check(ctx) .map_err(|e| e.into_diagnostic(self.expr, self.expr_ty.clone(), self.cast_ty.clone())) } - fn do_check( - &self, - table: &mut InferenceTable<'_>, - apply_adjustments: &mut F, - ) -> Result<(), CastError> - where - F: FnMut(ExprId, Vec), - { - let (t_from, t_cast) = - match (CastTy::from_ty(table, &self.expr_ty), CastTy::from_ty(table, &self.cast_ty)) { - (Some(t_from), Some(t_cast)) => (t_from, t_cast), - (None, Some(t_cast)) => match self.expr_ty.kind(Interner) { - TyKind::FnDef(..) => { - let sig = self.expr_ty.callable_sig(table.db).expect("FnDef had no sig"); - let sig = table.eagerly_normalize_and_resolve_shallow_in(sig); - let fn_ptr = TyKind::Function(sig.to_fn_ptr()).intern(Interner); - if let Ok((adj, _)) = table.coerce(&self.expr_ty, &fn_ptr, CoerceNever::Yes) - { - apply_adjustments(self.source_expr, adj); - } else { - return Err(CastError::IllegalCast); - } - - (CastTy::FnPtr, t_cast) + fn do_check(&self, ctx: &mut InferenceContext<'_>) -> Result<(), CastError> { + let (t_from, t_cast) = match ( + CastTy::from_ty(ctx.db, &self.expr_ty), + CastTy::from_ty(ctx.db, &self.cast_ty), + ) { + (Some(t_from), Some(t_cast)) => (t_from, t_cast), + (None, Some(t_cast)) => match self.expr_ty.kind(Interner) { + TyKind::FnDef(..) 
=> { + let sig = self.expr_ty.callable_sig(ctx.db).expect("FnDef had no sig"); + let sig = ctx.table.eagerly_normalize_and_resolve_shallow_in(sig); + let fn_ptr = TyKind::Function(sig.to_fn_ptr()).intern(Interner); + if ctx + .coerce( + self.source_expr.into(), + self.expr_ty.to_nextsolver(ctx.table.interner), + fn_ptr.to_nextsolver(ctx.table.interner), + AllowTwoPhase::No, + CoerceNever::Yes, + ) + .is_ok() + { + } else { + return Err(CastError::IllegalCast); } - TyKind::Ref(mutbl, _, inner_ty) => { - return match t_cast { - CastTy::Int(_) | CastTy::Float => match inner_ty.kind(Interner) { - TyKind::Scalar( - Scalar::Int(_) | Scalar::Uint(_) | Scalar::Float(_), - ) - | TyKind::InferenceVar( - _, - TyVariableKind::Integer | TyVariableKind::Float, - ) => Err(CastError::NeedDeref), - - _ => Err(CastError::NeedViaPtr), - }, - // array-ptr-cast - CastTy::Ptr(t, m) => { - let t = table.eagerly_normalize_and_resolve_shallow_in(t); - if !table.is_sized(&t) { - return Err(CastError::IllegalCast); - } - self.check_ref_cast( - table, - inner_ty, - *mutbl, - &t, - m, - apply_adjustments, - ) + + (CastTy::FnPtr, t_cast) + } + TyKind::Ref(mutbl, _, inner_ty) => { + return match t_cast { + CastTy::Int(_) | CastTy::Float => match inner_ty.kind(Interner) { + TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_) | Scalar::Float(_)) + | TyKind::InferenceVar( + _, + TyVariableKind::Integer | TyVariableKind::Float, + ) => Err(CastError::NeedDeref), + + _ => Err(CastError::NeedViaPtr), + }, + // array-ptr-cast + CastTy::Ptr(t, m) => { + let t = ctx.table.eagerly_normalize_and_resolve_shallow_in(t); + if !ctx.table.is_sized(&t) { + return Err(CastError::IllegalCast); } - _ => Err(CastError::NonScalar), - }; - } - _ => return Err(CastError::NonScalar), - }, + self.check_ref_cast(ctx, inner_ty, *mutbl, &t, m) + } + _ => Err(CastError::NonScalar), + }; + } _ => return Err(CastError::NonScalar), - }; + }, + _ => return Err(CastError::NonScalar), + }; // rustc checks whether the `expr_ty` is foreign adt with `non_exhaustive` sym @@ -207,12 +207,10 @@ impl CastCheck { } (CastTy::Int(Int::Bool | Int::CEnum | Int::Char) | CastTy::Float, CastTy::Ptr(..)) | (CastTy::Ptr(..) | CastTy::FnPtr, CastTy::Float) => Err(CastError::IllegalCast), - (CastTy::Ptr(src, _), CastTy::Ptr(dst, _)) => { - self.check_ptr_ptr_cast(table, &src, &dst) - } - (CastTy::Ptr(src, _), CastTy::Int(_)) => self.check_ptr_addr_cast(table, &src), - (CastTy::Int(_), CastTy::Ptr(dst, _)) => self.check_addr_ptr_cast(table, &dst), - (CastTy::FnPtr, CastTy::Ptr(dst, _)) => self.check_fptr_ptr_cast(table, &dst), + (CastTy::Ptr(src, _), CastTy::Ptr(dst, _)) => self.check_ptr_ptr_cast(ctx, &src, &dst), + (CastTy::Ptr(src, _), CastTy::Int(_)) => self.check_ptr_addr_cast(ctx, &src), + (CastTy::Int(_), CastTy::Ptr(dst, _)) => self.check_addr_ptr_cast(ctx, &dst), + (CastTy::FnPtr, CastTy::Ptr(dst, _)) => self.check_fptr_ptr_cast(ctx, &dst), (CastTy::Int(Int::CEnum), CastTy::Int(_)) => Ok(()), (CastTy::Int(Int::Char | Int::Bool), CastTy::Int(_)) => Ok(()), (CastTy::Int(_) | CastTy::Float, CastTy::Int(_) | CastTy::Float) => Ok(()), @@ -220,26 +218,30 @@ impl CastCheck { } } - fn check_ref_cast( + fn check_ref_cast( &self, - table: &mut InferenceTable<'_>, + ctx: &mut InferenceContext<'_>, t_expr: &Ty, m_expr: Mutability, t_cast: &Ty, m_cast: Mutability, - apply_adjustments: &mut F, - ) -> Result<(), CastError> - where - F: FnMut(ExprId, Vec), - { + ) -> Result<(), CastError> { // Mutability order is opposite to rustc. 
`Mut < Not` if m_expr <= m_cast && let TyKind::Array(ety, _) = t_expr.kind(Interner) { // Coerce to a raw pointer so that we generate RawPtr in MIR. let array_ptr_type = TyKind::Raw(m_expr, t_expr.clone()).intern(Interner); - if let Ok((adj, _)) = table.coerce(&self.expr_ty, &array_ptr_type, CoerceNever::Yes) { - apply_adjustments(self.source_expr, adj); + if ctx + .coerce( + self.source_expr.into(), + self.expr_ty.to_nextsolver(ctx.table.interner), + array_ptr_type.to_nextsolver(ctx.table.interner), + AllowTwoPhase::No, + CoerceNever::Yes, + ) + .is_ok() + { } else { never!( "could not cast from reference to array to pointer to array ({:?} to {:?})", @@ -250,7 +252,16 @@ impl CastCheck { // This is a less strict condition than rustc's `demand_eqtype`, // but false negative is better than false positive - if table.coerce(ety, t_cast, CoerceNever::Yes).is_ok() { + if ctx + .coerce( + self.source_expr.into(), + ety.to_nextsolver(ctx.table.interner), + t_cast.to_nextsolver(ctx.table.interner), + AllowTwoPhase::No, + CoerceNever::Yes, + ) + .is_ok() + { return Ok(()); } } @@ -260,12 +271,12 @@ impl CastCheck { fn check_ptr_ptr_cast( &self, - table: &mut InferenceTable<'_>, + ctx: &mut InferenceContext<'_>, src: &Ty, dst: &Ty, ) -> Result<(), CastError> { - let src_kind = pointer_kind(src, table).map_err(|_| CastError::Unknown)?; - let dst_kind = pointer_kind(dst, table).map_err(|_| CastError::Unknown)?; + let src_kind = pointer_kind(src, ctx).map_err(|_| CastError::Unknown)?; + let dst_kind = pointer_kind(dst, ctx).map_err(|_| CastError::Unknown)?; match (src_kind, dst_kind) { (Some(PointerKind::Error), _) | (_, Some(PointerKind::Error)) => Ok(()), @@ -290,10 +301,12 @@ impl CastCheck { return Ok(()); } let src_principal = - table.db.trait_datum(table.trait_env.krate, src_principal); + ctx.db.trait_signature(from_chalk_trait_id(src_principal)); let dst_principal = - table.db.trait_datum(table.trait_env.krate, dst_principal); - if src_principal.is_auto_trait() && dst_principal.is_auto_trait() { + ctx.db.trait_signature(from_chalk_trait_id(dst_principal)); + if src_principal.flags.contains(TraitFlags::AUTO) + && dst_principal.flags.contains(TraitFlags::AUTO) + { Ok(()) } else { Err(CastError::DifferingKinds) @@ -309,10 +322,10 @@ impl CastCheck { fn check_ptr_addr_cast( &self, - table: &mut InferenceTable<'_>, + ctx: &mut InferenceContext<'_>, expr_ty: &Ty, ) -> Result<(), CastError> { - match pointer_kind(expr_ty, table).map_err(|_| CastError::Unknown)? { + match pointer_kind(expr_ty, ctx).map_err(|_| CastError::Unknown)? { // None => Err(CastError::UnknownExprPtrKind), None => Ok(()), Some(PointerKind::Error) => Ok(()), @@ -323,10 +336,10 @@ impl CastCheck { fn check_addr_ptr_cast( &self, - table: &mut InferenceTable<'_>, + ctx: &mut InferenceContext<'_>, cast_ty: &Ty, ) -> Result<(), CastError> { - match pointer_kind(cast_ty, table).map_err(|_| CastError::Unknown)? { + match pointer_kind(cast_ty, ctx).map_err(|_| CastError::Unknown)? { // None => Err(CastError::UnknownCastPtrKind), None => Ok(()), Some(PointerKind::Error) => Ok(()), @@ -339,10 +352,10 @@ impl CastCheck { fn check_fptr_ptr_cast( &self, - table: &mut InferenceTable<'_>, + ctx: &mut InferenceContext<'_>, cast_ty: &Ty, ) -> Result<(), CastError> { - match pointer_kind(cast_ty, table).map_err(|_| CastError::Unknown)? { + match pointer_kind(cast_ty, ctx).map_err(|_| CastError::Unknown)? 
{ // None => Err(CastError::UnknownCastPtrKind), None => Ok(()), Some(PointerKind::Error) => Ok(()), @@ -365,10 +378,10 @@ enum PointerKind { Error, } -fn pointer_kind(ty: &Ty, table: &mut InferenceTable<'_>) -> Result, ()> { - let ty = table.eagerly_normalize_and_resolve_shallow_in(ty.clone()); +fn pointer_kind(ty: &Ty, ctx: &mut InferenceContext<'_>) -> Result, ()> { + let ty = ctx.table.eagerly_normalize_and_resolve_shallow_in(ty.clone()); - if table.is_sized(&ty) { + if ctx.table.is_sized(&ty) { return Ok(Some(PointerKind::Thin)); } @@ -381,11 +394,11 @@ fn pointer_kind(ty: &Ty, table: &mut InferenceTable<'_>) -> Result) -> Result { match subst.iter(Interner).last().and_then(|arg| arg.ty(Interner)) { None => Ok(Some(PointerKind::Thin)), - Some(ty) => pointer_kind(ty, table), + Some(ty) => pointer_kind(ty, ctx), } } TyKind::Foreign(_) => Ok(Some(PointerKind::Thin)), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs index 8024c1a9a4e92..1d5d8dd13edd4 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs @@ -1,137 +1,168 @@ //! Inference of closure parameter types based on the closure's expected type. -use std::{cmp, convert::Infallible, mem, ops::ControlFlow}; +pub(crate) mod analysis; + +use std::ops::ControlFlow; +use std::{iter, mem}; -use chalk_ir::{ - BoundVar, DebruijnIndex, FnSubst, Mutability, TyKind, - cast::Cast, - fold::{FallibleTypeFolder, Shift, TypeFoldable}, - visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor}, -}; -use either::Either; use hir_def::{ - DefWithBodyId, FieldId, HasModule, TupleFieldId, TupleId, VariantId, - expr_store::path::Path, - hir::{ - Array, AsmOperand, BinaryOp, BindingId, CaptureBy, ClosureKind, Expr, ExprId, ExprOrPatId, - Pat, PatId, Statement, UnaryOp, - }, - item_tree::FieldsShape, + TraitId, + hir::{ClosureKind, ExprId, PatId}, lang_item::LangItem, - resolver::ValueNs, + type_ref::TypeRefId, +}; +use rustc_type_ir::{ + ClosureArgs, ClosureArgsParts, CoroutineArgs, CoroutineArgsParts, Interner, TypeSuperVisitable, + TypeVisitable, TypeVisitableExt, TypeVisitor, + inherent::{BoundExistentialPredicates, GenericArgs as _, IntoKind, SliceLike, Ty as _}, }; -use hir_def::{Lookup, type_ref::TypeRefId}; -use hir_expand::name::Name; -use intern::sym; -use rustc_hash::{FxHashMap, FxHashSet}; -use smallvec::{SmallVec, smallvec}; -use stdx::{format_to, never}; -use syntax::utils::is_raw_identifier; +use tracing::debug; +use crate::traits::FnTrait; use crate::{ - Adjust, Adjustment, AliasEq, AliasTy, Binders, BindingMode, ChalkTraitId, ClosureId, DynTy, - DynTyExt, FnAbi, FnPointer, FnSig, GenericArg, Interner, OpaqueTy, ProjectionTy, - ProjectionTyExt, Substitution, Ty, TyBuilder, TyExt, WhereClause, - db::{HirDatabase, InternedClosure, InternedCoroutine}, - error_lifetime, from_assoc_type_id, from_chalk_trait_id, from_placeholder_idx, - generics::Generics, - infer::{BreakableKind, CoerceMany, Diverges, coerce::CoerceNever}, - make_binders, - mir::{BorrowKind, MirSpan, MutBorrowKind, ProjectionElem}, - to_assoc_type_id, to_chalk_trait_id, - traits::FnTrait, - utils::{self, elaborate_clause_supertraits}, + FnAbi, + db::{InternedClosure, InternedCoroutine}, + infer::{BreakableKind, Diverges, coerce::CoerceMany}, + next_solver::{ + AliasTy, Binder, ClauseKind, DbInterner, ErrorGuaranteed, FnSig, GenericArgs, PolyFnSig, + PolyProjectionPredicate, Predicate, PredicateKind, SolverDefId, Ty, 
TyKind, + abi::Safety, + infer::{ + BoundRegionConversionTime, DefineOpaqueTypes, InferOk, InferResult, + traits::{ObligationCause, PredicateObligations}, + }, + mapping::{ChalkToNextSolver, NextSolverToChalk}, + util::explicit_item_bounds, + }, }; use super::{Expectation, InferenceContext}; #[derive(Debug)] -pub(super) struct ClosureSignature { - pub(super) ret_ty: Ty, - pub(super) expected_sig: FnPointer, +struct ClosureSignatures<'tcx> { + /// The signature users of the closure see. + bound_sig: PolyFnSig<'tcx>, + /// The signature within the function body. + /// This mostly differs in the sense that lifetimes are now early bound and any + /// opaque types from the signature expectation are overridden in case there are + /// explicit hidden types written by the user in the closure signature. + liberated_sig: FnSig<'tcx>, } -impl InferenceContext<'_> { +impl<'db> InferenceContext<'db> { pub(super) fn infer_closure( &mut self, - body: &ExprId, + body: ExprId, args: &[PatId], - ret_type: &Option, + ret_type: Option, arg_types: &[Option], closure_kind: ClosureKind, tgt_expr: ExprId, expected: &Expectation, - ) -> Ty { + ) -> crate::Ty { assert_eq!(args.len(), arg_types.len()); + let interner = self.table.interner; let (expected_sig, expected_kind) = match expected.to_option(&mut self.table) { - Some(expected_ty) => self.deduce_closure_signature(&expected_ty, closure_kind), + Some(expected_ty) => { + self.deduce_closure_signature(expected_ty.to_nextsolver(interner), closure_kind) + } None => (None, None), }; - let ClosureSignature { expected_sig: bound_sig, ret_ty: body_ret_ty } = - self.sig_of_closure(body, ret_type, arg_types, closure_kind, expected_sig); - let bound_sig = self.normalize_associated_types_in(bound_sig); - let sig_ty = TyKind::Function(bound_sig.clone()).intern(Interner); + let ClosureSignatures { bound_sig, liberated_sig } = + self.sig_of_closure(arg_types, ret_type, expected_sig); + let body_ret_ty = bound_sig.output().skip_binder(); + let sig_ty = Ty::new_fn_ptr(interner, bound_sig); + let parent_args = GenericArgs::identity_for_item(interner, self.generic_def.into()); let (id, ty, resume_yield_tys) = match closure_kind { ClosureKind::Coroutine(_) => { - let sig_tys = bound_sig.substitution.0.as_slice(Interner); - // FIXME: report error when there are more than 1 parameter. - let resume_ty = match sig_tys.first() { - // When `sig_tys.len() == 1` the first type is the return type, not the - // first parameter type. - Some(ty) if sig_tys.len() > 1 => ty.assert_ty_ref(Interner).clone(), - _ => self.result.standard_types.unit.clone(), + let yield_ty = self.table.next_ty_var(); + let resume_ty = liberated_sig + .inputs() + .get(0) + .unwrap_or(self.result.standard_types.unit.to_nextsolver(interner)); + + // FIXME: Infer the upvars later. 
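The resume, yield and return types set up here correspond to the three user-visible types of a coroutine. A nightly-only sketch (the `coroutines` feature and its surface syntax, e.g. the `#[coroutine]` attribute, have changed across toolchains, so this is an illustration rather than exact current syntax):

```rust
#![feature(coroutines, coroutine_trait, stmt_expr_attributes)]

use std::ops::Coroutine;
use std::pin::Pin;

fn main() {
    // resume type: `u32` (the closure parameter), yield type: `u32`,
    // return type: `u32` -- the three types wired into `CoroutineArgsParts`.
    let mut co = #[coroutine]
    |start: u32| {
        let resumed = yield start + 1;
        resumed + 1
    };
    let _ = Pin::new(&mut co).resume(10); // yields 11
    let _ = Pin::new(&mut co).resume(20); // returns 21
}
```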
+ let parts = CoroutineArgsParts { + parent_args, + kind_ty: Ty::new_unit(interner), + resume_ty, + yield_ty, + return_ty: body_ret_ty, + tupled_upvars_ty: Ty::new_unit(interner), }; - let yield_ty = self.table.new_type_var(); - - let subst = TyBuilder::subst_for_coroutine(self.db, self.owner) - .push(resume_ty.clone()) - .push(yield_ty.clone()) - .push(body_ret_ty.clone()) - .build(); let coroutine_id = self.db.intern_coroutine(InternedCoroutine(self.owner, tgt_expr)).into(); - let coroutine_ty = TyKind::Coroutine(coroutine_id, subst).intern(Interner); + let coroutine_ty = Ty::new_coroutine( + interner, + coroutine_id, + CoroutineArgs::new(interner, parts).args, + ); - (None, coroutine_ty, Some((resume_ty, yield_ty))) + ( + None, + coroutine_ty, + Some((resume_ty.to_chalk(interner), yield_ty.to_chalk(interner))), + ) } + // FIXME(next-solver): `ClosureKind::Async` should really be a separate arm that creates a `CoroutineClosure`. + // But for now we treat it as a closure. ClosureKind::Closure | ClosureKind::Async => { - let closure_id = - self.db.intern_closure(InternedClosure(self.owner, tgt_expr)).into(); - let closure_ty = TyKind::Closure( - closure_id, - TyBuilder::subst_for_closure(self.db, self.owner, sig_ty.clone()), - ) - .intern(Interner); + let closure_id = self.db.intern_closure(InternedClosure(self.owner, tgt_expr)); + match expected_kind { + Some(kind) => { + self.result.closure_info.insert( + closure_id.into(), + ( + Vec::new(), + match kind { + rustc_type_ir::ClosureKind::Fn => FnTrait::Fn, + rustc_type_ir::ClosureKind::FnMut => FnTrait::FnMut, + rustc_type_ir::ClosureKind::FnOnce => FnTrait::FnOnce, + }, + ), + ); + } + None => {} + }; + // FIXME: Infer the kind and the upvars later when needed. + let parts = ClosureArgsParts { + parent_args, + closure_kind_ty: Ty::from_closure_kind( + interner, + expected_kind.unwrap_or(rustc_type_ir::ClosureKind::Fn), + ), + closure_sig_as_fn_ptr_ty: sig_ty, + tupled_upvars_ty: Ty::new_unit(interner), + }; + let closure_ty = Ty::new_closure( + interner, + closure_id.into(), + ClosureArgs::new(interner, parts).args, + ); self.deferred_closures.entry(closure_id).or_default(); self.add_current_closure_dependency(closure_id); (Some(closure_id), closure_ty, None) } }; - // Eagerly try to relate the closure type with the expected - // type, otherwise we often won't have enough information to - // infer the body. 
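A stable example of why the closure type is eagerly related to the expectation (identifiers are ours): the unannotated parameter below can only be inferred from the expected `impl Fn` type.

```rust
fn apply(f: impl Fn(u32) -> u32) -> u32 {
    f(1)
}

fn main() {
    // `x` is unannotated: its type is obtained by relating the closure's
    // type with the expected `impl Fn(u32) -> u32`, which is what lets the
    // body `x + 1` type-check.
    assert_eq!(apply(|x| x + 1), 2);
}
```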
- self.deduce_closure_type_from_expectations(tgt_expr, &ty, &sig_ty, expected, expected_kind); - // Now go through the argument patterns - for (arg_pat, arg_ty) in args.iter().zip(bound_sig.substitution.0.as_slice(Interner).iter()) - { - self.infer_top_pat(*arg_pat, arg_ty.assert_ty_ref(Interner), None); + for (arg_pat, arg_ty) in args.iter().zip(bound_sig.skip_binder().inputs()) { + self.infer_top_pat(*arg_pat, &arg_ty.to_chalk(interner), None); } // FIXME: lift these out into a struct let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe); let prev_closure = mem::replace(&mut self.current_closure, id); - let prev_ret_ty = mem::replace(&mut self.return_ty, body_ret_ty.clone()); + let prev_ret_ty = mem::replace(&mut self.return_ty, body_ret_ty.to_chalk(interner)); let prev_ret_coercion = self.return_coercion.replace(CoerceMany::new(body_ret_ty)); let prev_resume_yield_tys = mem::replace(&mut self.resume_yield_tys, resume_yield_tys); self.with_breakable_ctx(BreakableKind::Border, None, None, |this| { - this.infer_return(*body); + this.infer_return(body); }); self.diverges = prev_diverges; @@ -140,1691 +171,644 @@ impl InferenceContext<'_> { self.current_closure = prev_closure; self.resume_yield_tys = prev_resume_yield_tys; - self.table.normalize_associated_types_in(ty) + ty.to_chalk(interner) } - // This function handles both closures and coroutines. - pub(super) fn deduce_closure_type_from_expectations( - &mut self, - closure_expr: ExprId, - closure_ty: &Ty, - sig_ty: &Ty, - expectation: &Expectation, - expected_kind: Option, - ) { - let expected_ty = match expectation.to_option(&mut self.table) { - Some(ty) => ty, - None => return, - }; - - match (closure_ty.kind(Interner), expected_kind) { - (TyKind::Closure(closure_id, _), Some(closure_kind)) => { - self.result - .closure_info - .entry(*closure_id) - .or_insert_with(|| (Vec::new(), closure_kind)); - } - _ => {} - } - - // Deduction from where-clauses in scope, as well as fn-pointer coercion are handled here. - let _ = self.coerce(Some(closure_expr), closure_ty, &expected_ty, CoerceNever::Yes); - - // Coroutines are not Fn* so return early. - if matches!(closure_ty.kind(Interner), TyKind::Coroutine(..)) { - return; + fn fn_trait_kind_from_def_id(&self, trait_id: TraitId) -> Option { + let lang_item = self.db.lang_attr(trait_id.into())?; + match lang_item { + LangItem::Fn => Some(rustc_type_ir::ClosureKind::Fn), + LangItem::FnMut => Some(rustc_type_ir::ClosureKind::FnMut), + LangItem::FnOnce => Some(rustc_type_ir::ClosureKind::FnOnce), + _ => None, } + } - // Deduction based on the expected `dyn Fn` is done separately. - if let TyKind::Dyn(dyn_ty) = expected_ty.kind(Interner) - && let Some(sig) = self.deduce_sig_from_dyn_ty(dyn_ty) - { - let expected_sig_ty = TyKind::Function(sig).intern(Interner); - - self.unify(sig_ty, &expected_sig_ty); + fn async_fn_trait_kind_from_def_id( + &self, + trait_id: TraitId, + ) -> Option { + let lang_item = self.db.lang_attr(trait_id.into())?; + match lang_item { + LangItem::AsyncFn => Some(rustc_type_ir::ClosureKind::Fn), + LangItem::AsyncFnMut => Some(rustc_type_ir::ClosureKind::FnMut), + LangItem::AsyncFnOnce => Some(rustc_type_ir::ClosureKind::FnOnce), + _ => None, } } - // Closure kind deductions are mostly from `rustc_hir_typeck/src/closure.rs`. - // Might need to port closure sig deductions too. 
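The signature deduction that begins below also covers expectations of `dyn Fn*` type. A stable example (identifiers are ours) where the closure's parameter type is read off the trait object's projection bound:

```rust
fn call_dyn(f: &dyn Fn(String) -> usize) -> usize {
    f(String::from("hello"))
}

fn main() {
    // `s` is deduced to be `String` (and the return type `usize`) from the
    // expected `&dyn Fn(String) -> usize`, before the closure body is checked.
    assert_eq!(call_dyn(&|s| s.len()), 5);
}
```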
- pub(super) fn deduce_closure_signature( + /// Given the expected type, figures out what it can about this closure we + /// are about to type check: + fn deduce_closure_signature( &mut self, - expected_ty: &Ty, + expected_ty: Ty<'db>, closure_kind: ClosureKind, - ) -> (Option>, Option) { - match expected_ty.kind(Interner) { - TyKind::Alias(AliasTy::Opaque(OpaqueTy { .. })) | TyKind::OpaqueType(..) => { - let clauses = expected_ty.impl_trait_bounds(self.db).into_iter().flatten().map( - |b: chalk_ir::Binders>| { - b.into_value_and_skipped_binders().0 - }, - ); - self.deduce_closure_kind_from_predicate_clauses(expected_ty, clauses, closure_kind) - } - TyKind::Dyn(dyn_ty) => { - let sig = - dyn_ty.bounds.skip_binders().as_slice(Interner).iter().find_map(|bound| { - if let WhereClause::AliasEq(AliasEq { - alias: AliasTy::Projection(projection_ty), - ty: projected_ty, - }) = bound.skip_binders() - && let Some(sig) = self.deduce_sig_from_projection( - closure_kind, - projection_ty, - projected_ty, - ) - { - return Some(sig); - } - None - }); - - let kind = dyn_ty.principal().and_then(|principal_trait_ref| { - self.fn_trait_kind_from_trait_id(from_chalk_trait_id( - principal_trait_ref.skip_binders().skip_binders().trait_id, - )) + ) -> (Option>, Option) { + match expected_ty.kind() { + TyKind::Alias(rustc_type_ir::Opaque, AliasTy { def_id, args, .. }) => self + .deduce_closure_signature_from_predicates( + expected_ty, + closure_kind, + explicit_item_bounds(self.table.interner, def_id) + .iter_instantiated(self.table.interner, args) + .map(|clause| clause.as_predicate()), + ), + TyKind::Dynamic(object_type, ..) => { + let sig = object_type.projection_bounds().into_iter().find_map(|pb| { + let pb = + pb.with_self_ty(self.table.interner, Ty::new_unit(self.table.interner)); + self.deduce_sig_from_projection(closure_kind, pb) }); - + let kind = object_type + .principal_def_id() + .and_then(|did| self.fn_trait_kind_from_def_id(did.0)); (sig, kind) } - TyKind::InferenceVar(ty, chalk_ir::TyVariableKind::General) => { - let clauses = self.clauses_for_self_ty(*ty); - self.deduce_closure_kind_from_predicate_clauses( - expected_ty, - clauses.into_iter(), + TyKind::Infer(rustc_type_ir::TyVar(vid)) => self + .deduce_closure_signature_from_predicates( + Ty::new_var(self.table.interner, self.table.infer_ctxt.root_var(vid)), closure_kind, - ) - } - TyKind::Function(fn_ptr) => match closure_kind { - ClosureKind::Closure => (Some(fn_ptr.substitution.clone()), Some(FnTrait::Fn)), - ClosureKind::Async | ClosureKind::Coroutine(_) => (None, None), + self.table.obligations_for_self_ty(vid).into_iter().map(|obl| obl.predicate), + ), + TyKind::FnPtr(sig_tys, hdr) => match closure_kind { + ClosureKind::Closure => { + let expected_sig = sig_tys.with(hdr); + (Some(expected_sig), Some(rustc_type_ir::ClosureKind::Fn)) + } + ClosureKind::Coroutine(_) | ClosureKind::Async => (None, None), }, _ => (None, None), } } - fn deduce_closure_kind_from_predicate_clauses( + fn deduce_closure_signature_from_predicates( &mut self, - expected_ty: &Ty, - clauses: impl DoubleEndedIterator, + expected_ty: Ty<'db>, closure_kind: ClosureKind, - ) -> (Option>, Option) { + predicates: impl DoubleEndedIterator>, + ) -> (Option>, Option) { let mut expected_sig = None; let mut expected_kind = None; - for clause in elaborate_clause_supertraits(self.db, clauses.rev()) { + for pred in rustc_type_ir::elaborate::elaborate( + self.table.interner, + // Reverse the obligations here, since `elaborate_*` uses a stack, + // and we want to keep inference 
generally in the same order of + // the registered obligations. + predicates.rev(), + ) + // We only care about self bounds + .filter_only_self() + { + debug!(?pred); + let bound_predicate = pred.kind(); + + // Given a Projection predicate, we can potentially infer + // the complete signature. if expected_sig.is_none() - && let WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection), ty }) = - &clause + && let PredicateKind::Clause(ClauseKind::Projection(proj_predicate)) = + bound_predicate.skip_binder() { - let inferred_sig = self.deduce_sig_from_projection(closure_kind, projection, ty); + let inferred_sig = self.deduce_sig_from_projection( + closure_kind, + bound_predicate.rebind(proj_predicate), + ); + // Make sure that we didn't infer a signature that mentions itself. // This can happen when we elaborate certain supertrait bounds that - // mention projections containing the `Self` type. See rust-lang/rust#105401. - struct MentionsTy<'a> { - expected_ty: &'a Ty, + // mention projections containing the `Self` type. See #105401. + struct MentionsTy<'db> { + expected_ty: Ty<'db>, } - impl TypeVisitor for MentionsTy<'_> { - type BreakTy = (); - - fn interner(&self) -> Interner { - Interner - } + impl<'db> TypeVisitor> for MentionsTy<'db> { + type Result = ControlFlow<()>; - fn as_dyn( - &mut self, - ) -> &mut dyn TypeVisitor - { - self - } - - fn visit_ty(&mut self, t: &Ty, db: chalk_ir::DebruijnIndex) -> ControlFlow<()> { + fn visit_ty(&mut self, t: Ty<'db>) -> Self::Result { if t == self.expected_ty { ControlFlow::Break(()) } else { - t.super_visit_with(self, db) + t.super_visit_with(self) } } } - if inferred_sig - .visit_with(&mut MentionsTy { expected_ty }, chalk_ir::DebruijnIndex::INNERMOST) - .is_continue() - { - expected_sig = inferred_sig; - } - } - let trait_id = match clause { - WhereClause::AliasEq(AliasEq { - alias: AliasTy::Projection(projection), .. - }) => projection.trait_(self.db), - WhereClause::Implemented(trait_ref) => from_chalk_trait_id(trait_ref.trait_id), - _ => continue, - }; - if let Some(closure_kind) = self.fn_trait_kind_from_trait_id(trait_id) { - // always use the closure kind that is more permissive. - match (expected_kind, closure_kind) { - (None, _) => expected_kind = Some(closure_kind), - (Some(FnTrait::FnMut), FnTrait::Fn) => expected_kind = Some(FnTrait::Fn), - (Some(FnTrait::FnOnce), FnTrait::Fn | FnTrait::FnMut) => { - expected_kind = Some(closure_kind) + // Don't infer a closure signature from a goal that names the closure type as this will + // (almost always) lead to occurs check errors later in type checking. + if let Some(inferred_sig) = inferred_sig { + // In the new solver it is difficult to explicitly normalize the inferred signature as we + // would have to manually handle universes and rewriting bound vars and placeholders back + // and forth. + // + // Instead we take advantage of the fact that we relating an inference variable with an alias + // will only instantiate the variable if the alias is rigid(*not quite). Concretely we: + // - Create some new variable `?sig` + // - Equate `?sig` with the unnormalized signature, e.g. `fn( as Trait>::Assoc)` + // - Depending on whether ` as Trait>::Assoc` is rigid, ambiguous or normalizeable, + // we will either wind up with `?sig= as Trait>::Assoc/?y/ConcreteTy` respectively. + // + // *: In cases where there are ambiguous aliases in the signature that make use of bound vars + // they will wind up present in `?sig` even though they are non-rigid. 
+ // + // This is a bit weird and means we may wind up discarding the goal due to it naming `expected_ty` + // even though the normalized form may not name `expected_ty`. However, this matches the existing + // behaviour of the old solver and would be technically a breaking change to fix. + let generalized_fnptr_sig = self.table.next_ty_var(); + let inferred_fnptr_sig = Ty::new_fn_ptr(self.table.interner, inferred_sig); + // FIXME: Report diagnostics. + _ = self + .table + .infer_ctxt + .at(&ObligationCause::new(), self.table.param_env) + .eq(DefineOpaqueTypes::Yes, inferred_fnptr_sig, generalized_fnptr_sig) + .map(|infer_ok| self.table.register_infer_ok(infer_ok)); + + let resolved_sig = + self.table.infer_ctxt.resolve_vars_if_possible(generalized_fnptr_sig); + + if resolved_sig.visit_with(&mut MentionsTy { expected_ty }).is_continue() { + expected_sig = Some(resolved_sig.fn_sig(self.table.interner)); } - _ => {} + } else if inferred_sig.visit_with(&mut MentionsTy { expected_ty }).is_continue() { + expected_sig = inferred_sig; } } - } - - (expected_sig, expected_kind) - } - fn deduce_sig_from_dyn_ty(&self, dyn_ty: &DynTy) -> Option { - // Search for a predicate like `<$self as FnX>::Output == Ret` - - let fn_traits: SmallVec<[ChalkTraitId; 3]> = - utils::fn_traits(self.db, self.owner.module(self.db).krate()) - .map(to_chalk_trait_id) - .collect(); - - let self_ty = self.result.standard_types.unknown.clone(); - let bounds = dyn_ty.bounds.clone().substitute(Interner, &[self_ty.cast(Interner)]); - for bound in bounds.iter(Interner) { - // NOTE(skip_binders): the extracted types are rebound by the returned `FnPointer` - if let WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection), ty }) = - bound.skip_binders() - { - let assoc_data = - self.db.associated_ty_data(from_assoc_type_id(projection.associated_ty_id)); - if !fn_traits.contains(&assoc_data.trait_id) { - return None; + // Even if we can't infer the full signature, we may be able to + // infer the kind. This can occur when we elaborate a predicate + // like `F : Fn`. Note that due to subtyping we could encounter + // many viable options, so pick the most restrictive. + let trait_def_id = match bound_predicate.skip_binder() { + PredicateKind::Clause(ClauseKind::Projection(data)) => { + Some(data.projection_term.trait_def_id(self.table.interner).0) } + PredicateKind::Clause(ClauseKind::Trait(data)) => Some(data.def_id().0), + _ => None, + }; + + if let Some(trait_def_id) = trait_def_id { + let found_kind = match closure_kind { + ClosureKind::Closure => self.fn_trait_kind_from_def_id(trait_def_id), + ClosureKind::Async => self + .async_fn_trait_kind_from_def_id(trait_def_id) + .or_else(|| self.fn_trait_kind_from_def_id(trait_def_id)), + _ => None, + }; - // Skip `Self`, get the type argument. - let arg = projection.substitution.as_slice(Interner).get(1)?; - if let Some(subst) = arg.ty(Interner)?.as_tuple() { - let generic_args = subst.as_slice(Interner); - let mut sig_tys = Vec::with_capacity(generic_args.len() + 1); - for arg in generic_args { - sig_tys.push(arg.ty(Interner)?.clone()); + if let Some(found_kind) = found_kind { + // always use the closure kind that is more permissive. 
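A stable example of the "most permissive kind" rule applied below (identifiers are ours): elaborating `F: Fn() -> usize` also surfaces its `FnMut`/`FnOnce` supertraits, and keeping `Fn` is what the call site relies on.

```rust
fn constrain<F: Fn() -> usize>(f: F) -> F {
    f
}

fn main() {
    let data = vec![1, 2, 3];
    // The elaborated bounds mention `FnMut` and `FnOnce` as well, but the
    // deduced kind stays the most permissive one, `Fn`, matching a closure
    // that only reads `data` through a shared borrow.
    let f = constrain(|| data.len());
    assert_eq!(f() + f(), 6);
    assert_eq!(data.len(), 3);
}
```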
+ match (expected_kind, found_kind) { + (None, _) => expected_kind = Some(found_kind), + ( + Some(rustc_type_ir::ClosureKind::FnMut), + rustc_type_ir::ClosureKind::Fn, + ) => expected_kind = Some(rustc_type_ir::ClosureKind::Fn), + ( + Some(rustc_type_ir::ClosureKind::FnOnce), + rustc_type_ir::ClosureKind::Fn | rustc_type_ir::ClosureKind::FnMut, + ) => expected_kind = Some(found_kind), + _ => {} } - sig_tys.push(ty.clone()); - - cov_mark::hit!(dyn_fn_param_informs_call_site_closure_signature); - return Some(FnPointer { - num_binders: bound.len(Interner), - sig: FnSig { - abi: FnAbi::RustCall, - safety: chalk_ir::Safety::Safe, - variadic: false, - }, - substitution: FnSubst(Substitution::from_iter(Interner, sig_tys)), - }); } } } - None + (expected_sig, expected_kind) } + /// Given a projection like "::Result == Y", we can deduce + /// everything we need to know about a closure or coroutine. + /// + /// The `cause_span` should be the span that caused us to + /// have this expected signature, or `None` if we can't readily + /// know that. fn deduce_sig_from_projection( &mut self, closure_kind: ClosureKind, - projection_ty: &ProjectionTy, - projected_ty: &Ty, - ) -> Option> { - let container = - from_assoc_type_id(projection_ty.associated_ty_id).lookup(self.db).container; - let trait_ = match container { - hir_def::ItemContainerId::TraitId(trait_) => trait_, - _ => return None, - }; + projection: PolyProjectionPredicate<'db>, + ) -> Option> { + let SolverDefId::TypeAliasId(def_id) = projection.item_def_id() else { unreachable!() }; + let lang_item = self.db.lang_attr(def_id.into()); // For now, we only do signature deduction based off of the `Fn` and `AsyncFn` traits, // for closures and async closures, respectively. - let fn_trait_kind = self.fn_trait_kind_from_trait_id(trait_)?; - if !matches!(closure_kind, ClosureKind::Closure | ClosureKind::Async) { - return None; - } - if fn_trait_kind.is_async() { - // If the expected trait is `AsyncFn(...) -> X`, we don't know what the return type is, - // but we do know it must implement `Future`. - self.extract_async_fn_sig_from_projection(projection_ty, projected_ty) - } else { - self.extract_sig_from_projection(projection_ty, projected_ty) + match closure_kind { + ClosureKind::Closure if lang_item == Some(LangItem::FnOnceOutput) => { + self.extract_sig_from_projection(projection) + } + ClosureKind::Async if lang_item == Some(LangItem::AsyncFnOnceOutput) => { + self.extract_sig_from_projection(projection) + } + // It's possible we've passed the closure to a (somewhat out-of-fashion) + // `F: FnOnce() -> Fut, Fut: Future` style bound. Let's still + // guide inference here, since it's beneficial for the user. + ClosureKind::Async if lang_item == Some(LangItem::FnOnceOutput) => { + self.extract_sig_from_projection_and_future_bound(projection) + } + _ => None, } } + /// Given an `FnOnce::Output` or `AsyncFn::Output` projection, extract the args + /// and return type to infer a [`ty::PolyFnSig`] for the closure. 
fn extract_sig_from_projection( &self, - projection_ty: &ProjectionTy, - projected_ty: &Ty, - ) -> Option> { - let arg_param_ty = projection_ty.substitution.as_slice(Interner)[1].assert_ty_ref(Interner); - - let TyKind::Tuple(_, input_tys) = arg_param_ty.kind(Interner) else { - return None; - }; - - let ret_param_ty = projected_ty; + projection: PolyProjectionPredicate<'db>, + ) -> Option> { + let projection = self.table.infer_ctxt.resolve_vars_if_possible(projection); - Some(FnSubst(Substitution::from_iter( - Interner, - input_tys.iter(Interner).map(|t| t.cast(Interner)).chain(Some(GenericArg::new( - Interner, - chalk_ir::GenericArgData::Ty(ret_param_ty.clone()), - ))), - ))) - } - - fn extract_async_fn_sig_from_projection( - &mut self, - projection_ty: &ProjectionTy, - projected_ty: &Ty, - ) -> Option> { - let arg_param_ty = projection_ty.substitution.as_slice(Interner)[1].assert_ty_ref(Interner); + let arg_param_ty = projection.skip_binder().projection_term.args.type_at(1); + debug!(?arg_param_ty); - let TyKind::Tuple(_, input_tys) = arg_param_ty.kind(Interner) else { + let TyKind::Tuple(input_tys) = arg_param_ty.kind() else { return None; }; - let ret_param_future_output = projected_ty; - let ret_param_future = self.table.new_type_var(); - let future_output = - LangItem::FutureOutput.resolve_type_alias(self.db, self.resolver.krate())?; - let future_projection = crate::AliasTy::Projection(crate::ProjectionTy { - associated_ty_id: to_assoc_type_id(future_output), - substitution: Substitution::from1(Interner, ret_param_future.clone()), - }); - self.table.register_obligation( - crate::AliasEq { alias: future_projection, ty: ret_param_future_output.clone() } - .cast(Interner), - ); - - Some(FnSubst(Substitution::from_iter( - Interner, - input_tys.iter(Interner).map(|t| t.cast(Interner)).chain(Some(GenericArg::new( - Interner, - chalk_ir::GenericArgData::Ty(ret_param_future), - ))), - ))) - } + // Since this is a return parameter type it is safe to unwrap. + let ret_param_ty = projection.skip_binder().term.expect_type(); + debug!(?ret_param_ty); + + let sig = projection.rebind(self.table.interner.mk_fn_sig( + input_tys, + ret_param_ty, + false, + Safety::Safe, + FnAbi::Rust, + )); - fn fn_trait_kind_from_trait_id(&self, trait_id: hir_def::TraitId) -> Option { - FnTrait::from_lang_item(self.db.lang_attr(trait_id.into())?) + Some(sig) } - fn supplied_sig_of_closure( + /// When an async closure is passed to a function that has a "two-part" `Fn` + /// and `Future` trait bound, like: + /// + /// ```rust + /// use std::future::Future; + /// + /// fn not_exactly_an_async_closure(_f: F) + /// where + /// F: FnOnce(String, u32) -> Fut, + /// Fut: Future, + /// {} + /// ``` + /// + /// The we want to be able to extract the signature to guide inference in the async + /// closure. We will have two projection predicates registered in this case. First, + /// we identify the `FnOnce` bound, and if the output type is + /// an inference variable `?Fut`, we check if that is bounded by a `Future` + /// projection. + /// + /// This function is actually best-effort with the return type; if we don't find a + /// `Future` projection, we still will return arguments that we extracted from the `FnOnce` + /// projection, and the output will be an unconstrained type variable instead. 
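The call-site counterpart of the doc-comment example above, using a stable async closure (identifiers are ours): the argument types come from the `FnOnce` projection and the output is recovered through the `Future` bound.

```rust
use std::future::Future;

fn not_exactly_an_async_closure<F, Fut>(_f: F)
where
    F: FnOnce(String, u32) -> Fut,
    Fut: Future<Output = i32>,
{
}

fn main() {
    // `name` and `n` are unannotated: their types come from the `FnOnce`
    // projection, and the async body's output must be `i32`, found through
    // the `Future` bound on the otherwise unconstrained return type variable.
    not_exactly_an_async_closure(async |name, n| name.len() as i32 + n as i32);
}
```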
+ fn extract_sig_from_projection_and_future_bound( &mut self, - body: &ExprId, - ret_type: &Option, - arg_types: &[Option], - closure_kind: ClosureKind, - ) -> ClosureSignature { - let mut sig_tys = Vec::with_capacity(arg_types.len() + 1); - - // collect explicitly written argument types - for arg_type in arg_types.iter() { - let arg_ty = match arg_type { - // FIXME: I think rustc actually lowers closure params with `LifetimeElisionKind::AnonymousCreateParameter` - // (but the return type with infer). - Some(type_ref) => self.make_body_ty(*type_ref), - None => self.table.new_type_var(), - }; - sig_tys.push(arg_ty); - } + projection: PolyProjectionPredicate<'db>, + ) -> Option> { + let projection = self.table.infer_ctxt.resolve_vars_if_possible(projection); - // add return type - let ret_ty = match ret_type { - Some(type_ref) => self.make_body_ty(*type_ref), - None => self.table.new_type_var(), - }; - if let ClosureKind::Async = closure_kind { - sig_tys.push(self.lower_async_block_type_impl_trait(ret_ty.clone(), *body)); - } else { - sig_tys.push(ret_ty.clone()); - } + let arg_param_ty = projection.skip_binder().projection_term.args.type_at(1); + debug!(?arg_param_ty); - let expected_sig = FnPointer { - num_binders: 0, - sig: FnSig { abi: FnAbi::RustCall, safety: chalk_ir::Safety::Safe, variadic: false }, - substitution: FnSubst( - Substitution::from_iter(Interner, sig_tys.iter().cloned()).shifted_in(Interner), - ), + let TyKind::Tuple(input_tys) = arg_param_ty.kind() else { + return None; }; - ClosureSignature { ret_ty, expected_sig } - } + // If the return type is a type variable, look for bounds on it. + // We could theoretically support other kinds of return types here, + // but none of them would be useful, since async closures return + // concrete anonymous future types, and their futures are not coerced + // into any other type within the body of the async closure. + let TyKind::Infer(rustc_type_ir::TyVar(return_vid)) = + projection.skip_binder().term.expect_type().kind() + else { + return None; + }; - /// The return type is the signature of the closure, and the return type - /// *as represented inside the body* (so, for async closures, the `Output` ty) - pub(super) fn sig_of_closure( + // FIXME: We may want to elaborate here, though I assume this will be exceedingly rare. + let mut return_ty = None; + for bound in self.table.obligations_for_self_ty(return_vid) { + if let PredicateKind::Clause(ClauseKind::Projection(ret_projection)) = + bound.predicate.kind().skip_binder() + && let ret_projection = bound.predicate.kind().rebind(ret_projection) + && let Some(ret_projection) = ret_projection.no_bound_vars() + && let SolverDefId::TypeAliasId(assoc_type) = ret_projection.def_id() + && self.db.lang_attr(assoc_type.into()) == Some(LangItem::FutureOutput) + { + return_ty = Some(ret_projection.term.expect_type()); + break; + } + } + + // SUBTLE: If we didn't find a `Future` bound for the return + // vid, we still want to attempt to provide inference guidance for the async + // closure's arguments. Instantiate a new vid to plug into the output type. + // + // You may be wondering, what if it's higher-ranked? Well, given that we + // found a type variable for the `FnOnce::Output` projection above, we know + // that the output can't mention any of the vars. + // + // Also note that we use a fresh var here for the signature since the signature + // records the output of the *future*, and `return_vid` above is the type + // variable of the future, not its output. 
+ // + // FIXME: We probably should store this signature inference output in a way + // that does not misuse a `FnSig` type, but that can be done separately. + let return_ty = return_ty.unwrap_or_else(|| self.table.next_ty_var()); + + let sig = projection.rebind(self.table.interner.mk_fn_sig( + input_tys, + return_ty, + false, + Safety::Safe, + FnAbi::Rust, + )); + + Some(sig) + } + + fn sig_of_closure( &mut self, - body: &ExprId, - ret_type: &Option, - arg_types: &[Option], - closure_kind: ClosureKind, - expected_sig: Option>, - ) -> ClosureSignature { + decl_inputs: &[Option], + decl_output: Option, + expected_sig: Option>, + ) -> ClosureSignatures<'db> { if let Some(e) = expected_sig { - self.sig_of_closure_with_expectation(body, ret_type, arg_types, closure_kind, e) + self.sig_of_closure_with_expectation(decl_inputs, decl_output, e) } else { - self.sig_of_closure_no_expectation(body, ret_type, arg_types, closure_kind) + self.sig_of_closure_no_expectation(decl_inputs, decl_output) } } + /// If there is no expected signature, then we will convert the + /// types that the user gave into a signature. fn sig_of_closure_no_expectation( &mut self, - body: &ExprId, - ret_type: &Option, - arg_types: &[Option], - closure_kind: ClosureKind, - ) -> ClosureSignature { - self.supplied_sig_of_closure(body, ret_type, arg_types, closure_kind) - } - - fn sig_of_closure_with_expectation( - &mut self, - body: &ExprId, - ret_type: &Option, - arg_types: &[Option], - closure_kind: ClosureKind, - expected_sig: FnSubst, - ) -> ClosureSignature { - let expected_sig = FnPointer { - num_binders: 0, - sig: FnSig { abi: FnAbi::RustCall, safety: chalk_ir::Safety::Safe, variadic: false }, - substitution: expected_sig, - }; - - // If the expected signature does not match the actual arg types, - // then just return the expected signature - if expected_sig.substitution.0.len(Interner) != arg_types.len() + 1 { - let ret_ty = match ret_type { - Some(type_ref) => self.make_body_ty(*type_ref), - None => self.table.new_type_var(), - }; - return ClosureSignature { expected_sig, ret_ty }; - } - - self.merge_supplied_sig_with_expectation( - body, - ret_type, - arg_types, - closure_kind, - expected_sig, - ) - } - - fn merge_supplied_sig_with_expectation( - &mut self, - body: &ExprId, - ret_type: &Option, - arg_types: &[Option], - closure_kind: ClosureKind, - expected_sig: FnPointer, - ) -> ClosureSignature { - let supplied_sig = self.supplied_sig_of_closure(body, ret_type, arg_types, closure_kind); - - let snapshot = self.table.snapshot(); - if !self.table.unify(&expected_sig.substitution, &supplied_sig.expected_sig.substitution) { - self.table.rollback_to(snapshot); - } - - supplied_sig - } -} - -// The below functions handle capture and closure kind (Fn, FnMut, ..) 
- -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub(crate) struct HirPlace { - pub(crate) local: BindingId, - pub(crate) projections: Vec>, -} - -impl HirPlace { - fn ty(&self, ctx: &mut InferenceContext<'_>) -> Ty { - let mut ty = ctx.table.resolve_completely(ctx.result[self.local].clone()); - for p in &self.projections { - ty = p.projected_ty( - ty, - ctx.db, - |_, _, _| { - unreachable!("Closure field only happens in MIR"); - }, - ctx.owner.module(ctx.db).krate(), - ); - } - ty - } + decl_inputs: &[Option], + decl_output: Option, + ) -> ClosureSignatures<'db> { + let bound_sig = self.supplied_sig_of_closure(decl_inputs, decl_output); - fn capture_kind_of_truncated_place( - &self, - mut current_capture: CaptureKind, - len: usize, - ) -> CaptureKind { - if let CaptureKind::ByRef(BorrowKind::Mut { - kind: MutBorrowKind::Default | MutBorrowKind::TwoPhasedBorrow, - }) = current_capture - && self.projections[len..].contains(&ProjectionElem::Deref) - { - current_capture = - CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture }); - } - current_capture + self.closure_sigs(bound_sig) } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] -pub enum CaptureKind { - ByRef(BorrowKind), - ByValue, -} -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct CapturedItem { - pub(crate) place: HirPlace, - pub(crate) kind: CaptureKind, - /// The inner vec is the stacks; the outer vec is for each capture reference. + /// Invoked to compute the signature of a closure expression. This + /// combines any user-provided type annotations (e.g., `|x: u32| + /// -> u32 { .. }`) with the expected signature. /// - /// Even though we always report only the last span (i.e. the most inclusive span), - /// we need to keep them all, since when a closure occurs inside a closure, we - /// copy all captures of the inner closure to the outer closure, and then we may - /// truncate them, and we want the correct span to be reported. - span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>, - pub(crate) ty: Binders, -} - -impl CapturedItem { - pub fn local(&self) -> BindingId { - self.place.local - } - - /// Returns whether this place has any field (aka. non-deref) projections. - pub fn has_field_projections(&self) -> bool { - self.place.projections.iter().any(|it| !matches!(it, ProjectionElem::Deref)) - } - - pub fn ty(&self, subst: &Substitution) -> Ty { - self.ty.clone().substitute(Interner, utils::ClosureSubst(subst).parent_subst()) - } - - pub fn kind(&self) -> CaptureKind { - self.kind - } - - pub fn spans(&self) -> SmallVec<[MirSpan; 3]> { - self.span_stacks.iter().map(|stack| *stack.last().expect("empty span stack")).collect() - } - - /// Converts the place to a name that can be inserted into source code. 
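A stable, edition-2021 example of the place-based captures that `HirPlace` models (struct and variable names are ours): the closure captures the place `p.x`, not the whole variable `p`.

```rust
struct Point {
    x: i32,
    y: i32,
}

fn main() {
    let mut p = Point { x: 1, y: 2 };
    // The capture is the place `p.x` (a local plus one field projection),
    // taken by mutable reference; `p.y` is a disjoint place and stays usable
    // while the closure is live.
    let mut bump_x = || p.x += 1;
    p.y += 10;
    bump_x();
    assert_eq!((p.x, p.y), (2, 12));
}
```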
- pub fn place_to_name(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String { - let body = db.body(owner); - let mut result = body[self.place.local].name.as_str().to_owned(); - for proj in &self.place.projections { - match proj { - ProjectionElem::Deref => {} - ProjectionElem::Field(Either::Left(f)) => { - let variant_data = f.parent.fields(db); - match variant_data.shape { - FieldsShape::Record => { - result.push('_'); - result.push_str(variant_data.fields()[f.local_id].name.as_str()) - } - FieldsShape::Tuple => { - let index = - variant_data.fields().iter().position(|it| it.0 == f.local_id); - if let Some(index) = index { - format_to!(result, "_{index}"); - } - } - FieldsShape::Unit => {} - } - } - ProjectionElem::Field(Either::Right(f)) => format_to!(result, "_{}", f.index), - &ProjectionElem::ClosureField(field) => format_to!(result, "_{field}"), - ProjectionElem::Index(_) - | ProjectionElem::ConstantIndex { .. } - | ProjectionElem::Subslice { .. } - | ProjectionElem::OpaqueCast(_) => { - never!("Not happen in closure capture"); - continue; - } - } - } - if is_raw_identifier(&result, owner.module(db).krate().data(db).edition) { - result.insert_str(0, "r#"); - } - result - } - - pub fn display_place_source_code(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String { - let body = db.body(owner); - let krate = owner.krate(db); - let edition = krate.data(db).edition; - let mut result = body[self.place.local].name.display(db, edition).to_string(); - for proj in &self.place.projections { - match proj { - // In source code autoderef kicks in. - ProjectionElem::Deref => {} - ProjectionElem::Field(Either::Left(f)) => { - let variant_data = f.parent.fields(db); - match variant_data.shape { - FieldsShape::Record => format_to!( - result, - ".{}", - variant_data.fields()[f.local_id].name.display(db, edition) - ), - FieldsShape::Tuple => format_to!( - result, - ".{}", - variant_data - .fields() - .iter() - .position(|it| it.0 == f.local_id) - .unwrap_or_default() - ), - FieldsShape::Unit => {} - } - } - ProjectionElem::Field(Either::Right(f)) => { - let field = f.index; - format_to!(result, ".{field}"); - } - &ProjectionElem::ClosureField(field) => { - format_to!(result, ".{field}"); - } - ProjectionElem::Index(_) - | ProjectionElem::ConstantIndex { .. } - | ProjectionElem::Subslice { .. 
} - | ProjectionElem::OpaqueCast(_) => { - never!("Not happen in closure capture"); - continue; - } - } - } - let final_derefs_count = self - .place - .projections - .iter() - .rev() - .take_while(|proj| matches!(proj, ProjectionElem::Deref)) - .count(); - result.insert_str(0, &"*".repeat(final_derefs_count)); - result - } - - pub fn display_place(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String { - let body = db.body(owner); - let krate = owner.krate(db); - let edition = krate.data(db).edition; - let mut result = body[self.place.local].name.display(db, edition).to_string(); - let mut field_need_paren = false; - for proj in &self.place.projections { - match proj { - ProjectionElem::Deref => { - result = format!("*{result}"); - field_need_paren = true; - } - ProjectionElem::Field(Either::Left(f)) => { - if field_need_paren { - result = format!("({result})"); - } - let variant_data = f.parent.fields(db); - let field = match variant_data.shape { - FieldsShape::Record => { - variant_data.fields()[f.local_id].name.as_str().to_owned() - } - FieldsShape::Tuple => variant_data - .fields() - .iter() - .position(|it| it.0 == f.local_id) - .unwrap_or_default() - .to_string(), - FieldsShape::Unit => "[missing field]".to_owned(), - }; - result = format!("{result}.{field}"); - field_need_paren = false; - } - ProjectionElem::Field(Either::Right(f)) => { - let field = f.index; - if field_need_paren { - result = format!("({result})"); - } - result = format!("{result}.{field}"); - field_need_paren = false; - } - &ProjectionElem::ClosureField(field) => { - if field_need_paren { - result = format!("({result})"); - } - result = format!("{result}.{field}"); - field_need_paren = false; - } - ProjectionElem::Index(_) - | ProjectionElem::ConstantIndex { .. } - | ProjectionElem::Subslice { .. } - | ProjectionElem::OpaqueCast(_) => { - never!("Not happen in closure capture"); - continue; - } - } - } - result - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub(crate) struct CapturedItemWithoutTy { - pub(crate) place: HirPlace, - pub(crate) kind: CaptureKind, - /// The inner vec is the stacks; the outer vec is for each capture reference. - pub(crate) span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>, -} - -impl CapturedItemWithoutTy { - fn with_ty(self, ctx: &mut InferenceContext<'_>) -> CapturedItem { - let ty = self.place.ty(ctx); - let ty = match &self.kind { - CaptureKind::ByValue => ty, - CaptureKind::ByRef(bk) => { - let m = match bk { - BorrowKind::Mut { .. 
} => Mutability::Mut, - _ => Mutability::Not, - }; - TyKind::Ref(m, error_lifetime(), ty).intern(Interner) - } - }; - return CapturedItem { - place: self.place, - kind: self.kind, - span_stacks: self.span_stacks, - ty: replace_placeholder_with_binder(ctx, ty), - }; - - fn replace_placeholder_with_binder(ctx: &mut InferenceContext<'_>, ty: Ty) -> Binders { - struct Filler<'a> { - db: &'a dyn HirDatabase, - generics: &'a Generics, - } - impl FallibleTypeFolder for Filler<'_> { - type Error = (); - - fn as_dyn(&mut self) -> &mut dyn FallibleTypeFolder { - self - } - - fn interner(&self) -> Interner { - Interner - } - - fn try_fold_free_placeholder_const( - &mut self, - ty: chalk_ir::Ty, - idx: chalk_ir::PlaceholderIndex, - outer_binder: DebruijnIndex, - ) -> Result, Self::Error> { - let x = from_placeholder_idx(self.db, idx); - let Some(idx) = self.generics.type_or_const_param_idx(x) else { - return Err(()); - }; - Ok(BoundVar::new(outer_binder, idx).to_const(Interner, ty)) - } - - fn try_fold_free_placeholder_ty( - &mut self, - idx: chalk_ir::PlaceholderIndex, - outer_binder: DebruijnIndex, - ) -> std::result::Result { - let x = from_placeholder_idx(self.db, idx); - let Some(idx) = self.generics.type_or_const_param_idx(x) else { - return Err(()); - }; - Ok(BoundVar::new(outer_binder, idx).to_ty(Interner)) - } - } - let filler = &mut Filler { db: ctx.db, generics: ctx.generics() }; - let result = ty.clone().try_fold_with(filler, DebruijnIndex::INNERMOST).unwrap_or(ty); - make_binders(ctx.db, filler.generics, result) - } - } -} - -impl InferenceContext<'_> { - fn place_of_expr(&mut self, tgt_expr: ExprId) -> Option { - let r = self.place_of_expr_without_adjust(tgt_expr)?; - let adjustments = - self.result.expr_adjustments.get(&tgt_expr).map(|it| &**it).unwrap_or_default(); - apply_adjusts_to_place(&mut self.current_capture_span_stack, r, adjustments) - } - - /// Pushes the span into `current_capture_span_stack`, *without clearing it first*. - fn path_place(&mut self, path: &Path, id: ExprOrPatId) -> Option { - if path.type_anchor().is_some() { - return None; - } - let hygiene = self.body.expr_or_pat_path_hygiene(id); - self.resolver.resolve_path_in_value_ns_fully(self.db, path, hygiene).and_then(|result| { - match result { - ValueNs::LocalBinding(binding) => { - let mir_span = match id { - ExprOrPatId::ExprId(id) => MirSpan::ExprId(id), - ExprOrPatId::PatId(id) => MirSpan::PatId(id), - }; - self.current_capture_span_stack.push(mir_span); - Some(HirPlace { local: binding, projections: Vec::new() }) - } - _ => None, - } - }) - } + /// The approach is as follows: + /// + /// - Let `S` be the (higher-ranked) signature that we derive from the user's annotations. + /// - Let `E` be the (higher-ranked) signature that we derive from the expectations, if any. + /// - If we have no expectation `E`, then the signature of the closure is `S`. + /// - Otherwise, the signature of the closure is E. Moreover: + /// - Skolemize the late-bound regions in `E`, yielding `E'`. + /// - Instantiate all the late-bound regions bound in the closure within `S` + /// with fresh (existential) variables, yielding `S'` + /// - Require that `E' = S'` + /// - We could use some kind of subtyping relationship here, + /// I imagine, but equality is easier and works fine for + /// our purposes. + /// + /// The key intuition here is that the user's types must be valid + /// from "the inside" of the closure, but the expectation + /// ultimately drives the overall signature. 
+    ///
+    /// # Examples
+    ///
+    /// ```ignore (illustrative)
+    /// fn with_closure<F>(_: F)
+    ///     where F: Fn(&u32) -> &u32 { .. }
+    ///
+    /// with_closure(|x: &u32| { ... })
+    /// ```
+    ///
+    /// Here:
+    /// - E would be `fn(&u32) -> &u32`.
+    /// - S would be `fn(&u32) -> ?T`
+    /// - E' is `&'!0 u32 -> &'!0 u32`
+    /// - S' is `&'?0 u32 -> ?T`
+    ///
+    /// S' can be unified with E' with `['?0 = '!0, ?T = &'!0 u32]`.
+    ///
+    /// # Arguments
+    ///
+    /// - `decl_inputs`: the types the user wrote for the closure's parameters, if any
+    /// - `decl_output`: the return type the user wrote for the closure, if any
+    /// - `expected_sig`: the expected signature (if any). Note that
+    ///   this is missing a binder: that is, there may be late-bound
+    ///   regions with depth 1, which are then bound by the closure.
+    fn sig_of_closure_with_expectation(
+        &mut self,
+        decl_inputs: &[Option<TypeRefId>],
+        decl_output: Option<TypeRefId>,
+        expected_sig: PolyFnSig<'db>,
+    ) -> ClosureSignatures<'db> {
+        // Watch out for some surprises and just ignore the
+        // expectation if things don't seem to match up with what we
+        // expect.
+        if expected_sig.c_variadic() {
+            return self.sig_of_closure_no_expectation(decl_inputs, decl_output);
+        } else if expected_sig.skip_binder().inputs_and_output.len() != decl_inputs.len() + 1 {
+            return self
+                .sig_of_closure_with_mismatched_number_of_arguments(decl_inputs, decl_output);
+        }
+
+        // Create a `PolyFnSig`. Note the oddity that late bound
+        // regions appearing free in `expected_sig` are now bound up
+        // in this binder we are creating.
+        assert!(!expected_sig.skip_binder().has_vars_bound_above(rustc_type_ir::INNERMOST));
+        let bound_sig = expected_sig.map_bound(|sig| {
+            self.table.interner.mk_fn_sig(
+                sig.inputs(),
+                sig.output(),
+                sig.c_variadic,
+                Safety::Safe,
+                FnAbi::RustCall,
+            )
+        });
-
-    /// Changes `current_capture_span_stack` to contain the stack of spans for this expr.
-    fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
-        self.current_capture_span_stack.clear();
-        match &self.body[tgt_expr] {
-            Expr::Path(p) => {
-                let resolver_guard =
-                    self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr);
-                let result = self.path_place(p, tgt_expr.into());
-                self.resolver.reset_to_guard(resolver_guard);
-                return result;
-            }
-            Expr::Field { expr, name: _ } => {
-                let mut place = self.place_of_expr(*expr)?;
-                let field = self.result.field_resolution(tgt_expr)?;
-                self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
-                place.projections.push(ProjectionElem::Field(field));
-                return Some(place);
-            }
-            Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
-                if matches!(
-                    self.expr_ty_after_adjustments(*expr).kind(Interner),
-                    TyKind::Ref(..) | TyKind::Raw(..)
-                ) {
-                    let mut place = self.place_of_expr(*expr)?;
-                    self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
-                    place.projections.push(ProjectionElem::Deref);
-                    return Some(place);
-                }
-            }
-            _ => (),
-        }
-        None
-    }
+
+        // `deduce_expectations_from_expected_type` introduces
+        // late-bound lifetimes defined elsewhere, which we now
+        // anonymize away, so as not to confuse the user.
+ let bound_sig = self.table.interner.anonymize_bound_vars(bound_sig); - fn push_capture(&mut self, place: HirPlace, kind: CaptureKind) { - self.current_captures.push(CapturedItemWithoutTy { - place, - kind, - span_stacks: smallvec![self.current_capture_span_stack.iter().copied().collect()], - }); - } + let closure_sigs = self.closure_sigs(bound_sig); - fn truncate_capture_spans(&self, capture: &mut CapturedItemWithoutTy, mut truncate_to: usize) { - // The first span is the identifier, and it must always remain. - truncate_to += 1; - for span_stack in &mut capture.span_stacks { - let mut remained = truncate_to; - let mut actual_truncate_to = 0; - for &span in &*span_stack { - actual_truncate_to += 1; - if !span.is_ref_span(self.body) { - remained -= 1; - if remained == 0 { - break; - } - } - } - if actual_truncate_to < span_stack.len() - && span_stack[actual_truncate_to].is_ref_span(self.body) - { - // Include the ref operator if there is one, we will fix it later (in `strip_captures_ref_span()`) if it's incorrect. - actual_truncate_to += 1; - } - span_stack.truncate(actual_truncate_to); + // Up till this point, we have ignored the annotations that the user + // gave. This function will check that they unify successfully. + // Along the way, it also writes out entries for types that the user + // wrote into our typeck results, which are then later used by the privacy + // check. + match self.merge_supplied_sig_with_expectation(decl_inputs, decl_output, closure_sigs) { + Ok(infer_ok) => self.table.register_infer_ok(infer_ok), + Err(_) => self.sig_of_closure_no_expectation(decl_inputs, decl_output), } } - fn ref_expr(&mut self, expr: ExprId, place: Option) { - if let Some(place) = place { - self.add_capture(place, CaptureKind::ByRef(BorrowKind::Shared)); - } - self.walk_expr(expr); - } + fn sig_of_closure_with_mismatched_number_of_arguments( + &mut self, + decl_inputs: &[Option], + decl_output: Option, + ) -> ClosureSignatures<'db> { + let error_sig = self.error_sig_of_closure(decl_inputs, decl_output); - fn add_capture(&mut self, place: HirPlace, kind: CaptureKind) { - if self.is_upvar(&place) { - self.push_capture(place, kind); - } + self.closure_sigs(error_sig) } - fn mutate_path_pat(&mut self, path: &Path, id: PatId) { - if let Some(place) = self.path_place(path, id.into()) { - self.add_capture( - place, - CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }), + /// Enforce the user's types against the expectation. See + /// `sig_of_closure_with_expectation` for details on the overall + /// strategy. + fn merge_supplied_sig_with_expectation( + &mut self, + decl_inputs: &[Option], + decl_output: Option, + mut expected_sigs: ClosureSignatures<'db>, + ) -> InferResult<'db, ClosureSignatures<'db>> { + // Get the signature S that the user gave. + // + // (See comment on `sig_of_closure_with_expectation` for the + // meaning of these letters.) + let supplied_sig = self.supplied_sig_of_closure(decl_inputs, decl_output); + + debug!(?supplied_sig); + + // FIXME(#45727): As discussed in [this comment][c1], naively + // forcing equality here actually results in suboptimal error + // messages in some cases. For now, if there would have been + // an obvious error, we fallback to declaring the type of the + // closure to be the one the user gave, which allows other + // error message code to trigger. 
+ // + // However, I think [there is potential to do even better + // here][c2], since in *this* code we have the precise span of + // the type parameter in question in hand when we report the + // error. + // + // [c1]: https://github.com/rust-lang/rust/pull/45072#issuecomment-341089706 + // [c2]: https://github.com/rust-lang/rust/pull/45072#issuecomment-341096796 + self.table.commit_if_ok(|table| { + let mut all_obligations = PredicateObligations::new(); + let supplied_sig = table.infer_ctxt.instantiate_binder_with_fresh_vars( + BoundRegionConversionTime::FnCall, + supplied_sig, ); - self.current_capture_span_stack.pop(); // Remove the pattern span. - } - } - fn mutate_expr(&mut self, expr: ExprId, place: Option) { - if let Some(place) = place { - self.add_capture( - place, - CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }), + // The liberated version of this signature should be a subtype + // of the liberated form of the expectation. + for (supplied_ty, expected_ty) in + iter::zip(supplied_sig.inputs(), expected_sigs.liberated_sig.inputs()) + { + // Check that E' = S'. + let cause = ObligationCause::new(); + let InferOk { value: (), obligations } = table + .infer_ctxt + .at(&cause, table.param_env) + .eq(DefineOpaqueTypes::Yes, expected_ty, supplied_ty)?; + all_obligations.extend(obligations); + } + + let supplied_output_ty = supplied_sig.output(); + let cause = ObligationCause::new(); + let InferOk { value: (), obligations } = + table.infer_ctxt.at(&cause, table.param_env).eq( + DefineOpaqueTypes::Yes, + expected_sigs.liberated_sig.output(), + supplied_output_ty, + )?; + all_obligations.extend(obligations); + + let inputs = supplied_sig + .inputs() + .into_iter() + .map(|ty| table.infer_ctxt.resolve_vars_if_possible(ty)); + + expected_sigs.liberated_sig = table.interner.mk_fn_sig( + inputs, + supplied_output_ty, + expected_sigs.liberated_sig.c_variadic, + Safety::Safe, + FnAbi::RustCall, ); - } - self.walk_expr(expr); - } - fn consume_expr(&mut self, expr: ExprId) { - if let Some(place) = self.place_of_expr(expr) { - self.consume_place(place); - } - self.walk_expr(expr); + Ok(InferOk { value: expected_sigs, obligations: all_obligations }) + }) } - fn consume_place(&mut self, place: HirPlace) { - if self.is_upvar(&place) { - let ty = place.ty(self); - let kind = if self.is_ty_copy(ty) { - CaptureKind::ByRef(BorrowKind::Shared) - } else { - CaptureKind::ByValue - }; - self.push_capture(place, kind); - } - } + /// If there is no expected signature, then we will convert the + /// types that the user gave into a signature. + /// + /// Also, record this closure signature for later. 
+ fn supplied_sig_of_closure( + &mut self, + decl_inputs: &[Option], + decl_output: Option, + ) -> PolyFnSig<'db> { + let interner = self.table.interner; - fn walk_expr_with_adjust(&mut self, tgt_expr: ExprId, adjustment: &[Adjustment]) { - if let Some((last, rest)) = adjustment.split_last() { - match &last.kind { - Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => { - self.walk_expr_with_adjust(tgt_expr, rest) - } - Adjust::Deref(Some(m)) => match m.0 { - Some(m) => { - self.ref_capture_with_adjusts(m, tgt_expr, rest); - } - None => unreachable!(), - }, - Adjust::Borrow(b) => { - self.ref_capture_with_adjusts(b.mutability(), tgt_expr, rest); - } + let supplied_return = match decl_output { + Some(output) => { + let output = self.make_body_ty(output); + self.process_user_written_ty(output).to_nextsolver(interner) } - } else { - self.walk_expr_without_adjust(tgt_expr); - } - } - - fn ref_capture_with_adjusts(&mut self, m: Mutability, tgt_expr: ExprId, rest: &[Adjustment]) { - let capture_kind = match m { - Mutability::Mut => CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }), - Mutability::Not => CaptureKind::ByRef(BorrowKind::Shared), + None => self.table.next_ty_var(), }; - if let Some(place) = self.place_of_expr_without_adjust(tgt_expr) - && let Some(place) = - apply_adjusts_to_place(&mut self.current_capture_span_stack, place, rest) - { - self.add_capture(place, capture_kind); - } - self.walk_expr_with_adjust(tgt_expr, rest); - } - - fn walk_expr(&mut self, tgt_expr: ExprId) { - if let Some(it) = self.result.expr_adjustments.get_mut(&tgt_expr) { - // FIXME: this take is completely unneeded, and just is here to make borrow checker - // happy. Remove it if you can. - let x_taken = mem::take(it); - self.walk_expr_with_adjust(tgt_expr, &x_taken); - *self.result.expr_adjustments.get_mut(&tgt_expr).unwrap() = x_taken; - } else { - self.walk_expr_without_adjust(tgt_expr); - } - } - - fn walk_expr_without_adjust(&mut self, tgt_expr: ExprId) { - match &self.body[tgt_expr] { - Expr::OffsetOf(_) => (), - Expr::InlineAsm(e) => e.operands.iter().for_each(|(_, op)| match op { - AsmOperand::In { expr, .. } - | AsmOperand::Out { expr: Some(expr), .. } - | AsmOperand::InOut { expr, .. } => self.walk_expr_without_adjust(*expr), - AsmOperand::SplitInOut { in_expr, out_expr, .. } => { - self.walk_expr_without_adjust(*in_expr); - if let Some(out_expr) = out_expr { - self.walk_expr_without_adjust(*out_expr); - } - } - AsmOperand::Out { expr: None, .. } - | AsmOperand::Const(_) - | AsmOperand::Label(_) - | AsmOperand::Sym(_) => (), - }), - Expr::If { condition, then_branch, else_branch } => { - self.consume_expr(*condition); - self.consume_expr(*then_branch); - if let &Some(expr) = else_branch { - self.consume_expr(expr); - } - } - Expr::Async { statements, tail, .. } - | Expr::Unsafe { statements, tail, .. } - | Expr::Block { statements, tail, .. 
} => { - for s in statements.iter() { - match s { - Statement::Let { pat, type_ref: _, initializer, else_branch } => { - if let Some(else_branch) = else_branch { - self.consume_expr(*else_branch); - } - if let Some(initializer) = initializer { - if else_branch.is_some() { - self.consume_expr(*initializer); - } else { - self.walk_expr(*initializer); - } - if let Some(place) = self.place_of_expr(*initializer) { - self.consume_with_pat(place, *pat); - } - } - } - Statement::Expr { expr, has_semi: _ } => { - self.consume_expr(*expr); - } - Statement::Item(_) => (), - } - } - if let Some(tail) = tail { - self.consume_expr(*tail); - } - } - Expr::Call { callee, args } => { - self.consume_expr(*callee); - self.consume_exprs(args.iter().copied()); - } - Expr::MethodCall { receiver, args, .. } => { - self.consume_expr(*receiver); - self.consume_exprs(args.iter().copied()); - } - Expr::Match { expr, arms } => { - for arm in arms.iter() { - self.consume_expr(arm.expr); - if let Some(guard) = arm.guard { - self.consume_expr(guard); - } - } - self.walk_expr(*expr); - if let Some(discr_place) = self.place_of_expr(*expr) - && self.is_upvar(&discr_place) - { - let mut capture_mode = None; - for arm in arms.iter() { - self.walk_pat(&mut capture_mode, arm.pat); - } - if let Some(c) = capture_mode { - self.push_capture(discr_place, c); - } - } - } - Expr::Break { expr, label: _ } - | Expr::Return { expr } - | Expr::Yield { expr } - | Expr::Yeet { expr } => { - if let &Some(expr) = expr { - self.consume_expr(expr); - } - } - &Expr::Become { expr } => { - self.consume_expr(expr); - } - Expr::RecordLit { fields, spread, .. } => { - if let &Some(expr) = spread { - self.consume_expr(expr); - } - self.consume_exprs(fields.iter().map(|it| it.expr)); - } - Expr::Field { expr, name: _ } => self.select_from_expr(*expr), - Expr::UnaryOp { expr, op: UnaryOp::Deref } => { - if matches!( - self.expr_ty_after_adjustments(*expr).kind(Interner), - TyKind::Ref(..) | TyKind::Raw(..) - ) { - self.select_from_expr(*expr); - } else if let Some((f, _)) = self.result.method_resolution(tgt_expr) { - let mutability = 'b: { - if let Some(deref_trait) = - self.resolve_lang_item(LangItem::DerefMut).and_then(|it| it.as_trait()) - && let Some(deref_fn) = deref_trait - .trait_items(self.db) - .method_by_name(&Name::new_symbol_root(sym::deref_mut)) - { - break 'b deref_fn == f; - } - false - }; - let place = self.place_of_expr(*expr); - if mutability { - self.mutate_expr(*expr, place); - } else { - self.ref_expr(*expr, place); - } - } else { - self.select_from_expr(*expr); - } - } - Expr::Let { pat, expr } => { - self.walk_expr(*expr); - if let Some(place) = self.place_of_expr(*expr) { - self.consume_with_pat(place, *pat); - } - } - Expr::UnaryOp { expr, op: _ } - | Expr::Array(Array::Repeat { initializer: expr, repeat: _ }) - | Expr::Await { expr } - | Expr::Loop { body: expr, label: _ } - | Expr::Box { expr } - | Expr::Cast { expr, type_ref: _ } => { - self.consume_expr(*expr); - } - Expr::Ref { expr, rawness: _, mutability } => { - // We need to do this before we push the span so the order will be correct. - let place = self.place_of_expr(*expr); - self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr)); - match mutability { - hir_def::type_ref::Mutability::Shared => self.ref_expr(*expr, place), - hir_def::type_ref::Mutability::Mut => self.mutate_expr(*expr, place), - } - } - Expr::BinaryOp { lhs, rhs, op } => { - let Some(op) = op else { - return; - }; - if matches!(op, BinaryOp::Assignment { .. 
}) { - let place = self.place_of_expr(*lhs); - self.mutate_expr(*lhs, place); - self.consume_expr(*rhs); - return; - } - self.consume_expr(*lhs); - self.consume_expr(*rhs); - } - Expr::Range { lhs, rhs, range_type: _ } => { - if let &Some(expr) = lhs { - self.consume_expr(expr); - } - if let &Some(expr) = rhs { - self.consume_expr(expr); - } - } - Expr::Index { base, index } => { - self.select_from_expr(*base); - self.consume_expr(*index); - } - Expr::Closure { .. } => { - let ty = self.expr_ty(tgt_expr); - let TyKind::Closure(id, _) = ty.kind(Interner) else { - never!("closure type is always closure"); - return; - }; - let (captures, _) = - self.result.closure_info.get(id).expect( - "We sort closures, so we should always have data for inner closures", - ); - let mut cc = mem::take(&mut self.current_captures); - cc.extend(captures.iter().filter(|it| self.is_upvar(&it.place)).map(|it| { - CapturedItemWithoutTy { - place: it.place.clone(), - kind: it.kind, - span_stacks: it.span_stacks.clone(), - } - })); - self.current_captures = cc; - } - Expr::Array(Array::ElementList { elements: exprs }) | Expr::Tuple { exprs } => { - self.consume_exprs(exprs.iter().copied()) - } - &Expr::Assignment { target, value } => { - self.walk_expr(value); - let resolver_guard = - self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr); - match self.place_of_expr(value) { - Some(rhs_place) => { - self.inside_assignment = true; - self.consume_with_pat(rhs_place, target); - self.inside_assignment = false; - } - None => self.body.walk_pats(target, &mut |pat| match &self.body[pat] { - Pat::Path(path) => self.mutate_path_pat(path, pat), - &Pat::Expr(expr) => { - let place = self.place_of_expr(expr); - self.mutate_expr(expr, place); - } - _ => {} - }), - } - self.resolver.reset_to_guard(resolver_guard); - } - - Expr::Missing - | Expr::Continue { .. } - | Expr::Path(_) - | Expr::Literal(_) - | Expr::Const(_) - | Expr::Underscore => (), - } - } - - fn walk_pat(&mut self, result: &mut Option, pat: PatId) { - let mut update_result = |ck: CaptureKind| match result { - Some(r) => { - *r = cmp::max(*r, ck); + // First, convert the types that the user supplied (if any). + let supplied_arguments = decl_inputs.iter().map(|&input| match input { + Some(input) => { + let input = self.make_body_ty(input); + self.process_user_written_ty(input).to_nextsolver(interner) } - None => *result = Some(ck), - }; + None => self.table.next_ty_var(), + }); - self.walk_pat_inner( - pat, - &mut update_result, - BorrowKind::Mut { kind: MutBorrowKind::Default }, - ); + Binder::dummy(interner.mk_fn_sig( + supplied_arguments, + supplied_return, + false, + Safety::Safe, + FnAbi::RustCall, + )) } - fn walk_pat_inner( + /// Converts the types that the user supplied, in case that doing + /// so should yield an error, but returns back a signature where + /// all parameters are of type `ty::Error`. + fn error_sig_of_closure( &mut self, - p: PatId, - update_result: &mut impl FnMut(CaptureKind), - mut for_mut: BorrowKind, - ) { - match &self.body[p] { - Pat::Ref { .. } - | Pat::Box { .. } - | Pat::Missing - | Pat::Wild - | Pat::Tuple { .. } - | Pat::Expr(_) - | Pat::Or(_) => (), - Pat::TupleStruct { .. } | Pat::Record { .. 
} => { - if let Some(variant) = self.result.variant_resolution_for_pat(p) { - let adt = variant.adt_id(self.db); - let is_multivariant = match adt { - hir_def::AdtId::EnumId(e) => e.enum_variants(self.db).variants.len() != 1, - _ => false, - }; - if is_multivariant { - update_result(CaptureKind::ByRef(BorrowKind::Shared)); - } - } - } - Pat::Slice { .. } - | Pat::ConstBlock(_) - | Pat::Path(_) - | Pat::Lit(_) - | Pat::Range { .. } => { - update_result(CaptureKind::ByRef(BorrowKind::Shared)); - } - Pat::Bind { id, .. } => match self.result.binding_modes[p] { - crate::BindingMode::Move => { - if self.is_ty_copy(self.result.type_of_binding[*id].clone()) { - update_result(CaptureKind::ByRef(BorrowKind::Shared)); - } else { - update_result(CaptureKind::ByValue); - } - } - crate::BindingMode::Ref(r) => match r { - Mutability::Mut => update_result(CaptureKind::ByRef(for_mut)), - Mutability::Not => update_result(CaptureKind::ByRef(BorrowKind::Shared)), - }, - }, - } - if self.result.pat_adjustments.get(&p).is_some_and(|it| !it.is_empty()) { - for_mut = BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture }; - } - self.body.walk_pats_shallow(p, |p| self.walk_pat_inner(p, update_result, for_mut)); - } - - fn expr_ty(&self, expr: ExprId) -> Ty { - self.result[expr].clone() - } - - fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty { - let mut ty = None; - if let Some(it) = self.result.expr_adjustments.get(&e) - && let Some(it) = it.last() - { - ty = Some(it.target.clone()); - } - ty.unwrap_or_else(|| self.expr_ty(e)) - } - - fn is_upvar(&self, place: &HirPlace) -> bool { - if let Some(c) = self.current_closure { - let InternedClosure(_, root) = self.db.lookup_intern_closure(c.into()); - return self.body.is_binding_upvar(place.local, root); - } - false - } - - fn is_ty_copy(&mut self, ty: Ty) -> bool { - if let TyKind::Closure(id, _) = ty.kind(Interner) { - // FIXME: We handle closure as a special case, since chalk consider every closure as copy. We - // should probably let chalk know which closures are copy, but I don't know how doing it - // without creating query cycles. - return self.result.closure_info.get(id).map(|it| it.1 == FnTrait::Fn).unwrap_or(true); - } - self.table.resolve_completely(ty).is_copy(self.db, self.owner) - } - - fn select_from_expr(&mut self, expr: ExprId) { - self.walk_expr(expr); - } - - fn restrict_precision_for_unsafe(&mut self) { - // FIXME: Borrow checker problems without this. 
- let mut current_captures = std::mem::take(&mut self.current_captures); - for capture in &mut current_captures { - let mut ty = self.table.resolve_completely(self.result[capture.place.local].clone()); - if ty.as_raw_ptr().is_some() || ty.is_union() { - capture.kind = CaptureKind::ByRef(BorrowKind::Shared); - self.truncate_capture_spans(capture, 0); - capture.place.projections.truncate(0); - continue; - } - for (i, p) in capture.place.projections.iter().enumerate() { - ty = p.projected_ty( - ty, - self.db, - |_, _, _| { - unreachable!("Closure field only happens in MIR"); - }, - self.owner.module(self.db).krate(), - ); - if ty.as_raw_ptr().is_some() || ty.is_union() { - capture.kind = CaptureKind::ByRef(BorrowKind::Shared); - self.truncate_capture_spans(capture, i + 1); - capture.place.projections.truncate(i + 1); - break; - } - } - } - self.current_captures = current_captures; - } + decl_inputs: &[Option], + decl_output: Option, + ) -> PolyFnSig<'db> { + let interner = self.table.interner; + let err_ty = Ty::new_error(interner, ErrorGuaranteed); - fn adjust_for_move_closure(&mut self) { - // FIXME: Borrow checker won't allow without this. - let mut current_captures = std::mem::take(&mut self.current_captures); - for capture in &mut current_captures { - if let Some(first_deref) = - capture.place.projections.iter().position(|proj| *proj == ProjectionElem::Deref) - { - self.truncate_capture_spans(capture, first_deref); - capture.place.projections.truncate(first_deref); - } - capture.kind = CaptureKind::ByValue; + if let Some(output) = decl_output { + self.make_body_ty(output); } - self.current_captures = current_captures; - } - - fn minimize_captures(&mut self) { - self.current_captures.sort_unstable_by_key(|it| it.place.projections.len()); - let mut hash_map = FxHashMap::::default(); - let result = mem::take(&mut self.current_captures); - for mut item in result { - let mut lookup_place = HirPlace { local: item.place.local, projections: vec![] }; - let mut it = item.place.projections.iter(); - let prev_index = loop { - if let Some(k) = hash_map.get(&lookup_place) { - break Some(*k); - } - match it.next() { - Some(it) => { - lookup_place.projections.push(it.clone()); - } - None => break None, - } - }; - match prev_index { - Some(p) => { - let prev_projections_len = self.current_captures[p].place.projections.len(); - self.truncate_capture_spans(&mut item, prev_projections_len); - self.current_captures[p].span_stacks.extend(item.span_stacks); - let len = self.current_captures[p].place.projections.len(); - let kind_after_truncate = - item.place.capture_kind_of_truncated_place(item.kind, len); - self.current_captures[p].kind = - cmp::max(kind_after_truncate, self.current_captures[p].kind); - } - None => { - hash_map.insert(item.place.clone(), self.current_captures.len()); - self.current_captures.push(item); - } + let supplied_arguments = decl_inputs.iter().map(|&input| match input { + Some(input) => { + self.make_body_ty(input); + err_ty } - } - } - - fn consume_with_pat(&mut self, mut place: HirPlace, tgt_pat: PatId) { - let adjustments_count = - self.result.pat_adjustments.get(&tgt_pat).map(|it| it.len()).unwrap_or_default(); - place.projections.extend((0..adjustments_count).map(|_| ProjectionElem::Deref)); - self.current_capture_span_stack - .extend((0..adjustments_count).map(|_| MirSpan::PatId(tgt_pat))); - 'reset_span_stack: { - match &self.body[tgt_pat] { - Pat::Missing | Pat::Wild => (), - Pat::Tuple { args, ellipsis } => { - let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it 
as usize)); - let field_count = match self.result[tgt_pat].kind(Interner) { - TyKind::Tuple(_, s) => s.len(Interner), - _ => break 'reset_span_stack, - }; - let fields = 0..field_count; - let it = al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev())); - for (&arg, i) in it { - let mut p = place.clone(); - self.current_capture_span_stack.push(MirSpan::PatId(arg)); - p.projections.push(ProjectionElem::Field(Either::Right(TupleFieldId { - tuple: TupleId(!0), // dummy this, as its unused anyways - index: i as u32, - }))); - self.consume_with_pat(p, arg); - self.current_capture_span_stack.pop(); - } - } - Pat::Or(pats) => { - for pat in pats.iter() { - self.consume_with_pat(place.clone(), *pat); - } - } - Pat::Record { args, .. } => { - let Some(variant) = self.result.variant_resolution_for_pat(tgt_pat) else { - break 'reset_span_stack; - }; - match variant { - VariantId::EnumVariantId(_) | VariantId::UnionId(_) => { - self.consume_place(place) - } - VariantId::StructId(s) => { - let vd = s.fields(self.db); - for field_pat in args.iter() { - let arg = field_pat.pat; - let Some(local_id) = vd.field(&field_pat.name) else { - continue; - }; - let mut p = place.clone(); - self.current_capture_span_stack.push(MirSpan::PatId(arg)); - p.projections.push(ProjectionElem::Field(Either::Left(FieldId { - parent: variant, - local_id, - }))); - self.consume_with_pat(p, arg); - self.current_capture_span_stack.pop(); - } - } - } - } - Pat::Range { .. } | Pat::Slice { .. } | Pat::ConstBlock(_) | Pat::Lit(_) => { - self.consume_place(place) - } - Pat::Path(path) => { - if self.inside_assignment { - self.mutate_path_pat(path, tgt_pat); - } - self.consume_place(place); - } - &Pat::Bind { id, subpat: _ } => { - let mode = self.result.binding_modes[tgt_pat]; - let capture_kind = match mode { - BindingMode::Move => { - self.consume_place(place); - break 'reset_span_stack; - } - BindingMode::Ref(Mutability::Not) => BorrowKind::Shared, - BindingMode::Ref(Mutability::Mut) => { - BorrowKind::Mut { kind: MutBorrowKind::Default } - } - }; - self.current_capture_span_stack.push(MirSpan::BindingId(id)); - self.add_capture(place, CaptureKind::ByRef(capture_kind)); - self.current_capture_span_stack.pop(); - } - Pat::TupleStruct { path: _, args, ellipsis } => { - let Some(variant) = self.result.variant_resolution_for_pat(tgt_pat) else { - break 'reset_span_stack; - }; - match variant { - VariantId::EnumVariantId(_) | VariantId::UnionId(_) => { - self.consume_place(place) - } - VariantId::StructId(s) => { - let vd = s.fields(self.db); - let (al, ar) = - args.split_at(ellipsis.map_or(args.len(), |it| it as usize)); - let fields = vd.fields().iter(); - let it = al - .iter() - .zip(fields.clone()) - .chain(ar.iter().rev().zip(fields.rev())); - for (&arg, (i, _)) in it { - let mut p = place.clone(); - self.current_capture_span_stack.push(MirSpan::PatId(arg)); - p.projections.push(ProjectionElem::Field(Either::Left(FieldId { - parent: variant, - local_id: i, - }))); - self.consume_with_pat(p, arg); - self.current_capture_span_stack.pop(); - } - } - } - } - Pat::Ref { pat, mutability: _ } => { - self.current_capture_span_stack.push(MirSpan::PatId(tgt_pat)); - place.projections.push(ProjectionElem::Deref); - self.consume_with_pat(place, *pat); - self.current_capture_span_stack.pop(); - } - Pat::Box { .. 
} => (), // not supported - &Pat::Expr(expr) => { - self.consume_place(place); - let pat_capture_span_stack = mem::take(&mut self.current_capture_span_stack); - let old_inside_assignment = mem::replace(&mut self.inside_assignment, false); - let lhs_place = self.place_of_expr(expr); - self.mutate_expr(expr, lhs_place); - self.inside_assignment = old_inside_assignment; - self.current_capture_span_stack = pat_capture_span_stack; - } - } - } - self.current_capture_span_stack - .truncate(self.current_capture_span_stack.len() - adjustments_count); - } - - fn consume_exprs(&mut self, exprs: impl Iterator) { - for expr in exprs { - self.consume_expr(expr); - } - } - - fn closure_kind(&self) -> FnTrait { - let mut r = FnTrait::Fn; - for it in &self.current_captures { - r = cmp::min( - r, - match &it.kind { - CaptureKind::ByRef(BorrowKind::Mut { .. }) => FnTrait::FnMut, - CaptureKind::ByRef(BorrowKind::Shallow | BorrowKind::Shared) => FnTrait::Fn, - CaptureKind::ByValue => FnTrait::FnOnce, - }, - ) - } - r - } + None => err_ty, + }); - fn analyze_closure(&mut self, closure: ClosureId) -> FnTrait { - let InternedClosure(_, root) = self.db.lookup_intern_closure(closure.into()); - self.current_closure = Some(closure); - let Expr::Closure { body, capture_by, .. } = &self.body[root] else { - unreachable!("Closure expression id is always closure"); - }; - self.consume_expr(*body); - for item in &self.current_captures { - if matches!( - item.kind, - CaptureKind::ByRef(BorrowKind::Mut { - kind: MutBorrowKind::Default | MutBorrowKind::TwoPhasedBorrow - }) - ) && !item.place.projections.contains(&ProjectionElem::Deref) - { - // FIXME: remove the `mutated_bindings_in_closure` completely and add proper fake reads in - // MIR. I didn't do that due duplicate diagnostics. - self.result.mutated_bindings_in_closure.insert(item.place.local); - } - } - self.restrict_precision_for_unsafe(); - // `closure_kind` should be done before adjust_for_move_closure - // If there exists pre-deduced kind of a closure, use it instead of one determined by capture, as rustc does. - // rustc also does diagnostics here if the latter is not a subtype of the former. - let closure_kind = self - .result - .closure_info - .get(&closure) - .map_or_else(|| self.closure_kind(), |info| info.1); - match capture_by { - CaptureBy::Value => self.adjust_for_move_closure(), - CaptureBy::Ref => (), - } - self.minimize_captures(); - self.strip_captures_ref_span(); - let result = mem::take(&mut self.current_captures); - let captures = result.into_iter().map(|it| it.with_ty(self)).collect::>(); - self.result.closure_info.insert(closure, (captures, closure_kind)); - closure_kind - } + let result = Binder::dummy(interner.mk_fn_sig( + supplied_arguments, + err_ty, + false, + Safety::Safe, + FnAbi::RustCall, + )); - fn strip_captures_ref_span(&mut self) { - // FIXME: Borrow checker won't allow without this. 
- let mut captures = std::mem::take(&mut self.current_captures); - for capture in &mut captures { - if matches!(capture.kind, CaptureKind::ByValue) { - for span_stack in &mut capture.span_stacks { - if span_stack[span_stack.len() - 1].is_ref_span(self.body) { - span_stack.truncate(span_stack.len() - 1); - } - } - } - } - self.current_captures = captures; - } + debug!("supplied_sig_of_closure: result={:?}", result); - pub(crate) fn infer_closures(&mut self) { - let deferred_closures = self.sort_closures(); - for (closure, exprs) in deferred_closures.into_iter().rev() { - self.current_captures = vec![]; - let kind = self.analyze_closure(closure); - - for (derefed_callee, callee_ty, params, expr) in exprs { - if let &Expr::Call { callee, .. } = &self.body[expr] { - let mut adjustments = - self.result.expr_adjustments.remove(&callee).unwrap_or_default().into_vec(); - self.write_fn_trait_method_resolution( - kind, - &derefed_callee, - &mut adjustments, - &callee_ty, - ¶ms, - expr, - ); - self.result.expr_adjustments.insert(callee, adjustments.into_boxed_slice()); - } - } - } - } - - /// We want to analyze some closures before others, to have a correct analysis: - /// * We should analyze nested closures before the parent, since the parent should capture some of - /// the things that its children captures. - /// * If a closure calls another closure, we need to analyze the callee, to find out how we should - /// capture it (e.g. by move for FnOnce) - /// - /// These dependencies are collected in the main inference. We do a topological sort in this function. It - /// will consume the `deferred_closures` field and return its content in a sorted vector. - fn sort_closures(&mut self) -> Vec<(ClosureId, Vec<(Ty, Ty, Vec, ExprId)>)> { - let mut deferred_closures = mem::take(&mut self.deferred_closures); - let mut dependents_count: FxHashMap = - deferred_closures.keys().map(|it| (*it, 0)).collect(); - for deps in self.closure_dependencies.values() { - for dep in deps { - *dependents_count.entry(*dep).or_default() += 1; - } - } - let mut queue: Vec<_> = - deferred_closures.keys().copied().filter(|it| dependents_count[it] == 0).collect(); - let mut result = vec![]; - while let Some(it) = queue.pop() { - if let Some(d) = deferred_closures.remove(&it) { - result.push((it, d)); - } - for dep in self.closure_dependencies.get(&it).into_iter().flat_map(|it| it.iter()) { - let cnt = dependents_count.get_mut(dep).unwrap(); - *cnt -= 1; - if *cnt == 0 { - queue.push(*dep); - } - } - } - assert!(deferred_closures.is_empty(), "we should have analyzed all closures"); result } - pub(super) fn add_current_closure_dependency(&mut self, dep: ClosureId) { - if let Some(c) = self.current_closure - && !dep_creates_cycle(&self.closure_dependencies, &mut FxHashSet::default(), c, dep) - { - self.closure_dependencies.entry(c).or_default().push(dep); - } - - fn dep_creates_cycle( - closure_dependencies: &FxHashMap>, - visited: &mut FxHashSet, - from: ClosureId, - to: ClosureId, - ) -> bool { - if !visited.insert(from) { - return false; - } - - if from == to { - return true; - } - - if let Some(deps) = closure_dependencies.get(&to) { - for dep in deps { - if dep_creates_cycle(closure_dependencies, visited, from, *dep) { - return true; - } - } - } - - false - } - } -} - -/// Call this only when the last span in the stack isn't a split. 
-fn apply_adjusts_to_place( - current_capture_span_stack: &mut Vec, - mut r: HirPlace, - adjustments: &[Adjustment], -) -> Option { - let span = *current_capture_span_stack.last().expect("empty capture span stack"); - for adj in adjustments { - match &adj.kind { - Adjust::Deref(None) => { - current_capture_span_stack.push(span); - r.projections.push(ProjectionElem::Deref); - } - _ => return None, - } + fn closure_sigs(&self, bound_sig: PolyFnSig<'db>) -> ClosureSignatures<'db> { + let liberated_sig = bound_sig.skip_binder(); + // FIXME: When we lower HRTB we'll need to actually liberate regions here. + ClosureSignatures { bound_sig, liberated_sig } } - Some(r) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure/analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure/analysis.rs new file mode 100644 index 0000000000000..fd14b9e2de571 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure/analysis.rs @@ -0,0 +1,1298 @@ +//! Post-inference closure analysis: captures and closure kind. + +use std::{cmp, convert::Infallible, mem}; + +use chalk_ir::{ + BoundVar, DebruijnIndex, Mutability, TyKind, + fold::{FallibleTypeFolder, TypeFoldable}, +}; +use either::Either; +use hir_def::{ + DefWithBodyId, FieldId, HasModule, TupleFieldId, TupleId, VariantId, + expr_store::path::Path, + hir::{ + Array, AsmOperand, BinaryOp, BindingId, CaptureBy, Expr, ExprId, ExprOrPatId, Pat, PatId, + Statement, UnaryOp, + }, + item_tree::FieldsShape, + lang_item::LangItem, + resolver::ValueNs, +}; +use hir_expand::name::Name; +use intern::sym; +use rustc_hash::{FxHashMap, FxHashSet}; +use smallvec::{SmallVec, smallvec}; +use stdx::{format_to, never}; +use syntax::utils::is_raw_identifier; + +use crate::db::InternedClosureId; +use crate::infer::InferenceContext; +use crate::{ + Adjust, Adjustment, Binders, BindingMode, ClosureId, Interner, Substitution, Ty, TyExt, + db::{HirDatabase, InternedClosure}, + error_lifetime, from_placeholder_idx, + generics::Generics, + make_binders, + mir::{BorrowKind, MirSpan, MutBorrowKind, ProjectionElem}, + traits::FnTrait, + utils, +}; + +// The below functions handle capture and closure kind (Fn, FnMut, ..) + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub(crate) struct HirPlace { + pub(crate) local: BindingId, + pub(crate) projections: Vec>, +} + +impl HirPlace { + fn ty(&self, ctx: &mut InferenceContext<'_>) -> Ty { + let mut ty = ctx.table.resolve_completely(ctx.result[self.local].clone()); + for p in &self.projections { + ty = p.projected_ty( + ty, + ctx.db, + |_, _, _| { + unreachable!("Closure field only happens in MIR"); + }, + ctx.owner.module(ctx.db).krate(), + ); + } + ty + } + + fn capture_kind_of_truncated_place( + &self, + mut current_capture: CaptureKind, + len: usize, + ) -> CaptureKind { + if let CaptureKind::ByRef(BorrowKind::Mut { + kind: MutBorrowKind::Default | MutBorrowKind::TwoPhasedBorrow, + }) = current_capture + && self.projections[len..].contains(&ProjectionElem::Deref) + { + current_capture = + CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture }); + } + current_capture + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub enum CaptureKind { + ByRef(BorrowKind), + ByValue, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct CapturedItem { + pub(crate) place: HirPlace, + pub(crate) kind: CaptureKind, + /// The inner vec is the stacks; the outer vec is for each capture reference. + /// + /// Even though we always report only the last span (i.e. 
the most inclusive span), + /// we need to keep them all, since when a closure occurs inside a closure, we + /// copy all captures of the inner closure to the outer closure, and then we may + /// truncate them, and we want the correct span to be reported. + span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>, + pub(crate) ty: Binders, +} + +impl CapturedItem { + pub fn local(&self) -> BindingId { + self.place.local + } + + /// Returns whether this place has any field (aka. non-deref) projections. + pub fn has_field_projections(&self) -> bool { + self.place.projections.iter().any(|it| !matches!(it, ProjectionElem::Deref)) + } + + pub fn ty(&self, db: &dyn HirDatabase, subst: &Substitution) -> Ty { + self.ty.clone().substitute(Interner, &utils::ClosureSubst(subst).parent_subst(db)) + } + + pub fn kind(&self) -> CaptureKind { + self.kind + } + + pub fn spans(&self) -> SmallVec<[MirSpan; 3]> { + self.span_stacks.iter().map(|stack| *stack.last().expect("empty span stack")).collect() + } + + /// Converts the place to a name that can be inserted into source code. + pub fn place_to_name(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String { + let body = db.body(owner); + let mut result = body[self.place.local].name.as_str().to_owned(); + for proj in &self.place.projections { + match proj { + ProjectionElem::Deref => {} + ProjectionElem::Field(Either::Left(f)) => { + let variant_data = f.parent.fields(db); + match variant_data.shape { + FieldsShape::Record => { + result.push('_'); + result.push_str(variant_data.fields()[f.local_id].name.as_str()) + } + FieldsShape::Tuple => { + let index = + variant_data.fields().iter().position(|it| it.0 == f.local_id); + if let Some(index) = index { + format_to!(result, "_{index}"); + } + } + FieldsShape::Unit => {} + } + } + ProjectionElem::Field(Either::Right(f)) => format_to!(result, "_{}", f.index), + &ProjectionElem::ClosureField(field) => format_to!(result, "_{field}"), + ProjectionElem::Index(_) + | ProjectionElem::ConstantIndex { .. } + | ProjectionElem::Subslice { .. } + | ProjectionElem::OpaqueCast(_) => { + never!("Not happen in closure capture"); + continue; + } + } + } + if is_raw_identifier(&result, owner.module(db).krate().data(db).edition) { + result.insert_str(0, "r#"); + } + result + } + + pub fn display_place_source_code(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String { + let body = db.body(owner); + let krate = owner.krate(db); + let edition = krate.data(db).edition; + let mut result = body[self.place.local].name.display(db, edition).to_string(); + for proj in &self.place.projections { + match proj { + // In source code autoderef kicks in. + ProjectionElem::Deref => {} + ProjectionElem::Field(Either::Left(f)) => { + let variant_data = f.parent.fields(db); + match variant_data.shape { + FieldsShape::Record => format_to!( + result, + ".{}", + variant_data.fields()[f.local_id].name.display(db, edition) + ), + FieldsShape::Tuple => format_to!( + result, + ".{}", + variant_data + .fields() + .iter() + .position(|it| it.0 == f.local_id) + .unwrap_or_default() + ), + FieldsShape::Unit => {} + } + } + ProjectionElem::Field(Either::Right(f)) => { + let field = f.index; + format_to!(result, ".{field}"); + } + &ProjectionElem::ClosureField(field) => { + format_to!(result, ".{field}"); + } + ProjectionElem::Index(_) + | ProjectionElem::ConstantIndex { .. } + | ProjectionElem::Subslice { .. 
} + | ProjectionElem::OpaqueCast(_) => { + never!("Not happen in closure capture"); + continue; + } + } + } + let final_derefs_count = self + .place + .projections + .iter() + .rev() + .take_while(|proj| matches!(proj, ProjectionElem::Deref)) + .count(); + result.insert_str(0, &"*".repeat(final_derefs_count)); + result + } + + pub fn display_place(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String { + let body = db.body(owner); + let krate = owner.krate(db); + let edition = krate.data(db).edition; + let mut result = body[self.place.local].name.display(db, edition).to_string(); + let mut field_need_paren = false; + for proj in &self.place.projections { + match proj { + ProjectionElem::Deref => { + result = format!("*{result}"); + field_need_paren = true; + } + ProjectionElem::Field(Either::Left(f)) => { + if field_need_paren { + result = format!("({result})"); + } + let variant_data = f.parent.fields(db); + let field = match variant_data.shape { + FieldsShape::Record => { + variant_data.fields()[f.local_id].name.as_str().to_owned() + } + FieldsShape::Tuple => variant_data + .fields() + .iter() + .position(|it| it.0 == f.local_id) + .unwrap_or_default() + .to_string(), + FieldsShape::Unit => "[missing field]".to_owned(), + }; + result = format!("{result}.{field}"); + field_need_paren = false; + } + ProjectionElem::Field(Either::Right(f)) => { + let field = f.index; + if field_need_paren { + result = format!("({result})"); + } + result = format!("{result}.{field}"); + field_need_paren = false; + } + &ProjectionElem::ClosureField(field) => { + if field_need_paren { + result = format!("({result})"); + } + result = format!("{result}.{field}"); + field_need_paren = false; + } + ProjectionElem::Index(_) + | ProjectionElem::ConstantIndex { .. } + | ProjectionElem::Subslice { .. } + | ProjectionElem::OpaqueCast(_) => { + never!("Not happen in closure capture"); + continue; + } + } + } + result + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) struct CapturedItemWithoutTy { + pub(crate) place: HirPlace, + pub(crate) kind: CaptureKind, + /// The inner vec is the stacks; the outer vec is for each capture reference. + pub(crate) span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>, +} + +impl CapturedItemWithoutTy { + fn with_ty(self, ctx: &mut InferenceContext<'_>) -> CapturedItem { + let ty = self.place.ty(ctx); + let ty = match &self.kind { + CaptureKind::ByValue => ty, + CaptureKind::ByRef(bk) => { + let m = match bk { + BorrowKind::Mut { .. 
} => Mutability::Mut, + _ => Mutability::Not, + }; + TyKind::Ref(m, error_lifetime(), ty).intern(Interner) + } + }; + return CapturedItem { + place: self.place, + kind: self.kind, + span_stacks: self.span_stacks, + ty: replace_placeholder_with_binder(ctx, ty), + }; + + fn replace_placeholder_with_binder(ctx: &mut InferenceContext<'_>, ty: Ty) -> Binders { + struct Filler<'a> { + db: &'a dyn HirDatabase, + generics: &'a Generics, + } + impl FallibleTypeFolder for Filler<'_> { + type Error = (); + + fn as_dyn(&mut self) -> &mut dyn FallibleTypeFolder { + self + } + + fn interner(&self) -> Interner { + Interner + } + + fn try_fold_free_placeholder_const( + &mut self, + ty: chalk_ir::Ty, + idx: chalk_ir::PlaceholderIndex, + outer_binder: DebruijnIndex, + ) -> Result, Self::Error> { + let x = from_placeholder_idx(self.db, idx).0; + let Some(idx) = self.generics.type_or_const_param_idx(x) else { + return Err(()); + }; + Ok(BoundVar::new(outer_binder, idx).to_const(Interner, ty)) + } + + fn try_fold_free_placeholder_ty( + &mut self, + idx: chalk_ir::PlaceholderIndex, + outer_binder: DebruijnIndex, + ) -> std::result::Result { + let x = from_placeholder_idx(self.db, idx).0; + let Some(idx) = self.generics.type_or_const_param_idx(x) else { + return Err(()); + }; + Ok(BoundVar::new(outer_binder, idx).to_ty(Interner)) + } + } + let filler = &mut Filler { db: ctx.db, generics: ctx.generics() }; + let result = ty.clone().try_fold_with(filler, DebruijnIndex::INNERMOST).unwrap_or(ty); + make_binders(ctx.db, filler.generics, result) + } + } +} + +impl InferenceContext<'_> { + fn place_of_expr(&mut self, tgt_expr: ExprId) -> Option { + let r = self.place_of_expr_without_adjust(tgt_expr)?; + let adjustments = + self.result.expr_adjustments.get(&tgt_expr).map(|it| &**it).unwrap_or_default(); + apply_adjusts_to_place(&mut self.current_capture_span_stack, r, adjustments) + } + + /// Pushes the span into `current_capture_span_stack`, *without clearing it first*. + fn path_place(&mut self, path: &Path, id: ExprOrPatId) -> Option { + if path.type_anchor().is_some() { + return None; + } + let hygiene = self.body.expr_or_pat_path_hygiene(id); + self.resolver.resolve_path_in_value_ns_fully(self.db, path, hygiene).and_then(|result| { + match result { + ValueNs::LocalBinding(binding) => { + let mir_span = match id { + ExprOrPatId::ExprId(id) => MirSpan::ExprId(id), + ExprOrPatId::PatId(id) => MirSpan::PatId(id), + }; + self.current_capture_span_stack.push(mir_span); + Some(HirPlace { local: binding, projections: Vec::new() }) + } + _ => None, + } + }) + } + + /// Changes `current_capture_span_stack` to contain the stack of spans for this expr. + fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option { + self.current_capture_span_stack.clear(); + match &self.body[tgt_expr] { + Expr::Path(p) => { + let resolver_guard = + self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr); + let result = self.path_place(p, tgt_expr.into()); + self.resolver.reset_to_guard(resolver_guard); + return result; + } + Expr::Field { expr, name: _ } => { + let mut place = self.place_of_expr(*expr)?; + let field = self.result.field_resolution(tgt_expr)?; + self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr)); + place.projections.push(ProjectionElem::Field(field)); + return Some(place); + } + Expr::UnaryOp { expr, op: UnaryOp::Deref } => { + if matches!( + self.expr_ty_after_adjustments(*expr).kind(Interner), + TyKind::Ref(..) | TyKind::Raw(..) 
+ ) { + let mut place = self.place_of_expr(*expr)?; + self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr)); + place.projections.push(ProjectionElem::Deref); + return Some(place); + } + } + _ => (), + } + None + } + + fn push_capture(&mut self, place: HirPlace, kind: CaptureKind) { + self.current_captures.push(CapturedItemWithoutTy { + place, + kind, + span_stacks: smallvec![self.current_capture_span_stack.iter().copied().collect()], + }); + } + + fn truncate_capture_spans(&self, capture: &mut CapturedItemWithoutTy, mut truncate_to: usize) { + // The first span is the identifier, and it must always remain. + truncate_to += 1; + for span_stack in &mut capture.span_stacks { + let mut remained = truncate_to; + let mut actual_truncate_to = 0; + for &span in &*span_stack { + actual_truncate_to += 1; + if !span.is_ref_span(self.body) { + remained -= 1; + if remained == 0 { + break; + } + } + } + if actual_truncate_to < span_stack.len() + && span_stack[actual_truncate_to].is_ref_span(self.body) + { + // Include the ref operator if there is one, we will fix it later (in `strip_captures_ref_span()`) if it's incorrect. + actual_truncate_to += 1; + } + span_stack.truncate(actual_truncate_to); + } + } + + fn ref_expr(&mut self, expr: ExprId, place: Option) { + if let Some(place) = place { + self.add_capture(place, CaptureKind::ByRef(BorrowKind::Shared)); + } + self.walk_expr(expr); + } + + fn add_capture(&mut self, place: HirPlace, kind: CaptureKind) { + if self.is_upvar(&place) { + self.push_capture(place, kind); + } + } + + fn mutate_path_pat(&mut self, path: &Path, id: PatId) { + if let Some(place) = self.path_place(path, id.into()) { + self.add_capture( + place, + CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }), + ); + self.current_capture_span_stack.pop(); // Remove the pattern span. 
+ } + } + + fn mutate_expr(&mut self, expr: ExprId, place: Option) { + if let Some(place) = place { + self.add_capture( + place, + CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }), + ); + } + self.walk_expr(expr); + } + + fn consume_expr(&mut self, expr: ExprId) { + if let Some(place) = self.place_of_expr(expr) { + self.consume_place(place); + } + self.walk_expr(expr); + } + + fn consume_place(&mut self, place: HirPlace) { + if self.is_upvar(&place) { + let ty = place.ty(self); + let kind = if self.is_ty_copy(ty) { + CaptureKind::ByRef(BorrowKind::Shared) + } else { + CaptureKind::ByValue + }; + self.push_capture(place, kind); + } + } + + fn walk_expr_with_adjust(&mut self, tgt_expr: ExprId, adjustment: &[Adjustment]) { + if let Some((last, rest)) = adjustment.split_last() { + match &last.kind { + Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => { + self.walk_expr_with_adjust(tgt_expr, rest) + } + Adjust::Deref(Some(m)) => match m.0 { + Some(m) => { + self.ref_capture_with_adjusts(m, tgt_expr, rest); + } + None => unreachable!(), + }, + Adjust::Borrow(b) => { + self.ref_capture_with_adjusts(b.mutability(), tgt_expr, rest); + } + } + } else { + self.walk_expr_without_adjust(tgt_expr); + } + } + + fn ref_capture_with_adjusts(&mut self, m: Mutability, tgt_expr: ExprId, rest: &[Adjustment]) { + let capture_kind = match m { + Mutability::Mut => CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }), + Mutability::Not => CaptureKind::ByRef(BorrowKind::Shared), + }; + if let Some(place) = self.place_of_expr_without_adjust(tgt_expr) + && let Some(place) = + apply_adjusts_to_place(&mut self.current_capture_span_stack, place, rest) + { + self.add_capture(place, capture_kind); + } + self.walk_expr_with_adjust(tgt_expr, rest); + } + + fn walk_expr(&mut self, tgt_expr: ExprId) { + if let Some(it) = self.result.expr_adjustments.get_mut(&tgt_expr) { + // FIXME: this take is completely unneeded, and just is here to make borrow checker + // happy. Remove it if you can. + let x_taken = mem::take(it); + self.walk_expr_with_adjust(tgt_expr, &x_taken); + *self.result.expr_adjustments.get_mut(&tgt_expr).unwrap() = x_taken; + } else { + self.walk_expr_without_adjust(tgt_expr); + } + } + + fn walk_expr_without_adjust(&mut self, tgt_expr: ExprId) { + match &self.body[tgt_expr] { + Expr::OffsetOf(_) => (), + Expr::InlineAsm(e) => e.operands.iter().for_each(|(_, op)| match op { + AsmOperand::In { expr, .. } + | AsmOperand::Out { expr: Some(expr), .. } + | AsmOperand::InOut { expr, .. } => self.walk_expr_without_adjust(*expr), + AsmOperand::SplitInOut { in_expr, out_expr, .. } => { + self.walk_expr_without_adjust(*in_expr); + if let Some(out_expr) = out_expr { + self.walk_expr_without_adjust(*out_expr); + } + } + AsmOperand::Out { expr: None, .. } + | AsmOperand::Const(_) + | AsmOperand::Label(_) + | AsmOperand::Sym(_) => (), + }), + Expr::If { condition, then_branch, else_branch } => { + self.consume_expr(*condition); + self.consume_expr(*then_branch); + if let &Some(expr) = else_branch { + self.consume_expr(expr); + } + } + Expr::Async { statements, tail, .. } + | Expr::Unsafe { statements, tail, .. } + | Expr::Block { statements, tail, .. 
} => { + for s in statements.iter() { + match s { + Statement::Let { pat, type_ref: _, initializer, else_branch } => { + if let Some(else_branch) = else_branch { + self.consume_expr(*else_branch); + } + if let Some(initializer) = initializer { + if else_branch.is_some() { + self.consume_expr(*initializer); + } else { + self.walk_expr(*initializer); + } + if let Some(place) = self.place_of_expr(*initializer) { + self.consume_with_pat(place, *pat); + } + } + } + Statement::Expr { expr, has_semi: _ } => { + self.consume_expr(*expr); + } + Statement::Item(_) => (), + } + } + if let Some(tail) = tail { + self.consume_expr(*tail); + } + } + Expr::Call { callee, args } => { + self.consume_expr(*callee); + self.consume_exprs(args.iter().copied()); + } + Expr::MethodCall { receiver, args, .. } => { + self.consume_expr(*receiver); + self.consume_exprs(args.iter().copied()); + } + Expr::Match { expr, arms } => { + for arm in arms.iter() { + self.consume_expr(arm.expr); + if let Some(guard) = arm.guard { + self.consume_expr(guard); + } + } + self.walk_expr(*expr); + if let Some(discr_place) = self.place_of_expr(*expr) + && self.is_upvar(&discr_place) + { + let mut capture_mode = None; + for arm in arms.iter() { + self.walk_pat(&mut capture_mode, arm.pat); + } + if let Some(c) = capture_mode { + self.push_capture(discr_place, c); + } + } + } + Expr::Break { expr, label: _ } + | Expr::Return { expr } + | Expr::Yield { expr } + | Expr::Yeet { expr } => { + if let &Some(expr) = expr { + self.consume_expr(expr); + } + } + &Expr::Become { expr } => { + self.consume_expr(expr); + } + Expr::RecordLit { fields, spread, .. } => { + if let &Some(expr) = spread { + self.consume_expr(expr); + } + self.consume_exprs(fields.iter().map(|it| it.expr)); + } + Expr::Field { expr, name: _ } => self.select_from_expr(*expr), + Expr::UnaryOp { expr, op: UnaryOp::Deref } => { + if matches!( + self.expr_ty_after_adjustments(*expr).kind(Interner), + TyKind::Ref(..) | TyKind::Raw(..) + ) { + self.select_from_expr(*expr); + } else if let Some((f, _)) = self.result.method_resolution(tgt_expr) { + let mutability = 'b: { + if let Some(deref_trait) = + self.resolve_lang_item(LangItem::DerefMut).and_then(|it| it.as_trait()) + && let Some(deref_fn) = deref_trait + .trait_items(self.db) + .method_by_name(&Name::new_symbol_root(sym::deref_mut)) + { + break 'b deref_fn == f; + } + false + }; + let place = self.place_of_expr(*expr); + if mutability { + self.mutate_expr(*expr, place); + } else { + self.ref_expr(*expr, place); + } + } else { + self.select_from_expr(*expr); + } + } + Expr::Let { pat, expr } => { + self.walk_expr(*expr); + if let Some(place) = self.place_of_expr(*expr) { + self.consume_with_pat(place, *pat); + } + } + Expr::UnaryOp { expr, op: _ } + | Expr::Array(Array::Repeat { initializer: expr, repeat: _ }) + | Expr::Await { expr } + | Expr::Loop { body: expr, label: _ } + | Expr::Box { expr } + | Expr::Cast { expr, type_ref: _ } => { + self.consume_expr(*expr); + } + Expr::Ref { expr, rawness: _, mutability } => { + // We need to do this before we push the span so the order will be correct. + let place = self.place_of_expr(*expr); + self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr)); + match mutability { + hir_def::type_ref::Mutability::Shared => self.ref_expr(*expr, place), + hir_def::type_ref::Mutability::Mut => self.mutate_expr(*expr, place), + } + } + Expr::BinaryOp { lhs, rhs, op } => { + let Some(op) = op else { + return; + }; + if matches!(op, BinaryOp::Assignment { .. 
}) { + let place = self.place_of_expr(*lhs); + self.mutate_expr(*lhs, place); + self.consume_expr(*rhs); + return; + } + self.consume_expr(*lhs); + self.consume_expr(*rhs); + } + Expr::Range { lhs, rhs, range_type: _ } => { + if let &Some(expr) = lhs { + self.consume_expr(expr); + } + if let &Some(expr) = rhs { + self.consume_expr(expr); + } + } + Expr::Index { base, index } => { + self.select_from_expr(*base); + self.consume_expr(*index); + } + Expr::Closure { .. } => { + let ty = self.expr_ty(tgt_expr); + let TyKind::Closure(id, _) = ty.kind(Interner) else { + never!("closure type is always closure"); + return; + }; + let (captures, _) = + self.result.closure_info.get(id).expect( + "We sort closures, so we should always have data for inner closures", + ); + let mut cc = mem::take(&mut self.current_captures); + cc.extend(captures.iter().filter(|it| self.is_upvar(&it.place)).map(|it| { + CapturedItemWithoutTy { + place: it.place.clone(), + kind: it.kind, + span_stacks: it.span_stacks.clone(), + } + })); + self.current_captures = cc; + } + Expr::Array(Array::ElementList { elements: exprs }) | Expr::Tuple { exprs } => { + self.consume_exprs(exprs.iter().copied()) + } + &Expr::Assignment { target, value } => { + self.walk_expr(value); + let resolver_guard = + self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr); + match self.place_of_expr(value) { + Some(rhs_place) => { + self.inside_assignment = true; + self.consume_with_pat(rhs_place, target); + self.inside_assignment = false; + } + None => self.body.walk_pats(target, &mut |pat| match &self.body[pat] { + Pat::Path(path) => self.mutate_path_pat(path, pat), + &Pat::Expr(expr) => { + let place = self.place_of_expr(expr); + self.mutate_expr(expr, place); + } + _ => {} + }), + } + self.resolver.reset_to_guard(resolver_guard); + } + + Expr::Missing + | Expr::Continue { .. } + | Expr::Path(_) + | Expr::Literal(_) + | Expr::Const(_) + | Expr::Underscore => (), + } + } + + fn walk_pat(&mut self, result: &mut Option, pat: PatId) { + let mut update_result = |ck: CaptureKind| match result { + Some(r) => { + *r = cmp::max(*r, ck); + } + None => *result = Some(ck), + }; + + self.walk_pat_inner( + pat, + &mut update_result, + BorrowKind::Mut { kind: MutBorrowKind::Default }, + ); + } + + fn walk_pat_inner( + &mut self, + p: PatId, + update_result: &mut impl FnMut(CaptureKind), + mut for_mut: BorrowKind, + ) { + match &self.body[p] { + Pat::Ref { .. } + | Pat::Box { .. } + | Pat::Missing + | Pat::Wild + | Pat::Tuple { .. } + | Pat::Expr(_) + | Pat::Or(_) => (), + Pat::TupleStruct { .. } | Pat::Record { .. } => { + if let Some(variant) = self.result.variant_resolution_for_pat(p) { + let adt = variant.adt_id(self.db); + let is_multivariant = match adt { + hir_def::AdtId::EnumId(e) => e.enum_variants(self.db).variants.len() != 1, + _ => false, + }; + if is_multivariant { + update_result(CaptureKind::ByRef(BorrowKind::Shared)); + } + } + } + Pat::Slice { .. } + | Pat::ConstBlock(_) + | Pat::Path(_) + | Pat::Lit(_) + | Pat::Range { .. } => { + update_result(CaptureKind::ByRef(BorrowKind::Shared)); + } + Pat::Bind { id, .. 
} => match self.result.binding_modes[p] { + crate::BindingMode::Move => { + if self.is_ty_copy(self.result.type_of_binding[*id].clone()) { + update_result(CaptureKind::ByRef(BorrowKind::Shared)); + } else { + update_result(CaptureKind::ByValue); + } + } + crate::BindingMode::Ref(r) => match r { + Mutability::Mut => update_result(CaptureKind::ByRef(for_mut)), + Mutability::Not => update_result(CaptureKind::ByRef(BorrowKind::Shared)), + }, + }, + } + if self.result.pat_adjustments.get(&p).is_some_and(|it| !it.is_empty()) { + for_mut = BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture }; + } + self.body.walk_pats_shallow(p, |p| self.walk_pat_inner(p, update_result, for_mut)); + } + + fn expr_ty(&self, expr: ExprId) -> Ty { + self.result[expr].clone() + } + + fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty { + let mut ty = None; + if let Some(it) = self.result.expr_adjustments.get(&e) + && let Some(it) = it.last() + { + ty = Some(it.target.clone()); + } + ty.unwrap_or_else(|| self.expr_ty(e)) + } + + fn is_upvar(&self, place: &HirPlace) -> bool { + if let Some(c) = self.current_closure { + let InternedClosure(_, root) = self.db.lookup_intern_closure(c); + return self.body.is_binding_upvar(place.local, root); + } + false + } + + fn is_ty_copy(&mut self, ty: Ty) -> bool { + if let TyKind::Closure(id, _) = ty.kind(Interner) { + // FIXME: We handle closure as a special case, since chalk consider every closure as copy. We + // should probably let chalk know which closures are copy, but I don't know how doing it + // without creating query cycles. + return self.result.closure_info.get(id).map(|it| it.1 == FnTrait::Fn).unwrap_or(true); + } + self.table.resolve_completely(ty).is_copy(self.db, self.owner) + } + + fn select_from_expr(&mut self, expr: ExprId) { + self.walk_expr(expr); + } + + fn restrict_precision_for_unsafe(&mut self) { + // FIXME: Borrow checker problems without this. + let mut current_captures = std::mem::take(&mut self.current_captures); + for capture in &mut current_captures { + let mut ty = self.table.resolve_completely(self.result[capture.place.local].clone()); + if ty.as_raw_ptr().is_some() || ty.is_union() { + capture.kind = CaptureKind::ByRef(BorrowKind::Shared); + self.truncate_capture_spans(capture, 0); + capture.place.projections.truncate(0); + continue; + } + for (i, p) in capture.place.projections.iter().enumerate() { + ty = p.projected_ty( + ty, + self.db, + |_, _, _| { + unreachable!("Closure field only happens in MIR"); + }, + self.owner.module(self.db).krate(), + ); + if ty.as_raw_ptr().is_some() || ty.is_union() { + capture.kind = CaptureKind::ByRef(BorrowKind::Shared); + self.truncate_capture_spans(capture, i + 1); + capture.place.projections.truncate(i + 1); + break; + } + } + } + self.current_captures = current_captures; + } + + fn adjust_for_move_closure(&mut self) { + // FIXME: Borrow checker won't allow without this. 
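// Illustrative sketch, not part of this change: what this adjustment models in
// surface Rust (plain std types). For a `move` closure, a capture that went
// through a deref is truncated back to the dereferenced binding and captured
// by value:
//
//     let s = String::from("x");
//     let r = &s;
//     let c = move || r.len(); // captures `r` itself by value, not `*r` by ref
//
// That is what the loop below does: cut the projection list at the first
// `Deref` and force the capture kind to `ByValue`.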
+ let mut current_captures = std::mem::take(&mut self.current_captures); + for capture in &mut current_captures { + if let Some(first_deref) = + capture.place.projections.iter().position(|proj| *proj == ProjectionElem::Deref) + { + self.truncate_capture_spans(capture, first_deref); + capture.place.projections.truncate(first_deref); + } + capture.kind = CaptureKind::ByValue; + } + self.current_captures = current_captures; + } + + fn minimize_captures(&mut self) { + self.current_captures.sort_unstable_by_key(|it| it.place.projections.len()); + let mut hash_map = FxHashMap::::default(); + let result = mem::take(&mut self.current_captures); + for mut item in result { + let mut lookup_place = HirPlace { local: item.place.local, projections: vec![] }; + let mut it = item.place.projections.iter(); + let prev_index = loop { + if let Some(k) = hash_map.get(&lookup_place) { + break Some(*k); + } + match it.next() { + Some(it) => { + lookup_place.projections.push(it.clone()); + } + None => break None, + } + }; + match prev_index { + Some(p) => { + let prev_projections_len = self.current_captures[p].place.projections.len(); + self.truncate_capture_spans(&mut item, prev_projections_len); + self.current_captures[p].span_stacks.extend(item.span_stacks); + let len = self.current_captures[p].place.projections.len(); + let kind_after_truncate = + item.place.capture_kind_of_truncated_place(item.kind, len); + self.current_captures[p].kind = + cmp::max(kind_after_truncate, self.current_captures[p].kind); + } + None => { + hash_map.insert(item.place.clone(), self.current_captures.len()); + self.current_captures.push(item); + } + } + } + } + + fn consume_with_pat(&mut self, mut place: HirPlace, tgt_pat: PatId) { + let adjustments_count = + self.result.pat_adjustments.get(&tgt_pat).map(|it| it.len()).unwrap_or_default(); + place.projections.extend((0..adjustments_count).map(|_| ProjectionElem::Deref)); + self.current_capture_span_stack + .extend((0..adjustments_count).map(|_| MirSpan::PatId(tgt_pat))); + 'reset_span_stack: { + match &self.body[tgt_pat] { + Pat::Missing | Pat::Wild => (), + Pat::Tuple { args, ellipsis } => { + let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize)); + let field_count = match self.result[tgt_pat].kind(Interner) { + TyKind::Tuple(_, s) => s.len(Interner), + _ => break 'reset_span_stack, + }; + let fields = 0..field_count; + let it = al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev())); + for (&arg, i) in it { + let mut p = place.clone(); + self.current_capture_span_stack.push(MirSpan::PatId(arg)); + p.projections.push(ProjectionElem::Field(Either::Right(TupleFieldId { + tuple: TupleId(!0), // dummy this, as its unused anyways + index: i as u32, + }))); + self.consume_with_pat(p, arg); + self.current_capture_span_stack.pop(); + } + } + Pat::Or(pats) => { + for pat in pats.iter() { + self.consume_with_pat(place.clone(), *pat); + } + } + Pat::Record { args, .. 
} => { + let Some(variant) = self.result.variant_resolution_for_pat(tgt_pat) else { + break 'reset_span_stack; + }; + match variant { + VariantId::EnumVariantId(_) | VariantId::UnionId(_) => { + self.consume_place(place) + } + VariantId::StructId(s) => { + let vd = s.fields(self.db); + for field_pat in args.iter() { + let arg = field_pat.pat; + let Some(local_id) = vd.field(&field_pat.name) else { + continue; + }; + let mut p = place.clone(); + self.current_capture_span_stack.push(MirSpan::PatId(arg)); + p.projections.push(ProjectionElem::Field(Either::Left(FieldId { + parent: variant, + local_id, + }))); + self.consume_with_pat(p, arg); + self.current_capture_span_stack.pop(); + } + } + } + } + Pat::Range { .. } | Pat::Slice { .. } | Pat::ConstBlock(_) | Pat::Lit(_) => { + self.consume_place(place) + } + Pat::Path(path) => { + if self.inside_assignment { + self.mutate_path_pat(path, tgt_pat); + } + self.consume_place(place); + } + &Pat::Bind { id, subpat: _ } => { + let mode = self.result.binding_modes[tgt_pat]; + let capture_kind = match mode { + BindingMode::Move => { + self.consume_place(place); + break 'reset_span_stack; + } + BindingMode::Ref(Mutability::Not) => BorrowKind::Shared, + BindingMode::Ref(Mutability::Mut) => { + BorrowKind::Mut { kind: MutBorrowKind::Default } + } + }; + self.current_capture_span_stack.push(MirSpan::BindingId(id)); + self.add_capture(place, CaptureKind::ByRef(capture_kind)); + self.current_capture_span_stack.pop(); + } + Pat::TupleStruct { path: _, args, ellipsis } => { + let Some(variant) = self.result.variant_resolution_for_pat(tgt_pat) else { + break 'reset_span_stack; + }; + match variant { + VariantId::EnumVariantId(_) | VariantId::UnionId(_) => { + self.consume_place(place) + } + VariantId::StructId(s) => { + let vd = s.fields(self.db); + let (al, ar) = + args.split_at(ellipsis.map_or(args.len(), |it| it as usize)); + let fields = vd.fields().iter(); + let it = al + .iter() + .zip(fields.clone()) + .chain(ar.iter().rev().zip(fields.rev())); + for (&arg, (i, _)) in it { + let mut p = place.clone(); + self.current_capture_span_stack.push(MirSpan::PatId(arg)); + p.projections.push(ProjectionElem::Field(Either::Left(FieldId { + parent: variant, + local_id: i, + }))); + self.consume_with_pat(p, arg); + self.current_capture_span_stack.pop(); + } + } + } + } + Pat::Ref { pat, mutability: _ } => { + self.current_capture_span_stack.push(MirSpan::PatId(tgt_pat)); + place.projections.push(ProjectionElem::Deref); + self.consume_with_pat(place, *pat); + self.current_capture_span_stack.pop(); + } + Pat::Box { .. } => (), // not supported + &Pat::Expr(expr) => { + self.consume_place(place); + let pat_capture_span_stack = mem::take(&mut self.current_capture_span_stack); + let old_inside_assignment = mem::replace(&mut self.inside_assignment, false); + let lhs_place = self.place_of_expr(expr); + self.mutate_expr(expr, lhs_place); + self.inside_assignment = old_inside_assignment; + self.current_capture_span_stack = pat_capture_span_stack; + } + } + } + self.current_capture_span_stack + .truncate(self.current_capture_span_stack.len() - adjustments_count); + } + + fn consume_exprs(&mut self, exprs: impl Iterator) { + for expr in exprs { + self.consume_expr(expr); + } + } + + fn closure_kind(&self) -> FnTrait { + let mut r = FnTrait::Fn; + for it in &self.current_captures { + r = cmp::min( + r, + match &it.kind { + CaptureKind::ByRef(BorrowKind::Mut { .. 
}) => FnTrait::FnMut, + CaptureKind::ByRef(BorrowKind::Shallow | BorrowKind::Shared) => FnTrait::Fn, + CaptureKind::ByValue => FnTrait::FnOnce, + }, + ) + } + r + } + + fn analyze_closure(&mut self, closure: ClosureId) -> FnTrait { + let InternedClosure(_, root) = self.db.lookup_intern_closure(closure.into()); + self.current_closure = Some(closure.into()); + let Expr::Closure { body, capture_by, .. } = &self.body[root] else { + unreachable!("Closure expression id is always closure"); + }; + self.consume_expr(*body); + for item in &self.current_captures { + if matches!( + item.kind, + CaptureKind::ByRef(BorrowKind::Mut { + kind: MutBorrowKind::Default | MutBorrowKind::TwoPhasedBorrow + }) + ) && !item.place.projections.contains(&ProjectionElem::Deref) + { + // FIXME: remove the `mutated_bindings_in_closure` completely and add proper fake reads in + // MIR. I didn't do that due duplicate diagnostics. + self.result.mutated_bindings_in_closure.insert(item.place.local); + } + } + self.restrict_precision_for_unsafe(); + // `closure_kind` should be done before adjust_for_move_closure + // If there exists pre-deduced kind of a closure, use it instead of one determined by capture, as rustc does. + // rustc also does diagnostics here if the latter is not a subtype of the former. + let closure_kind = self + .result + .closure_info + .get(&closure) + .map_or_else(|| self.closure_kind(), |info| info.1); + match capture_by { + CaptureBy::Value => self.adjust_for_move_closure(), + CaptureBy::Ref => (), + } + self.minimize_captures(); + self.strip_captures_ref_span(); + let result = mem::take(&mut self.current_captures); + let captures = result.into_iter().map(|it| it.with_ty(self)).collect::>(); + self.result.closure_info.insert(closure, (captures, closure_kind)); + closure_kind + } + + fn strip_captures_ref_span(&mut self) { + // FIXME: Borrow checker won't allow without this. + let mut captures = std::mem::take(&mut self.current_captures); + for capture in &mut captures { + if matches!(capture.kind, CaptureKind::ByValue) { + for span_stack in &mut capture.span_stacks { + if span_stack[span_stack.len() - 1].is_ref_span(self.body) { + span_stack.truncate(span_stack.len() - 1); + } + } + } + } + self.current_captures = captures; + } + + pub(crate) fn infer_closures(&mut self) { + let deferred_closures = self.sort_closures(); + for (closure, exprs) in deferred_closures.into_iter().rev() { + self.current_captures = vec![]; + let kind = self.analyze_closure(closure); + + for (derefed_callee, callee_ty, params, expr) in exprs { + if let &Expr::Call { callee, .. } = &self.body[expr] { + let mut adjustments = + self.result.expr_adjustments.remove(&callee).unwrap_or_default().into_vec(); + self.write_fn_trait_method_resolution( + kind, + &derefed_callee, + &mut adjustments, + &callee_ty, + ¶ms, + expr, + ); + self.result.expr_adjustments.insert(callee, adjustments.into_boxed_slice()); + } + } + } + } + + /// We want to analyze some closures before others, to have a correct analysis: + /// * We should analyze nested closures before the parent, since the parent should capture some of + /// the things that its children captures. + /// * If a closure calls another closure, we need to analyze the callee, to find out how we should + /// capture it (e.g. by move for FnOnce) + /// + /// These dependencies are collected in the main inference. We do a topological sort in this function. It + /// will consume the `deferred_closures` field and return its content in a sorted vector. 
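// Illustrative sketch, not part of this change: the sort described above is
// Kahn's algorithm over the closure dependency graph. A standalone version
// over plain integer ids (all names below are hypothetical, not part of this
// crate) could look like this; the caller then walks the result in reverse so
// that nested closures are analyzed before their parents:
//
//     use std::collections::HashMap;
//
//     /// `deps[n]` lists the closures that `n` depends on.
//     fn topo_sort(deps: &HashMap<u32, Vec<u32>>, nodes: &[u32]) -> Vec<u32> {
//         // Count, for every node, how many other nodes depend on it.
//         let mut dependents: HashMap<u32, usize> = nodes.iter().map(|&n| (n, 0)).collect();
//         for ds in deps.values() {
//             for &d in ds {
//                 *dependents.entry(d).or_default() += 1;
//             }
//         }
//         // Start from the nodes nobody depends on (the outermost closures).
//         let mut queue: Vec<u32> =
//             nodes.iter().copied().filter(|n| dependents[n] == 0).collect();
//         let mut sorted = Vec::new();
//         while let Some(n) = queue.pop() {
//             sorted.push(n);
//             for &d in deps.get(&n).into_iter().flatten() {
//                 let count = dependents.get_mut(&d).unwrap();
//                 *count -= 1;
//                 if *count == 0 {
//                     queue.push(d);
//                 }
//             }
//         }
//         sorted
//     }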
+ fn sort_closures(&mut self) -> Vec<(ClosureId, Vec<(Ty, Ty, Vec, ExprId)>)> { + let mut deferred_closures = mem::take(&mut self.deferred_closures); + let mut dependents_count: FxHashMap = + deferred_closures.keys().map(|it| ((*it).into(), 0)).collect(); + for deps in self.closure_dependencies.values() { + for dep in deps { + *dependents_count.entry((*dep).into()).or_default() += 1; + } + } + let mut queue: Vec<_> = deferred_closures + .keys() + .copied() + .filter(|&it| dependents_count[&it.into()] == 0) + .collect(); + let mut result = vec![]; + while let Some(it) = queue.pop() { + if let Some(d) = deferred_closures.remove(&it) { + result.push((it.into(), d)); + } + for &dep in self.closure_dependencies.get(&it).into_iter().flat_map(|it| it.iter()) { + let cnt = dependents_count.get_mut(&dep.into()).unwrap(); + *cnt -= 1; + if *cnt == 0 { + queue.push(dep); + } + } + } + assert!(deferred_closures.is_empty(), "we should have analyzed all closures"); + result + } + + pub(crate) fn add_current_closure_dependency(&mut self, dep: InternedClosureId) { + if let Some(c) = self.current_closure + && !dep_creates_cycle(&self.closure_dependencies, &mut FxHashSet::default(), c, dep) + { + self.closure_dependencies.entry(c).or_default().push(dep); + } + + fn dep_creates_cycle( + closure_dependencies: &FxHashMap>, + visited: &mut FxHashSet, + from: InternedClosureId, + to: InternedClosureId, + ) -> bool { + if !visited.insert(from) { + return false; + } + + if from == to { + return true; + } + + if let Some(deps) = closure_dependencies.get(&to) { + for dep in deps { + if dep_creates_cycle(closure_dependencies, visited, from, *dep) { + return true; + } + } + } + + false + } + } +} + +/// Call this only when the last span in the stack isn't a split. +fn apply_adjusts_to_place( + current_capture_span_stack: &mut Vec, + mut r: HirPlace, + adjustments: &[Adjustment], +) -> Option { + let span = *current_capture_span_stack.last().expect("empty capture span stack"); + for adj in adjustments { + match &adj.kind { + Adjust::Deref(None) => { + current_capture_span_stack.push(span); + r.projections.push(ProjectionElem::Deref); + } + _ => return None, + } + } + Some(r) +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs index 761a2564aa799..7930d8b0ed68f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs @@ -1,445 +1,388 @@ -//! Coercion logic. Coercions are certain type conversions that can implicitly -//! happen in certain places, e.g. weakening `&mut` to `&` or deref coercions -//! like going from `&Vec` to `&[T]`. +//! # Type Coercion //! -//! See and -//! `rustc_hir_analysis/check/coercion.rs`. - -use std::iter; - -use chalk_ir::{BoundVar, Goal, Mutability, TyKind, TyVariableKind, cast::Cast}; -use hir_def::{hir::ExprId, lang_item::LangItem}; -use stdx::always; +//! Under certain circumstances we will coerce from one type to another, +//! for example by auto-borrowing. This occurs in situations where the +//! compiler has a firm 'expected type' that was supplied from the user, +//! and where the actual type is similar to that expected type in purpose +//! but not in representation (so actual subtyping is inappropriate). +//! +//! ## Reborrowing +//! +//! Note that if we are expecting a reference, we will *reborrow* +//! even if the argument provided was already a reference. This is +//! 
useful for freezing mut things (that is, when the expected type is &T +//! but you have &mut T) and also for avoiding the linearity +//! of mut things (when the expected is &mut T and you have &mut T). See +//! the various `tests/ui/coerce/*.rs` tests for +//! examples of where this is useful. +//! +//! ## Subtle note +//! +//! When inferring the generic arguments of functions, the argument +//! order is relevant, which can lead to the following edge case: +//! +//! ```ignore (illustrative) +//! fn foo(a: T, b: T) { +//! // ... +//! } +//! +//! foo(&7i32, &mut 7i32); +//! // This compiles, as we first infer `T` to be `&i32`, +//! // and then coerce `&mut 7i32` to `&7i32`. +//! +//! foo(&mut 7i32, &7i32); +//! // This does not compile, as we first infer `T` to be `&mut i32` +//! // and are then unable to coerce `&7i32` to `&mut i32`. +//! ``` + +use chalk_ir::cast::Cast; +use hir_def::{ + CallableDefId, + hir::{ExprId, ExprOrPatId}, + lang_item::LangItem, + signatures::FunctionSignature, +}; +use intern::sym; +use rustc_ast_ir::Mutability; +use rustc_type_ir::{ + TypeAndMut, + error::TypeError, + inherent::{IntoKind, Safety, Ty as _}, +}; +use smallvec::{SmallVec, smallvec}; +use tracing::{debug, instrument}; use triomphe::Arc; use crate::{ - Canonical, DomainGoal, FnAbi, FnPointer, FnSig, Guidance, InEnvironment, Interner, Lifetime, - Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, - autoderef::{Autoderef, AutoderefKind}, - db::HirDatabase, - infer::{ - Adjust, Adjustment, AutoBorrow, InferOk, InferenceContext, OverloadedDeref, PointerCast, - TypeError, TypeMismatch, + Adjust, Adjustment, AutoBorrow, Interner, PointerCast, TargetFeatures, TraitEnvironment, + autoderef::Autoderef, + db::{HirDatabase, InternedClosureId}, + infer::{AllowTwoPhase, InferenceContext, TypeMismatch, unify::InferenceTable}, + next_solver::{ + Binder, CallableIdWrapper, ClauseKind, CoercePredicate, DbInterner, ErrorGuaranteed, + GenericArgs, PolyFnSig, PredicateKind, Region, SolverDefId, TraitRef, Ty, TyKind, + infer::{ + DefineOpaqueTypes, InferCtxt, InferOk, InferResult, + relate::RelateResult, + select::{ImplSource, SelectionError}, + traits::{Obligation, ObligationCause, PredicateObligation, PredicateObligations}, + }, + mapping::{ChalkToNextSolver, NextSolverToChalk}, + obligation_ctxt::ObligationCtxt, }, - utils::ClosureSubst, + utils::TargetFeatureIsSafeInTarget, }; -use super::unify::InferenceTable; - -pub(crate) type CoerceResult = Result, Ty)>, TypeError>; - -/// Do not require any adjustments, i.e. coerce `x -> x`. -fn identity(_: Ty) -> Vec { - vec![] +struct Coerce<'a, 'b, 'db> { + table: &'a mut InferenceTable<'db>, + has_errors: &'a mut bool, + target_features: &'a mut dyn FnMut() -> (&'b TargetFeatures, TargetFeatureIsSafeInTarget), + use_lub: bool, + /// Determines whether or not allow_two_phase_borrow is set on any + /// autoref adjustments we create while coercing. We don't want to + /// allow deref coercions to create two-phase borrows, at least initially, + /// but we do need two-phase borrows for function argument reborrows. + /// See rust#47489 and rust#48598 + /// See docs on the "AllowTwoPhase" type for a more detailed discussion + allow_two_phase: AllowTwoPhase, + /// Whether we allow `NeverToAny` coercions. This is unsound if we're + /// coercing a place expression without it counting as a read in the MIR. + /// This is a side-effect of HIR not really having a great distinction + /// between places and values. 
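// Illustrative sketch, not part of this change: the function-argument reborrow
// situation that `allow_two_phase` (documented a few fields up) exists for, in
// plain surface Rust. With two-phase borrows the `&mut` taken for the receiver
// is only activated after the arguments are evaluated, so this compiles:
//
//     let mut v = vec![1, 2, 3];
//     v.push(v.len()); // mutable reborrow of `v` for `push`, shared read of `v` in the argument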
+ coerce_never: bool, + cause: ObligationCause, } -fn simple(kind: Adjust) -> impl FnOnce(Ty) -> Vec { - move |target| vec![Adjustment { kind, target }] +type CoerceResult<'db> = InferResult<'db, (Vec, Ty<'db>)>; + +/// Coercing a mutable reference to an immutable works, while +/// coercing `&T` to `&mut T` should be forbidden. +fn coerce_mutbls<'db>(from_mutbl: Mutability, to_mutbl: Mutability) -> RelateResult<'db, ()> { + if from_mutbl >= to_mutbl { Ok(()) } else { Err(TypeError::Mutability) } } /// This always returns `Ok(...)`. -fn success( +fn success<'db>( adj: Vec, - target: Ty, - goals: Vec>>, -) -> CoerceResult { - Ok(InferOk { goals, value: (adj, target) }) -} - -pub(super) enum CoercionCause { - // FIXME: Make better use of this. Right now things like return and break without a value - // use it to point to themselves, causing us to report a mismatch on those expressions even - // though technically they themselves are `!` - Expr(ExprId), -} - -#[derive(Clone, Debug)] -pub(super) struct CoerceMany { - expected_ty: Ty, - final_ty: Option, - expressions: Vec, + target: Ty<'db>, + obligations: PredicateObligations<'db>, +) -> CoerceResult<'db> { + Ok(InferOk { value: (adj, target), obligations }) } -impl CoerceMany { - pub(super) fn new(expected: Ty) -> Self { - CoerceMany { expected_ty: expected, final_ty: None, expressions: vec![] } - } - - /// Returns the "expected type" with which this coercion was - /// constructed. This represents the "downward propagated" type - /// that was given to us at the start of typing whatever construct - /// we are typing (e.g., the match expression). - /// - /// Typically, this is used as the expected type when - /// type-checking each of the alternative expressions whose types - /// we are trying to merge. - pub(super) fn expected_ty(&self) -> Ty { - self.expected_ty.clone() - } - - /// Returns the current "merged type", representing our best-guess - /// at the LUB of the expressions we've seen so far (if any). This - /// isn't *final* until you call `self.complete()`, which will return - /// the merged type. - pub(super) fn merged_ty(&self) -> Ty { - self.final_ty.clone().unwrap_or_else(|| self.expected_ty.clone()) +impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> { + #[inline] + fn set_tainted_by_errors(&mut self) { + *self.has_errors = true; } - pub(super) fn complete(self, ctx: &mut InferenceContext<'_>) -> Ty { - if let Some(final_ty) = self.final_ty { - final_ty - } else { - ctx.result.standard_types.never.clone() - } + #[inline] + fn interner(&self) -> DbInterner<'db> { + self.table.interner } - pub(super) fn coerce_forced_unit( - &mut self, - ctx: &mut InferenceContext<'_>, - cause: CoercionCause, - ) { - self.coerce(ctx, None, &ctx.result.standard_types.unit.clone(), cause) + #[inline] + fn infer_ctxt(&self) -> &InferCtxt<'db> { + &self.table.infer_ctxt } - /// Merge two types from different branches, with possible coercion. - /// - /// Mostly this means trying to coerce one to the other, but - /// - if we have two function types for different functions or closures, we need to - /// coerce both to function pointers; - /// - if we were concerned with lifetime subtyping, we'd need to look for a - /// least upper bound. - pub(super) fn coerce( + pub(crate) fn commit_if_ok( &mut self, - ctx: &mut InferenceContext<'_>, - expr: Option, - expr_ty: &Ty, - cause: CoercionCause, - ) { - let expr_ty = ctx.resolve_ty_shallow(expr_ty); - self.expected_ty = ctx.resolve_ty_shallow(&self.expected_ty); - - // Special case: two function types. 
Try to coerce both to - // pointers to have a chance at getting a match. See - // https://github.com/rust-lang/rust/blob/7b805396bf46dce972692a6846ce2ad8481c5f85/src/librustc_typeck/check/coercion.rs#L877-L916 - let sig = match (self.merged_ty().kind(Interner), expr_ty.kind(Interner)) { - (TyKind::FnDef(x, _), TyKind::FnDef(y, _)) - if x == y && ctx.table.unify(&self.merged_ty(), &expr_ty) => - { - None - } - (TyKind::Closure(x, _), TyKind::Closure(y, _)) if x == y => None, - (TyKind::FnDef(..) | TyKind::Closure(..), TyKind::FnDef(..) | TyKind::Closure(..)) => { - // FIXME: we're ignoring safety here. To be more correct, if we have one FnDef and one Closure, - // we should be coercing the closure to a fn pointer of the safety of the FnDef - cov_mark::hit!(coerce_fn_reification); - let sig = - self.merged_ty().callable_sig(ctx.db).expect("FnDef without callable sig"); - Some(sig) - } - _ => None, - }; - if let Some(sig) = sig { - let target_ty = TyKind::Function(sig.to_fn_ptr()).intern(Interner); - let result1 = ctx.table.coerce_inner(self.merged_ty(), &target_ty, CoerceNever::Yes); - let result2 = ctx.table.coerce_inner(expr_ty.clone(), &target_ty, CoerceNever::Yes); - if let (Ok(result1), Ok(result2)) = (result1, result2) { - ctx.table.register_infer_ok(InferOk { value: (), goals: result1.goals }); - for &e in &self.expressions { - ctx.write_expr_adj(e, result1.value.0.clone().into_boxed_slice()); - } - ctx.table.register_infer_ok(InferOk { value: (), goals: result2.goals }); - if let Some(expr) = expr { - ctx.write_expr_adj(expr, result2.value.0.into_boxed_slice()); - self.expressions.push(expr); - } - return self.final_ty = Some(target_ty); + f: impl FnOnce(&mut Self) -> Result, + ) -> Result { + let snapshot = self.table.snapshot(); + let result = f(self); + match result { + Ok(_) => {} + Err(_) => { + self.table.rollback_to(snapshot); } } + result + } - // It might not seem like it, but order is important here: If the expected - // type is a type variable and the new one is `!`, trying it the other - // way around first would mean we make the type variable `!`, instead of - // just marking it as possibly diverging. - // - // - [Comment from rustc](https://github.com/rust-lang/rust/blob/5ff18d0eaefd1bd9ab8ec33dab2404a44e7631ed/compiler/rustc_hir_typeck/src/coercion.rs#L1334-L1335) - // First try to coerce the new expression to the type of the previous ones, - // but only if the new expression has no coercion already applied to it. 
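// Illustrative sketch, not part of this change: the "two function types"
// special case handled above is what makes this surface Rust compile; the two
// distinct `FnDef` types are both reified to the common pointer type
// `fn(i32) -> i32`:
//
//     fn double(x: i32) -> i32 { x * 2 }
//     fn negate(x: i32) -> i32 { -x }
//     let f: fn(i32) -> i32 = if true { double } else { negate };
//     let g = if true { double } else { negate }; // also fine: the merged type is the fn pointer type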
- if expr.is_none_or(|expr| !ctx.result.expr_adjustments.contains_key(&expr)) - && let Ok(res) = ctx.coerce(expr, &expr_ty, &self.merged_ty(), CoerceNever::Yes) - { - self.final_ty = Some(res); - if let Some(expr) = expr { - self.expressions.push(expr); - } - return; - } + fn unify_raw(&mut self, a: Ty<'db>, b: Ty<'db>) -> InferResult<'db, Ty<'db>> { + debug!("unify(a: {:?}, b: {:?}, use_lub: {})", a, b, self.use_lub); + self.commit_if_ok(|this| { + let at = this.infer_ctxt().at(&this.cause, this.table.param_env); - if let Ok((adjustments, res)) = - ctx.coerce_inner(&self.merged_ty(), &expr_ty, CoerceNever::Yes) - { - self.final_ty = Some(res); - for &e in &self.expressions { - ctx.write_expr_adj(e, adjustments.clone().into_boxed_slice()); - } - } else { - match cause { - CoercionCause::Expr(id) => { - ctx.result.type_mismatches.insert( - id.into(), - TypeMismatch { expected: self.merged_ty(), actual: expr_ty.clone() }, - ); + let res = if this.use_lub { + at.lub(b, a) + } else { + at.sup(DefineOpaqueTypes::Yes, b, a) + .map(|InferOk { value: (), obligations }| InferOk { value: b, obligations }) + }; + + // In the new solver, lazy norm may allow us to shallowly equate + // more types, but we emit possibly impossible-to-satisfy obligations. + // Filter these cases out to make sure our coercion is more accurate. + match res { + Ok(InferOk { value, obligations }) => { + let mut ocx = ObligationCtxt::new(this.infer_ctxt()); + ocx.register_obligations(obligations); + if ocx.select_where_possible().is_empty() { + Ok(InferOk { value, obligations: ocx.into_pending_obligations() }) + } else { + Err(TypeError::Mismatch) + } } + res => res, } - cov_mark::hit!(coerce_merge_fail_fallback); - } - if let Some(expr) = expr { - self.expressions.push(expr); - } + }) } -} - -pub fn could_coerce( - db: &dyn HirDatabase, - env: Arc, - tys: &Canonical<(Ty, Ty)>, -) -> bool { - coerce(db, env, tys).is_ok() -} - -pub(crate) fn coerce( - db: &dyn HirDatabase, - env: Arc, - tys: &Canonical<(Ty, Ty)>, -) -> Result<(Vec, Ty), TypeError> { - let mut table = InferenceTable::new(db, env); - let vars = table.fresh_subst(tys.binders.as_slice(Interner)); - let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner); - let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner); - let (adjustments, ty) = table.coerce(&ty1_with_vars, &ty2_with_vars, CoerceNever::Yes)?; - // default any type vars that weren't unified back to their original bound vars - // (kind of hacky) - let find_var = |iv| { - vars.iter(Interner).position(|v| match v.interned() { - chalk_ir::GenericArgData::Ty(ty) => ty.inference_var(Interner), - chalk_ir::GenericArgData::Lifetime(lt) => lt.inference_var(Interner), - chalk_ir::GenericArgData::Const(c) => c.inference_var(Interner), - } == Some(iv)) - }; - let fallback = |iv, kind, default, binder| match kind { - chalk_ir::VariableKind::Ty(_ty_kind) => find_var(iv) - .map_or(default, |i| BoundVar::new(binder, i).to_ty(Interner).cast(Interner)), - chalk_ir::VariableKind::Lifetime => find_var(iv) - .map_or(default, |i| BoundVar::new(binder, i).to_lifetime(Interner).cast(Interner)), - chalk_ir::VariableKind::Const(ty) => find_var(iv) - .map_or(default, |i| BoundVar::new(binder, i).to_const(Interner, ty).cast(Interner)), - }; - // FIXME also map the types in the adjustments - Ok((adjustments, table.resolve_with_fallback(ty, &fallback))) -} -#[derive(Clone, Copy, PartialEq, Eq)] -pub(crate) enum CoerceNever { - Yes, - No, -} - -impl InferenceContext<'_> { - /// Unify two types, but may coerce the first one to 
the second one - /// using "implicit coercion rules" if needed. - pub(super) fn coerce( - &mut self, - expr: Option, - from_ty: &Ty, - to_ty: &Ty, - // [Comment from rustc](https://github.com/rust-lang/rust/blob/4cc494bbfe9911d24f3ee521f98d5c6bb7e3ffe8/compiler/rustc_hir_typeck/src/coercion.rs#L85-L89) - // Whether we allow `NeverToAny` coercions. This is unsound if we're - // coercing a place expression without it counting as a read in the MIR. - // This is a side-effect of HIR not really having a great distinction - // between places and values. - coerce_never: CoerceNever, - ) -> Result { - let (adjustments, ty) = self.coerce_inner(from_ty, to_ty, coerce_never)?; - if let Some(expr) = expr { - self.write_expr_adj(expr, adjustments.into_boxed_slice()); - } - Ok(ty) + /// Unify two types (using sub or lub). + fn unify(&mut self, a: Ty<'db>, b: Ty<'db>) -> CoerceResult<'db> { + self.unify_raw(a, b) + .and_then(|InferOk { value: ty, obligations }| success(vec![], ty, obligations)) } - fn coerce_inner( + /// Unify two types (using sub or lub) and produce a specific coercion. + fn unify_and( &mut self, - from_ty: &Ty, - to_ty: &Ty, - coerce_never: CoerceNever, - ) -> Result<(Vec, Ty), TypeError> { - let from_ty = self.resolve_ty_shallow(from_ty); - let to_ty = self.resolve_ty_shallow(to_ty); - self.table.coerce(&from_ty, &to_ty, coerce_never) + a: Ty<'db>, + b: Ty<'db>, + adjustments: impl IntoIterator, + final_adjustment: Adjust, + ) -> CoerceResult<'db> { + self.unify_raw(a, b).and_then(|InferOk { value: ty, obligations }| { + success( + adjustments + .into_iter() + .chain(std::iter::once(Adjustment { + target: ty.to_chalk(self.interner()), + kind: final_adjustment, + })) + .collect(), + ty, + obligations, + ) + }) } -} -impl InferenceTable<'_> { - /// Unify two types, but may coerce the first one to the second one - /// using "implicit coercion rules" if needed. - pub(crate) fn coerce( - &mut self, - from_ty: &Ty, - to_ty: &Ty, - coerce_never: CoerceNever, - ) -> Result<(Vec, Ty), TypeError> { - let from_ty = self.resolve_ty_shallow(from_ty); - let to_ty = self.resolve_ty_shallow(to_ty); - match self.coerce_inner(from_ty, &to_ty, coerce_never) { - Ok(InferOk { value: (adjustments, ty), goals }) => { - self.register_infer_ok(InferOk { value: (), goals }); - Ok((adjustments, ty)) - } - Err(e) => { - // FIXME deal with error - Err(e) + #[instrument(skip(self))] + fn coerce(&mut self, a: Ty<'db>, b: Ty<'db>) -> CoerceResult<'db> { + // First, remove any resolved type variables (at the top level, at least): + let a = self.table.shallow_resolve(a); + let b = self.table.shallow_resolve(b); + debug!("Coerce.tys({:?} => {:?})", a, b); + + // Coercing from `!` to any type is allowed: + if a.is_never() { + // If we're coercing into an inference var, mark it as possibly diverging. + // FIXME: rustc does this differently. + if let TyKind::Infer(rustc_type_ir::TyVar(b)) = b.kind() { + self.table.set_diverging(b.as_u32().into(), chalk_ir::TyVariableKind::General); } - } - } - fn coerce_inner(&mut self, from_ty: Ty, to_ty: &Ty, coerce_never: CoerceNever) -> CoerceResult { - if from_ty.is_never() { - if let TyKind::InferenceVar(tv, TyVariableKind::General) = to_ty.kind(Interner) { - self.set_diverging(*tv, true); - } - if coerce_never == CoerceNever::Yes { - // Subtle: If we are coercing from `!` to `?T`, where `?T` is an unbound - // type variable, we want `?T` to fallback to `!` if not - // otherwise constrained. 
An example where this arises: - // - // let _: Option = Some({ return; }); - // - // here, we would coerce from `!` to `?T`. - return success(simple(Adjust::NeverToAny)(to_ty.clone()), to_ty.clone(), vec![]); + if self.coerce_never { + return success( + vec![Adjustment { + kind: Adjust::NeverToAny, + target: b.to_chalk(self.interner()), + }], + b, + PredicateObligations::new(), + ); } else { - return self.unify_and(&from_ty, to_ty, identity); + // Otherwise the only coercion we can do is unification. + return self.unify(a, b); } } // If we are coercing into a TAIT, coerce into its proxy inference var, instead. - let mut to_ty = to_ty; - let _to; - if let Some(tait_table) = &self.tait_coercion_table - && let TyKind::OpaqueType(opaque_ty_id, _) = to_ty.kind(Interner) - && !matches!(from_ty.kind(Interner), TyKind::InferenceVar(..) | TyKind::OpaqueType(..)) - && let Some(ty) = tait_table.get(opaque_ty_id) + // FIXME(next-solver): This should not be here. This is not how rustc does thing, and it also not allows us + // to normalize opaques defined in our scopes. Instead, we should properly register + // `TypingMode::Analysis::defining_opaque_types_and_generators`, and rely on the solver to reveal + // them for us (we'll also need some global-like registry for the values, something we cannot + // really implement, therefore we can really support only RPITs and ITIAT or the new `#[define_opaque]` + // TAIT, not the old global TAIT). + let mut b = b; + if let Some(tait_table) = &self.table.tait_coercion_table + && let TyKind::Alias(rustc_type_ir::Opaque, opaque_ty) = b.kind() + && let SolverDefId::InternedOpaqueTyId(opaque_ty_id) = opaque_ty.def_id + && !matches!(a.kind(), TyKind::Infer(..) | TyKind::Alias(rustc_type_ir::Opaque, _)) + && let Some(ty) = tait_table.get(&opaque_ty_id.into()) { - _to = ty.clone(); - to_ty = &_to; + b = ty.to_nextsolver(self.interner()); + b = self.table.shallow_resolve(b); + } + let b = b; + + // Coercing *from* an unresolved inference variable means that + // we have no information about the source type. This will always + // ultimately fall back to some form of subtyping. + if a.is_infer() { + return self.coerce_from_inference_variable(a, b); } // Consider coercing the subtype to a DST - if let Ok(ret) = self.try_coerce_unsized(&from_ty, to_ty) { - return Ok(ret); + // + // NOTE: this is wrapped in a `commit_if_ok` because it creates + // a "spurious" type variable, and we don't want to have that + // type variable in memory if the coercion fails. + let unsize = self.commit_if_ok(|this| this.coerce_unsized(a, b)); + match unsize { + Ok(_) => { + debug!("coerce: unsize successful"); + return unsize; + } + Err(error) => { + debug!(?error, "coerce: unsize failed"); + } } - // Examine the supertype and consider auto-borrowing. - match to_ty.kind(Interner) { - TyKind::Raw(mt, _) => return self.coerce_ptr(from_ty, to_ty, *mt), - TyKind::Ref(mt, lt, _) => return self.coerce_ref(from_ty, to_ty, *mt, lt), + // Examine the supertype and consider type-specific coercions, such + // as auto-borrowing, coercing pointer mutability, a `dyn*` coercion, + // or pin-ergonomics. + match b.kind() { + TyKind::RawPtr(_, b_mutbl) => { + return self.coerce_raw_ptr(a, b, b_mutbl); + } + TyKind::Ref(r_b, _, mutbl_b) => { + return self.coerce_borrowed_pointer(a, b, r_b, mutbl_b); + } _ => {} } - match from_ty.kind(Interner) { + match a.kind() { TyKind::FnDef(..) => { // Function items are coercible to any closure // type; function pointers are not (that would // require double indirection). 
// Additionally, we permit coercion of function // items to drop the unsafe qualifier. - self.coerce_from_fn_item(from_ty, to_ty) + self.coerce_from_fn_item(a, b) } - TyKind::Function(from_fn_ptr) => { + TyKind::FnPtr(a_sig_tys, a_hdr) => { // We permit coercion of fn pointers to drop the // unsafe qualifier. - self.coerce_from_fn_pointer(from_ty.clone(), from_fn_ptr, to_ty) + self.coerce_from_fn_pointer(a_sig_tys.with(a_hdr), b) } - TyKind::Closure(_, from_substs) => { + TyKind::Closure(closure_def_id_a, args_a) => { // Non-capturing closures are coercible to // function pointers or unsafe function pointers. // It cannot convert closures that require unsafe. - self.coerce_closure_to_fn(from_ty.clone(), from_substs, to_ty) + self.coerce_closure_to_fn(a, closure_def_id_a.0, args_a, b) } _ => { // Otherwise, just use unification rules. - self.unify_and(&from_ty, to_ty, identity) + self.unify(a, b) } } } - /// Unify two types (using sub or lub) and produce a specific coercion. - fn unify_and(&mut self, t1: &Ty, t2: &Ty, f: F) -> CoerceResult - where - F: FnOnce(Ty) -> Vec, - { - self.try_unify(t1, t2) - .and_then(|InferOk { goals, .. }| success(f(t1.clone()), t1.clone(), goals)) - } - - fn coerce_ptr(&mut self, from_ty: Ty, to_ty: &Ty, to_mt: Mutability) -> CoerceResult { - let (is_ref, from_mt, from_inner) = match from_ty.kind(Interner) { - TyKind::Ref(mt, _, ty) => (true, mt, ty), - TyKind::Raw(mt, ty) => (false, mt, ty), - _ => return self.unify_and(&from_ty, to_ty, identity), - }; - - coerce_mutabilities(*from_mt, to_mt)?; - - // Check that the types which they point at are compatible. - let from_raw = TyKind::Raw(to_mt, from_inner.clone()).intern(Interner); + /// Coercing *from* an inference variable. In this case, we have no information + /// about the source type, so we can't really do a true coercion and we always + /// fall back to subtyping (`unify_and`). + fn coerce_from_inference_variable(&mut self, a: Ty<'db>, b: Ty<'db>) -> CoerceResult<'db> { + debug!("coerce_from_inference_variable(a={:?}, b={:?})", a, b); + debug_assert!(a.is_infer() && self.table.shallow_resolve(a) == a); + debug_assert!(self.table.shallow_resolve(b) == b); + + if b.is_infer() { + // Two unresolved type variables: create a `Coerce` predicate. + let target_ty = if self.use_lub { self.table.next_ty_var() } else { b }; + + let mut obligations = PredicateObligations::with_capacity(2); + for &source_ty in &[a, b] { + if source_ty != target_ty { + obligations.push(Obligation::new( + self.interner(), + self.cause.clone(), + self.table.param_env, + Binder::dummy(PredicateKind::Coerce(CoercePredicate { + a: source_ty, + b: target_ty, + })), + )); + } + } - // Although references and raw ptrs have the same - // representation, we still register an Adjust::DerefRef so that - // regionck knows that the region for `a` must be valid here. - if is_ref { - self.unify_and(&from_raw, to_ty, |target| { - vec![ - Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() }, - Adjustment { kind: Adjust::Borrow(AutoBorrow::RawPtr(to_mt)), target }, - ] - }) - } else if *from_mt != to_mt { - self.unify_and( - &from_raw, - to_ty, - simple(Adjust::Pointer(PointerCast::MutToConstPointer)), - ) + debug!( + "coerce_from_inference_variable: two inference variables, target_ty={:?}, obligations={:?}", + target_ty, obligations + ); + success(vec![], target_ty, obligations) } else { - self.unify_and(&from_raw, to_ty, identity) + // One unresolved type variable: just apply subtyping, we may be able + // to do something useful. 
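// Illustrative sketch, not part of this change: the "one unresolved type
// variable" case described above, in plain surface Rust. At the second line
// the source type of the coercion is still an inference variable, so plain
// unification is all that is needed:
//
//     let x = Default::default(); // x: ?T, nothing known yet
//     let y: u32 = x;             // coercion site sees ?T -> u32 and just unifies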
+ self.unify(a, b) } } /// Reborrows `&mut A` to `&mut B` and `&(mut) A` to `&B`. /// To match `A` with `B`, autoderef will be performed, /// calling `deref`/`deref_mut` where necessary. - fn coerce_ref( + fn coerce_borrowed_pointer( &mut self, - from_ty: Ty, - to_ty: &Ty, - to_mt: Mutability, - to_lt: &Lifetime, - ) -> CoerceResult { - let (_from_lt, from_mt) = match from_ty.kind(Interner) { - TyKind::Ref(mt, lt, _) => { - coerce_mutabilities(*mt, to_mt)?; - (lt.clone(), *mt) // clone is probably not good? - } - _ => return self.unify_and(&from_ty, to_ty, identity), + a: Ty<'db>, + b: Ty<'db>, + r_b: Region<'db>, + mutbl_b: Mutability, + ) -> CoerceResult<'db> { + debug!("coerce_borrowed_pointer(a={:?}, b={:?})", a, b); + debug_assert!(self.table.shallow_resolve(a) == a); + debug_assert!(self.table.shallow_resolve(b) == b); + + // If we have a parameter of type `&M T_a` and the value + // provided is `expr`, we will be adding an implicit borrow, + // meaning that we convert `f(expr)` to `f(&M *expr)`. Therefore, + // to type check, we will construct the type that `&M*expr` would + // yield. + + let (r_a, mt_a) = match a.kind() { + TyKind::Ref(r_a, ty, mutbl) => { + let mt_a = TypeAndMut::> { ty, mutbl }; + coerce_mutbls(mt_a.mutbl, mutbl_b)?; + (r_a, mt_a) + } + _ => return self.unify(a, b), }; - // NOTE: this code is mostly copied and adapted from rustc, and - // currently more complicated than necessary, carrying errors around - // etc.. This complication will become necessary when we actually track - // details of coercion errors though, so I think it's useful to leave - // the structure like it is. - - let snapshot = self.snapshot(); - - let mut autoderef = Autoderef::new(self, from_ty.clone(), false, false); let mut first_error = None; + let mut r_borrow_var = None; + let mut autoderef = Autoderef::new(self.table, a); let mut found = None; while let Some((referent_ty, autoderefs)) = autoderef.next() { @@ -449,7 +392,7 @@ impl InferenceTable<'_> { continue; } - // At this point, we have deref'd `a` to `referent_ty`. So + // At this point, we have deref'd `a` to `referent_ty`. So // imagine we are coercing from `&'a mut Vec` to `&'b mut [T]`. // In the autoderef loop for `&'a mut Vec`, we would get // three callbacks: @@ -471,11 +414,85 @@ impl InferenceTable<'_> { // compare those. Note that this means we use the target // mutability [1], since it may be that we are coercing // from `&mut T` to `&U`. - let lt = to_lt; // FIXME: Involve rustc LUB and SUB flag checks - let derefd_from_ty = TyKind::Ref(to_mt, lt.clone(), referent_ty).intern(Interner); - match autoderef.table.try_unify(&derefd_from_ty, to_ty) { - Ok(result) => { - found = Some(result.map(|()| derefd_from_ty)); + // + // One fine point concerns the region that we use. We + // choose the region such that the region of the final + // type that results from `unify` will be the region we + // want for the autoref: + // + // - if in sub mode, that means we want to use `'b` (the + // region from the target reference) for both + // pointers [2]. This is because sub mode (somewhat + // arbitrarily) returns the subtype region. In the case + // where we are coercing to a target type, we know we + // want to use that target type region (`'b`) because -- + // for the program to type-check -- it must be the + // smaller of the two. + // - One fine point. It may be surprising that we can + // use `'b` without relating `'a` and `'b`. 
The reason + // that this is ok is that what we produce is + // effectively a `&'b *x` expression (if you could + // annotate the region of a borrow), and regionck has + // code that adds edges from the region of a borrow + // (`'b`, here) into the regions in the borrowed + // expression (`*x`, here). (Search for "link".) + // - if in lub mode, things can get fairly complicated. The + // easiest thing is just to make a fresh + // region variable [4], which effectively means we defer + // the decision to region inference (and regionck, which will add + // some more edges to this variable). However, this can wind up + // creating a crippling number of variables in some cases -- + // e.g., #32278 -- so we optimize one particular case [3]. + // Let me try to explain with some examples: + // - The "running example" above represents the simple case, + // where we have one `&` reference at the outer level and + // ownership all the rest of the way down. In this case, + // we want `LUB('a, 'b)` as the resulting region. + // - However, if there are nested borrows, that region is + // too strong. Consider a coercion from `&'a &'x Rc` to + // `&'b T`. In this case, `'a` is actually irrelevant. + // The pointer we want is `LUB('x, 'b`). If we choose `LUB('a,'b)` + // we get spurious errors (`ui/regions-lub-ref-ref-rc.rs`). + // (The errors actually show up in borrowck, typically, because + // this extra edge causes the region `'a` to be inferred to something + // too big, which then results in borrowck errors.) + // - We could track the innermost shared reference, but there is already + // code in regionck that has the job of creating links between + // the region of a borrow and the regions in the thing being + // borrowed (here, `'a` and `'x`), and it knows how to handle + // all the various cases. So instead we just make a region variable + // and let regionck figure it out. + let r = if !self.use_lub { + r_b // [2] above + } else if autoderefs == 1 { + r_a // [3] above + } else { + if r_borrow_var.is_none() { + // create var lazily, at most once + let r = autoderef.table.next_region_var(); + r_borrow_var = Some(r); // [4] above + } + r_borrow_var.unwrap() + }; + let derefd_ty_a = Ty::new_ref( + autoderef.table.interner, + r, + referent_ty, + mutbl_b, // [1] above + ); + // We need to construct a new `Coerce` because of lifetimes. + let mut coerce = Coerce { + table: autoderef.table, + has_errors: self.has_errors, + target_features: self.target_features, + use_lub: self.use_lub, + allow_two_phase: self.allow_two_phase, + coerce_never: self.coerce_never, + cause: self.cause.clone(), + }; + match coerce.unify_raw(derefd_ty_a, b) { + Ok(ok) => { + found = Some(ok); break; } Err(err) => { @@ -491,18 +508,24 @@ impl InferenceTable<'_> { // (e.g., in example above, the failure from relating `Vec` // to the target type), since that should be the least // confusing. - let InferOk { value: ty, goals } = match found { - Some(d) => d, - None => { - self.rollback_to(snapshot); - let err = first_error.expect("coerce_borrowed_pointer had no error"); - return Err(err); + let Some(InferOk { value: ty, mut obligations }) = found else { + if let Some(first_error) = first_error { + debug!("coerce_borrowed_pointer: failed with err = {:?}", first_error); + return Err(first_error); + } else { + // This may happen in the new trait solver since autoderef requires + // the pointee to be structurally normalizable, or else it'll just bail. 
+ // So when we have a type like `&`, then we get no + // autoderef steps (even though there should be at least one). That means + // we get no type mismatches, since the loop above just exits early. + return Err(TypeError::Mismatch); } }; - if ty == from_ty && from_mt == Mutability::Not && autoderef.step_count() == 1 { + + if ty == a && mt_a.mutbl.is_not() && autoderef.step_count() == 1 { // As a special case, if we would produce `&'a *x`, that's // a total no-op. We end up with the type `&'a T` just as - // we started with. In that case, just skip it + // we started with. In that case, just skip it // altogether. This is just an optimization. // // Note that for `&mut`, we DO want to reborrow -- @@ -511,284 +534,1091 @@ impl InferenceTable<'_> { // `self.x` both have `&mut `type would be a move of // `self.x`, but we auto-coerce it to `foo(&mut *self.x)`, // which is a borrow. - always!(to_mt == Mutability::Not); // can only coerce &T -> &U - return success(vec![], ty, goals); + assert!(mutbl_b.is_not()); // can only coerce &T -> &U + return success(vec![], ty, obligations); } - let mut adjustments = auto_deref_adjust_steps(&autoderef); + let InferOk { value: mut adjustments, obligations: o } = + autoderef.adjust_steps_as_infer_ok(); + obligations.extend(o); + + // Now apply the autoref. We have to extract the region out of + // the final ref type we got. + let TyKind::Ref(region, _, _) = ty.kind() else { + panic!("expected a ref type, got {:?}", ty); + }; adjustments.push(Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(to_lt.clone(), to_mt)), - target: ty.clone(), + kind: Adjust::Borrow(AutoBorrow::Ref( + region.to_chalk(self.interner()), + mutbl_b.to_chalk(self.interner()), + )), + target: ty.to_chalk(self.interner()), }); - success(adjustments, ty, goals) + debug!("coerce_borrowed_pointer: succeeded ty={:?} adjustments={:?}", ty, adjustments); + + success(adjustments, ty, obligations) } - /// Attempts to coerce from the type of a Rust function item into a function pointer. - fn coerce_from_fn_item(&mut self, from_ty: Ty, to_ty: &Ty) -> CoerceResult { - match to_ty.kind(Interner) { - TyKind::Function(_) => { - let from_sig = from_ty.callable_sig(self.db).expect("FnDef had no sig"); - - // FIXME check ABI: Intrinsics are not coercible to function pointers - // FIXME Safe `#[target_feature]` functions are not assignable to safe fn pointers (RFC 2396) - - // FIXME rustc normalizes assoc types in the sig here, not sure if necessary - - let from_sig = from_sig.to_fn_ptr(); - let from_fn_pointer = TyKind::Function(from_sig.clone()).intern(Interner); - let ok = self.coerce_from_safe_fn( - from_fn_pointer.clone(), - &from_sig, - to_ty, - |unsafe_ty| { - vec![ - Adjustment { - kind: Adjust::Pointer(PointerCast::ReifyFnPointer), - target: from_fn_pointer, - }, - Adjustment { - kind: Adjust::Pointer(PointerCast::UnsafeFnPointer), - target: unsafe_ty, - }, - ] + /// Performs [unsized coercion] by emulating a fulfillment loop on a + /// `CoerceUnsized` goal until all `CoerceUnsized` and `Unsize` goals + /// are successfully selected. 
+ /// + /// [unsized coercion](https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions) + #[instrument(skip(self), level = "debug")] + fn coerce_unsized(&mut self, source: Ty<'db>, target: Ty<'db>) -> CoerceResult<'db> { + debug!(?source, ?target); + debug_assert!(self.table.shallow_resolve(source) == source); + debug_assert!(self.table.shallow_resolve(target) == target); + + // We don't apply any coercions incase either the source or target + // aren't sufficiently well known but tend to instead just equate + // them both. + if source.is_infer() { + debug!("coerce_unsized: source is a TyVar, bailing out"); + return Err(TypeError::Mismatch); + } + if target.is_infer() { + debug!("coerce_unsized: target is a TyVar, bailing out"); + return Err(TypeError::Mismatch); + } + + // This is an optimization because coercion is one of the most common + // operations that we do in typeck, since it happens at every assignment + // and call arg (among other positions). + // + // These targets are known to never be RHS in `LHS: CoerceUnsized`. + // That's because these are built-in types for which a core-provided impl + // doesn't exist, and for which a user-written impl is invalid. + // + // This is technically incomplete when users write impossible bounds like + // `where T: CoerceUnsized`, for example, but that trait is unstable + // and coercion is allowed to be incomplete. The only case where this matters + // is impossible bounds. + // + // Note that some of these types implement `LHS: Unsize`, but they + // do not implement *`CoerceUnsized`* which is the root obligation of the + // check below. + match target.kind() { + TyKind::Bool + | TyKind::Char + | TyKind::Int(_) + | TyKind::Uint(_) + | TyKind::Float(_) + | TyKind::Infer(rustc_type_ir::IntVar(_) | rustc_type_ir::FloatVar(_)) + | TyKind::Str + | TyKind::Array(_, _) + | TyKind::Slice(_) + | TyKind::FnDef(_, _) + | TyKind::FnPtr(_, _) + | TyKind::Dynamic(_, _) + | TyKind::Closure(_, _) + | TyKind::CoroutineClosure(_, _) + | TyKind::Coroutine(_, _) + | TyKind::CoroutineWitness(_, _) + | TyKind::Never + | TyKind::Tuple(_) => return Err(TypeError::Mismatch), + _ => {} + } + // Additionally, we ignore `&str -> &str` coercions, which happen very + // commonly since strings are one of the most used argument types in Rust, + // we do coercions when type checking call expressions. + if let TyKind::Ref(_, source_pointee, Mutability::Not) = source.kind() + && source_pointee.is_str() + && let TyKind::Ref(_, target_pointee, Mutability::Not) = target.kind() + && target_pointee.is_str() + { + return Err(TypeError::Mismatch); + } + + let traits = ( + LangItem::Unsize.resolve_trait(self.table.db, self.table.trait_env.krate), + LangItem::CoerceUnsized.resolve_trait(self.table.db, self.table.trait_env.krate), + ); + let (Some(unsize_did), Some(coerce_unsized_did)) = traits else { + debug!("missing Unsize or CoerceUnsized traits"); + return Err(TypeError::Mismatch); + }; + + // Note, we want to avoid unnecessary unsizing. We don't want to coerce to + // a DST unless we have to. This currently comes out in the wash since + // we can't unify [T] with U. But to properly support DST, we need to allow + // that, at which point we will need extra checks on the target here. + + // Handle reborrows before selecting `Source: CoerceUnsized`. 
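// A minimal source-level sketch of the unsizings this loop ends up proving
// (the bindings below are illustrative only):
fn unsize_examples() {
    use std::fmt::Debug;
    // `[i32; 3]: Unsize<[i32]>` drives `&[i32; 3] -> &[i32]`.
    let a = [1, 2, 3];
    let s: &[i32] = &a;
    // `u32: Unsize<dyn Debug>` drives `&u32 -> &dyn Debug`.
    let d: &dyn Debug = &1u32;
    // Smart pointers go through `CoerceUnsized`, e.g. `Box<[i32; 3]> -> Box<[i32]>`.
    let b: Box<[i32]> = Box::new([1, 2, 3]);
    // The reborrow match below covers cases like `&mut [i32; 3] -> &[i32]`,
    // which need a deref + autoref in front of the unsize adjustment.
    let mut a2 = [1, 2, 3];
    let r: &mut [i32; 3] = &mut a2;
    let s2: &[i32] = r;
    let _ = (s, d, b, s2);
}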
+ let reborrow = match (source.kind(), target.kind()) { + (TyKind::Ref(_, ty_a, mutbl_a), TyKind::Ref(_, _, mutbl_b)) => { + coerce_mutbls(mutbl_a, mutbl_b)?; + + let r_borrow = self.table.next_region_var(); + + // We don't allow two-phase borrows here, at least for initial + // implementation. If it happens that this coercion is a function argument, + // the reborrow in coerce_borrowed_ptr will pick it up. + // let mutbl = AutoBorrowMutability::new(mutbl_b, AllowTwoPhase::No); + let mutbl = mutbl_b.to_chalk(self.interner()); + + Some(( + Adjustment { + kind: Adjust::Deref(None), + target: ty_a.to_chalk(self.interner()), }, - simple(Adjust::Pointer(PointerCast::ReifyFnPointer)), - )?; + Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref( + r_borrow.to_chalk(self.interner()), + mutbl, + )), + target: Ty::new_ref(self.interner(), r_borrow, ty_a, mutbl_b) + .to_chalk(self.interner()), + }, + )) + } + (TyKind::Ref(_, ty_a, mt_a), TyKind::RawPtr(_, mt_b)) => { + coerce_mutbls(mt_a, mt_b)?; + + Some(( + Adjustment { + kind: Adjust::Deref(None), + target: ty_a.to_chalk(self.interner()), + }, + Adjustment { + kind: Adjust::Borrow(AutoBorrow::RawPtr(mt_b.to_chalk(self.interner()))), + target: Ty::new_ptr(self.interner(), ty_a, mt_b).to_chalk(self.interner()), + }, + )) + } + _ => None, + }; + let coerce_source = + reborrow.as_ref().map_or(source, |(_, r)| r.target.to_nextsolver(self.interner())); + + // Setup either a subtyping or a LUB relationship between + // the `CoerceUnsized` target type and the expected type. + // We only have the latter, so we use an inference variable + // for the former and let type inference do the rest. + let coerce_target = self.table.next_ty_var(); + + let mut coercion = self.unify_and( + coerce_target, + target, + reborrow.into_iter().flat_map(|(deref, autoref)| [deref, autoref]), + Adjust::Pointer(PointerCast::Unsize), + )?; + + // Create an obligation for `Source: CoerceUnsized`. + let cause = self.cause.clone(); + + // Use a FIFO queue for this custom fulfillment procedure. + // + // A Vec (or SmallVec) is not a natural choice for a queue. However, + // this code path is hot, and this queue usually has a max length of 1 + // and almost never more than 3. By using a SmallVec we avoid an + // allocation, at the (very small) cost of (occasionally) having to + // shift subsequent elements down when removing the front element. + let mut queue: SmallVec<[PredicateObligation<'db>; 4]> = smallvec![Obligation::new( + self.interner(), + cause, + self.table.param_env, + TraitRef::new( + self.interner(), + coerce_unsized_did.into(), + [coerce_source, coerce_target] + ) + )]; + // Keep resolving `CoerceUnsized` and `Unsize` predicates to avoid + // emitting a coercion in cases like `Foo<$1>` -> `Foo<$2>`, where + // inference might unify those two inner type variables later. + let traits = [coerce_unsized_did, unsize_did]; + while !queue.is_empty() { + let obligation = queue.remove(0); + let trait_pred = match obligation.predicate.kind().no_bound_vars() { + Some(PredicateKind::Clause(ClauseKind::Trait(trait_pred))) + if traits.contains(&trait_pred.def_id().0) => + { + self.infer_ctxt().resolve_vars_if_possible(trait_pred) + } + // Eagerly process alias-relate obligations in new trait solver, + // since these can be emitted in the process of solving trait goals, + // but we need to constrain vars before processing goals mentioning + // them. 
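// A standalone sketch of the queue shape described above, using the
// `smallvec` crate this crate already depends on (values are illustrative):
fn drain_small_fifo() {
    use smallvec::{SmallVec, smallvec};
    // Inline capacity 4: the common 1-3 element case never touches the heap.
    let mut queue: SmallVec<[u32; 4]> = smallvec![1];
    queue.extend([2, 3]);
    while !queue.is_empty() {
        // Popping from the front shifts the tail down, which is cheap at
        // these lengths.
        let front = queue.remove(0);
        let _ = front;
    }
}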
+ Some(PredicateKind::AliasRelate(..)) => { + let mut ocx = ObligationCtxt::new(self.infer_ctxt()); + ocx.register_obligation(obligation); + if !ocx.select_where_possible().is_empty() { + return Err(TypeError::Mismatch); + } + coercion.obligations.extend(ocx.into_pending_obligations()); + continue; + } + _ => { + coercion.obligations.push(obligation); + continue; + } + }; + debug!("coerce_unsized resolve step: {:?}", trait_pred); + match self.infer_ctxt().select(&obligation.with(self.interner(), trait_pred)) { + // Uncertain or unimplemented. + Ok(None) => { + if trait_pred.def_id().0 == unsize_did { + let self_ty = trait_pred.self_ty(); + let unsize_ty = trait_pred.trait_ref.args.inner()[1].expect_ty(); + debug!("coerce_unsized: ambiguous unsize case for {:?}", trait_pred); + match (self_ty.kind(), unsize_ty.kind()) { + (TyKind::Infer(rustc_type_ir::TyVar(v)), TyKind::Dynamic(..)) + if self.table.type_var_is_sized(v) => + { + debug!("coerce_unsized: have sized infer {:?}", v); + coercion.obligations.push(obligation); + // `$0: Unsize` where we know that `$0: Sized`, try going + // for unsizing. + } + _ => { + // Some other case for `$0: Unsize`. Note that we + // hit this case even if `Something` is a sized type, so just + // don't do the coercion. + debug!("coerce_unsized: ambiguous unsize"); + return Err(TypeError::Mismatch); + } + } + } else { + debug!("coerce_unsized: early return - ambiguous"); + if !coerce_source.references_non_lt_error() + && !coerce_target.references_non_lt_error() + { + // rustc always early-returns here, even when the types contains errors. However not bailing + // improves error recovery, and while we don't implement generic consts properly, it also helps + // correct code. + return Err(TypeError::Mismatch); + } + } + } + Err(SelectionError::Unimplemented) => { + debug!("coerce_unsized: early return - can't prove obligation"); + return Err(TypeError::Mismatch); + } + + Err(SelectionError::TraitDynIncompatible(_)) => { + // Dyn compatibility errors in coercion will *always* be due to the + // fact that the RHS of the coercion is a non-dyn compatible `dyn Trait` + // written in source somewhere (otherwise we will never have lowered + // the dyn trait from HIR to middle). + // + // There's no reason to emit yet another dyn compatibility error, + // especially since the span will differ slightly and thus not be + // deduplicated at all! + self.set_tainted_by_errors(); + } + Err(_err) => { + // FIXME: Report an error: + // let guar = self.err_ctxt().report_selection_error( + // obligation.clone(), + // &obligation, + // &err, + // ); + self.set_tainted_by_errors(); + // Treat this like an obligation and follow through + // with the unsizing - the lack of a coercion should + // be silent, as it causes a type mismatch later. 
+ } - Ok(ok) + Ok(Some(ImplSource::UserDefined(impl_source))) => { + queue.extend(impl_source.nested); + } + Ok(Some(impl_source)) => queue.extend(impl_source.nested_obligations()), } - _ => self.unify_and(&from_ty, to_ty, identity), } + + Ok(coercion) } - fn coerce_from_fn_pointer( + fn coerce_from_safe_fn( &mut self, - from_ty: Ty, - from_f: &FnPointer, - to_ty: &Ty, - ) -> CoerceResult { - self.coerce_from_safe_fn( - from_ty, - from_f, - to_ty, - simple(Adjust::Pointer(PointerCast::UnsafeFnPointer)), - identity, - ) + fn_ty_a: PolyFnSig<'db>, + b: Ty<'db>, + adjustment: Option, + ) -> CoerceResult<'db> { + debug_assert!(self.table.shallow_resolve(b) == b); + + self.commit_if_ok(|this| { + if let TyKind::FnPtr(_, hdr_b) = b.kind() + && fn_ty_a.safety().is_safe() + && !hdr_b.safety.is_safe() + { + let unsafe_a = Ty::safe_to_unsafe_fn_ty(this.interner(), fn_ty_a); + this.unify_and( + unsafe_a, + b, + adjustment.map(|kind| Adjustment { + kind, + target: Ty::new_fn_ptr(this.interner(), fn_ty_a).to_chalk(this.interner()), + }), + Adjust::Pointer(PointerCast::UnsafeFnPointer), + ) + } else { + let a = Ty::new_fn_ptr(this.interner(), fn_ty_a); + match adjustment { + Some(adjust) => this.unify_and(a, b, [], adjust), + None => this.unify(a, b), + } + } + }) } - fn coerce_from_safe_fn( - &mut self, - from_ty: Ty, - from_fn_ptr: &FnPointer, - to_ty: &Ty, - to_unsafe: F, - normal: G, - ) -> CoerceResult - where - F: FnOnce(Ty) -> Vec, - G: FnOnce(Ty) -> Vec, - { - if let TyKind::Function(to_fn_ptr) = to_ty.kind(Interner) - && let (chalk_ir::Safety::Safe, chalk_ir::Safety::Unsafe) = - (from_fn_ptr.sig.safety, to_fn_ptr.sig.safety) - { - let from_unsafe = - TyKind::Function(safe_to_unsafe_fn_ty(from_fn_ptr.clone())).intern(Interner); - return self.unify_and(&from_unsafe, to_ty, to_unsafe); + fn coerce_from_fn_pointer(&mut self, fn_ty_a: PolyFnSig<'db>, b: Ty<'db>) -> CoerceResult<'db> { + debug!(?fn_ty_a, ?b, "coerce_from_fn_pointer"); + debug_assert!(self.table.shallow_resolve(b) == b); + + self.coerce_from_safe_fn(fn_ty_a, b, None) + } + + fn coerce_from_fn_item(&mut self, a: Ty<'db>, b: Ty<'db>) -> CoerceResult<'db> { + debug!("coerce_from_fn_item(a={:?}, b={:?})", a, b); + debug_assert!(self.table.shallow_resolve(a) == a); + debug_assert!(self.table.shallow_resolve(b) == b); + + match b.kind() { + TyKind::FnPtr(_, b_hdr) => { + let a_sig = a.fn_sig(self.interner()); + if let TyKind::FnDef(def_id, _) = a.kind() { + // Intrinsics are not coercible to function pointers + if let CallableDefId::FunctionId(def_id) = def_id.0 { + if FunctionSignature::is_intrinsic(self.table.db, def_id) { + return Err(TypeError::IntrinsicCast); + } + + let attrs = self.table.db.attrs(def_id.into()); + if attrs.by_key(sym::rustc_force_inline).exists() { + return Err(TypeError::ForceInlineCast); + } + + if b_hdr.safety.is_safe() && attrs.by_key(sym::target_feature).exists() { + let fn_target_features = + TargetFeatures::from_attrs_no_implications(&attrs); + // Allow the coercion if the current function has all the features that would be + // needed to call the coercee safely. 
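// A rough source-level sketch of the fn-item coercions checked here
// (`plain` is an invented example function):
fn plain() {}
fn fn_item_to_pointer() {
    // ReifyFnPointer: the zero-sized fn item type coerces to a fn pointer.
    let f: fn() = plain;
    // With an extra UnsafeFnPointer step it also reaches `unsafe fn()`.
    let g: unsafe fn() = plain;
    let _ = (f, g);
    // Intrinsics, `#[rustc_force_inline]` functions, and (unless the caller
    // already enables the required features) safe `#[target_feature]`
    // functions are rejected by the checks above instead of being reified.
}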
+ let (target_features, target_feature_is_safe) = + (self.target_features)(); + if target_feature_is_safe == TargetFeatureIsSafeInTarget::No + && !target_features.enabled.is_superset(&fn_target_features.enabled) + { + return Err(TypeError::TargetFeatureCast( + CallableIdWrapper(def_id.into()).into(), + )); + } + } + } + } + + self.coerce_from_safe_fn( + a_sig, + b, + Some(Adjust::Pointer(PointerCast::ReifyFnPointer)), + ) + } + _ => self.unify(a, b), } - self.unify_and(&from_ty, to_ty, normal) } - /// Attempts to coerce from the type of a non-capturing closure into a - /// function pointer. + /// Attempts to coerce from the type of a non-capturing closure + /// into a function pointer. fn coerce_closure_to_fn( &mut self, - from_ty: Ty, - from_substs: &Substitution, - to_ty: &Ty, - ) -> CoerceResult { - match to_ty.kind(Interner) { - // if from_substs is non-capturing (FIXME) - TyKind::Function(fn_ty) => { + a: Ty<'db>, + _closure_def_id_a: InternedClosureId, + args_a: GenericArgs<'db>, + b: Ty<'db>, + ) -> CoerceResult<'db> { + debug_assert!(self.table.shallow_resolve(a) == a); + debug_assert!(self.table.shallow_resolve(b) == b); + + match b.kind() { + // FIXME: We need to have an `upvars_mentioned()` query: + // At this point we haven't done capture analysis, which means + // that the ClosureArgs just contains an inference variable instead + // of tuple of captured types. + // + // All we care here is if any variable is being captured and not the exact paths, + // so we check `upvars_mentioned` for root variables being captured. + TyKind::FnPtr(_, hdr) => + // if self + // .db + // .upvars_mentioned(closure_def_id_a.expect_local()) + // .is_none_or(|u| u.is_empty()) => + { // We coerce the closure, which has fn type // `extern "rust-call" fn((arg0,arg1,...)) -> _` // to // `fn(arg0,arg1,...) -> _` // or // `unsafe fn(arg0,arg1,...) -> _` - let safety = fn_ty.sig.safety; - let pointer_ty = coerce_closure_fn_ty(from_substs, safety); + let safety = hdr.safety; + let closure_sig = args_a.closure_sig_untupled().map_bound(|mut sig| { + sig.safety = hdr.safety; + sig + }); + let pointer_ty = Ty::new_fn_ptr(self.interner(), closure_sig); + debug!("coerce_closure_to_fn(a={:?}, b={:?}, pty={:?})", a, b, pointer_ty); self.unify_and( - &pointer_ty, - to_ty, - simple(Adjust::Pointer(PointerCast::ClosureFnPointer(safety))), + pointer_ty, + b, + [], + Adjust::Pointer(PointerCast::ClosureFnPointer( + safety.to_chalk(self.interner()), + )), ) } - _ => self.unify_and(&from_ty, to_ty, identity), + _ => self.unify(a, b), } } - /// Coerce a type using `from_ty: CoerceUnsized` - /// - /// See: - fn try_coerce_unsized(&mut self, from_ty: &Ty, to_ty: &Ty) -> CoerceResult { - // These 'if' statements require some explanation. - // The `CoerceUnsized` trait is special - it is only - // possible to write `impl CoerceUnsized for A` where - // A and B have 'matching' fields. This rules out the following - // two types of blanket impls: - // - // `impl CoerceUnsized for SomeType` - // `impl CoerceUnsized for T` - // - // Both of these trigger a special `CoerceUnsized`-related error (E0376) - // - // We can take advantage of this fact to avoid performing unnecessary work. - // If either `source` or `target` is a type variable, then any applicable impl - // would need to be generic over the self-type (`impl CoerceUnsized for T`) - // or generic over the `CoerceUnsized` type parameter (`impl CoerceUnsized for - // SomeType`). 
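// At the source level this is the familiar non-capturing-closure rule
// (bindings invented for the sketch):
fn closure_to_fn_pointer() {
    // ClosureFnPointer: a closure with no captures coerces to `fn`...
    let add_one: fn(i32) -> i32 = |x| x + 1;
    // ...and to `unsafe fn` as well.
    let unsafe_add: unsafe fn(i32) -> i32 = |x| x + 1;
    let captured = 10;
    // `let bad: fn(i32) -> i32 = |x| x + captured;` is rejected, because a
    // capturing closure has no fn-pointer type; that is what the
    // `upvars_mentioned` FIXME above is about.
    let _ = (add_one, unsafe_add, captured);
}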
- // - // However, these are exactly the kinds of impls which are forbidden by - // the compiler! Therefore, we can be sure that coercion will always fail - // when either the source or target type is a type variable. This allows us - // to skip performing any trait selection, and immediately bail out. - if from_ty.is_ty_var() { - return Err(TypeError); + fn coerce_raw_ptr(&mut self, a: Ty<'db>, b: Ty<'db>, mutbl_b: Mutability) -> CoerceResult<'db> { + debug!("coerce_raw_ptr(a={:?}, b={:?})", a, b); + debug_assert!(self.table.shallow_resolve(a) == a); + debug_assert!(self.table.shallow_resolve(b) == b); + + let (is_ref, mt_a) = match a.kind() { + TyKind::Ref(_, ty, mutbl) => (true, TypeAndMut::> { ty, mutbl }), + TyKind::RawPtr(ty, mutbl) => (false, TypeAndMut { ty, mutbl }), + _ => return self.unify(a, b), + }; + coerce_mutbls(mt_a.mutbl, mutbl_b)?; + + // Check that the types which they point at are compatible. + let a_raw = Ty::new_ptr(self.interner(), mt_a.ty, mutbl_b); + // Although references and raw ptrs have the same + // representation, we still register an Adjust::DerefRef so that + // regionck knows that the region for `a` must be valid here. + if is_ref { + self.unify_and( + a_raw, + b, + [Adjustment { + kind: Adjust::Deref(None), + target: mt_a.ty.to_chalk(self.interner()), + }], + Adjust::Borrow(AutoBorrow::RawPtr(mutbl_b.to_chalk(self.interner()))), + ) + } else if mt_a.mutbl != mutbl_b { + self.unify_and(a_raw, b, [], Adjust::Pointer(PointerCast::MutToConstPointer)) + } else { + self.unify(a_raw, b) } - if to_ty.is_ty_var() { - return Err(TypeError); + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(crate) enum CoerceNever { + No, + Yes, +} + +impl<'db> InferenceContext<'db> { + /// Attempt to coerce an expression to a type, and return the + /// adjusted type of the expression, if successful. + /// Adjustments are only recorded if the coercion succeeded. + /// The expressions *must not* have any preexisting adjustments. + pub(crate) fn coerce( + &mut self, + expr: ExprOrPatId, + expr_ty: Ty<'db>, + mut target: Ty<'db>, + allow_two_phase: AllowTwoPhase, + coerce_never: CoerceNever, + ) -> RelateResult<'db, Ty<'db>> { + let source = self.table.try_structurally_resolve_type(expr_ty); + target = self.table.try_structurally_resolve_type(target); + debug!("coercion::try({:?}: {:?} -> {:?})", expr, source, target); + + let cause = ObligationCause::new(); + let krate = self.krate(); + let mut coerce = Coerce { + table: &mut self.table, + has_errors: &mut self.result.has_errors, + cause, + allow_two_phase, + coerce_never: matches!(coerce_never, CoerceNever::Yes), + use_lub: false, + target_features: &mut || { + Self::target_features(self.db, &self.target_features, self.owner, krate) + }, + }; + let ok = coerce.commit_if_ok(|coerce| coerce.coerce(source, target))?; + + let (adjustments, _) = self.table.register_infer_ok(ok); + match expr { + ExprOrPatId::ExprId(expr) => self.write_expr_adj(expr, adjustments.into_boxed_slice()), + ExprOrPatId::PatId(pat) => self + .write_pat_adj(pat, adjustments.into_iter().map(|adjust| adjust.target).collect()), } + Ok(target) + } - // Handle reborrows before trying to solve `Source: CoerceUnsized`. 
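// A short sketch of the raw-pointer and never coercions introduced above
// (names invented for the sketch):
fn raw_ptr_and_never(flag: bool) -> u32 {
    // coerce_raw_ptr: `&T -> *const T`, `&mut T -> *mut T`,
    // and `*mut T -> *const T` (MutToConstPointer).
    let mut v = 5i32;
    let p: *const i32 = &v;
    let q: *mut i32 = &mut v;
    let r: *const i32 = q;
    let _ = (p, r);
    // CoerceNever::Yes: a value of type `!` that is actually read coerces
    // to any type, which is what makes diverging operands type-check.
    let x: u32 = if flag { 1 } else { return 0 };
    x
}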
- let reborrow = match (from_ty.kind(Interner), to_ty.kind(Interner)) { - (TyKind::Ref(from_mt, _, from_inner), &TyKind::Ref(to_mt, _, _)) => { - coerce_mutabilities(*from_mt, to_mt)?; + /// Given some expressions, their known unified type and another expression, + /// tries to unify the types, potentially inserting coercions on any of the + /// provided expressions and returns their LUB (aka "common supertype"). + /// + /// This is really an internal helper. From outside the coercion + /// module, you should instantiate a `CoerceMany` instance. + fn try_find_coercion_lub( + &mut self, + exprs: &[ExprId], + prev_ty: Ty<'db>, + new: ExprId, + new_ty: Ty<'db>, + ) -> RelateResult<'db, Ty<'db>> { + let prev_ty = self.table.try_structurally_resolve_type(prev_ty); + let new_ty = self.table.try_structurally_resolve_type(new_ty); + debug!( + "coercion::try_find_coercion_lub({:?}, {:?}, exprs={:?} exprs)", + prev_ty, + new_ty, + exprs.len() + ); + + // The following check fixes #88097, where the compiler erroneously + // attempted to coerce a closure type to itself via a function pointer. + if prev_ty == new_ty { + return Ok(prev_ty); + } - let lt = self.new_lifetime_var(); - Some(( - Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() }, - Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), to_mt)), - target: TyKind::Ref(to_mt, lt, from_inner.clone()).intern(Interner), - }, - )) + let is_force_inline = |ty: Ty<'db>| { + if let TyKind::FnDef(CallableIdWrapper(CallableDefId::FunctionId(did)), _) = ty.kind() { + self.db.attrs(did.into()).by_key(sym::rustc_force_inline).exists() + } else { + false } - (TyKind::Ref(from_mt, _, from_inner), &TyKind::Raw(to_mt, _)) => { - coerce_mutabilities(*from_mt, to_mt)?; + }; + if is_force_inline(prev_ty) || is_force_inline(new_ty) { + return Err(TypeError::ForceInlineCast); + } - Some(( - Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() }, - Adjustment { - kind: Adjust::Borrow(AutoBorrow::RawPtr(to_mt)), - target: TyKind::Raw(to_mt, from_inner.clone()).intern(Interner), - }, - )) + // Special-case that coercion alone cannot handle: + // Function items or non-capturing closures of differing IDs or GenericArgs. + let (a_sig, b_sig) = { + let is_capturing_closure = |_ty: Ty<'db>| { + // FIXME: + // if let TyKind::Closure(closure_def_id, _args) = ty.kind() { + // self.db.upvars_mentioned(closure_def_id.expect_local()).is_some() + // } else { + // false + // } + false + }; + if is_capturing_closure(prev_ty) || is_capturing_closure(new_ty) { + (None, None) + } else { + match (prev_ty.kind(), new_ty.kind()) { + (TyKind::FnDef(..), TyKind::FnDef(..)) => { + // Don't reify if the function types have a LUB, i.e., they + // are the same function and their parameters have a LUB. + match self.table.commit_if_ok(|table| { + // We need to eagerly handle nested obligations due to lazy norm. + let mut ocx = ObligationCtxt::new(&table.infer_ctxt); + let value = + ocx.lub(&ObligationCause::new(), table.param_env, prev_ty, new_ty)?; + if ocx.select_where_possible().is_empty() { + Ok(InferOk { value, obligations: ocx.into_pending_obligations() }) + } else { + Err(TypeError::Mismatch) + } + }) { + // We have a LUB of prev_ty and new_ty, just return it. 
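// The source patterns that reach this LUB logic look roughly like this
// (`zero`/`one` are invented example functions):
fn zero() -> i32 { 0 }
fn one() -> i32 { 1 }
fn lub_of_fn_items(flag: bool) -> i32 {
    // Two distinct fn items share no fn-item type, so both arms get
    // reified to the fn pointer `fn() -> i32`.
    let f = if flag { zero } else { one };
    // A fn item and a non-capturing closure also meet at a fn pointer.
    let g = if flag { zero } else { || 2i32 };
    f() + g()
}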
+ Ok(ok) => return Ok(self.table.register_infer_ok(ok)), + Err(_) => ( + Some(prev_ty.fn_sig(self.table.interner)), + Some(new_ty.fn_sig(self.table.interner)), + ), + } + } + (TyKind::Closure(_, args), TyKind::FnDef(..)) => { + let b_sig = new_ty.fn_sig(self.table.interner); + let a_sig = args.closure_sig_untupled().map_bound(|mut sig| { + sig.safety = b_sig.safety(); + sig + }); + (Some(a_sig), Some(b_sig)) + } + (TyKind::FnDef(..), TyKind::Closure(_, args)) => { + let a_sig = prev_ty.fn_sig(self.table.interner); + let b_sig = args.closure_sig_untupled().map_bound(|mut sig| { + sig.safety = a_sig.safety(); + sig + }); + (Some(a_sig), Some(b_sig)) + } + (TyKind::Closure(_, args_a), TyKind::Closure(_, args_b)) => { + (Some(args_a.closure_sig_untupled()), Some(args_b.closure_sig_untupled())) + } + _ => (None, None), + } } - _ => None, }; - let coerce_from = - reborrow.as_ref().map_or_else(|| from_ty.clone(), |(_, adj)| adj.target.clone()); + if let (Some(a_sig), Some(b_sig)) = (a_sig, b_sig) { + // The signature must match. + let sig = self + .table + .infer_ctxt + .at(&ObligationCause::new(), self.table.param_env) + .lub(a_sig, b_sig) + .map(|ok| self.table.register_infer_ok(ok))?; + + // Reify both sides and return the reified fn pointer type. + let fn_ptr = Ty::new_fn_ptr(self.table.interner, sig); + let prev_adjustment = match prev_ty.kind() { + TyKind::Closure(..) => Adjust::Pointer(PointerCast::ClosureFnPointer( + a_sig.safety().to_chalk(self.table.interner), + )), + TyKind::FnDef(..) => Adjust::Pointer(PointerCast::ReifyFnPointer), + _ => panic!("should not try to coerce a {prev_ty:?} to a fn pointer"), + }; + let next_adjustment = match new_ty.kind() { + TyKind::Closure(..) => Adjust::Pointer(PointerCast::ClosureFnPointer( + b_sig.safety().to_chalk(self.table.interner), + )), + TyKind::FnDef(..) => Adjust::Pointer(PointerCast::ReifyFnPointer), + _ => panic!("should not try to coerce a {new_ty:?} to a fn pointer"), + }; + for &expr in exprs { + self.write_expr_adj( + expr, + Box::new([Adjustment { + kind: prev_adjustment.clone(), + target: fn_ptr.to_chalk(self.table.interner), + }]), + ); + } + self.write_expr_adj( + new, + Box::new([Adjustment { + kind: next_adjustment, + target: fn_ptr.to_chalk(self.table.interner), + }]), + ); + return Ok(fn_ptr); + } - let krate = self.trait_env.krate; - let coerce_unsized_trait = match LangItem::CoerceUnsized.resolve_trait(self.db, krate) { - Some(trait_) => trait_, - _ => return Err(TypeError), + // Configure a Coerce instance to compute the LUB. + // We don't allow two-phase borrows on any autorefs this creates since we + // probably aren't processing function arguments here and even if we were, + // they're going to get autorefed again anyway and we can apply 2-phase borrows + // at that time. + // + // NOTE: we set `coerce_never` to `true` here because coercion LUBs only + // operate on values and not places, so a never coercion is valid. + let krate = self.krate(); + let mut coerce = Coerce { + table: &mut self.table, + has_errors: &mut self.result.has_errors, + cause: ObligationCause::new(), + allow_two_phase: AllowTwoPhase::No, + coerce_never: true, + use_lub: true, + target_features: &mut || { + Self::target_features(self.db, &self.target_features, self.owner, krate) + }, }; - let coerce_unsized_tref = { - let b = TyBuilder::trait_ref(self.db, coerce_unsized_trait); - if b.remaining() != 2 { - // The CoerceUnsized trait should have two generic params: Self and T. 
- return Err(TypeError); + // First try to coerce the new expression to the type of the previous ones, + // but only if the new expression has no coercion already applied to it. + let mut first_error = None; + if !self.result.expr_adjustments.contains_key(&new) { + let result = coerce.commit_if_ok(|coerce| coerce.coerce(new_ty, prev_ty)); + match result { + Ok(ok) => { + let (adjustments, target) = self.table.register_infer_ok(ok); + self.write_expr_adj(new, adjustments.into_boxed_slice()); + debug!( + "coercion::try_find_coercion_lub: was able to coerce from new type {:?} to previous type {:?} ({:?})", + new_ty, prev_ty, target + ); + return Ok(target); + } + Err(e) => first_error = Some(e), } - b.push(coerce_from).push(to_ty.clone()).build() - }; + } - let goal: InEnvironment = - InEnvironment::new(&self.trait_env.env, coerce_unsized_tref.cast(Interner)); - - let canonicalized = self.canonicalize_with_free_vars(goal); - - // FIXME: rustc's coerce_unsized is more specialized -- it only tries to - // solve `CoerceUnsized` and `Unsize` goals at this point and leaves the - // rest for later. Also, there's some logic about sized type variables. - // Need to find out in what cases this is necessary - let solution = self - .db - .trait_solve(krate, self.trait_env.block, canonicalized.value.clone().cast(Interner)) - .ok_or(TypeError)?; - - match solution { - Solution::Unique(v) => { - canonicalized.apply_solution( - self, - Canonical { - binders: v.binders, - // FIXME handle constraints - value: v.value.subst, - }, - ); + match coerce.commit_if_ok(|coerce| coerce.coerce(prev_ty, new_ty)) { + Err(_) => { + // Avoid giving strange errors on failed attempts. + if let Some(e) = first_error { + Err(e) + } else { + Err(self + .table + .commit_if_ok(|table| { + table + .infer_ctxt + .at(&ObligationCause::new(), table.param_env) + .lub(prev_ty, new_ty) + }) + .unwrap_err()) + } } - Solution::Ambig(Guidance::Definite(subst)) => { - // FIXME need to record an obligation here - canonicalized.apply_solution(self, subst) + Ok(ok) => { + let (adjustments, target) = self.table.register_infer_ok(ok); + for &expr in exprs { + self.write_expr_adj(expr, adjustments.as_slice().into()); + } + debug!( + "coercion::try_find_coercion_lub: was able to coerce previous type {:?} to new type {:?} ({:?})", + prev_ty, new_ty, target + ); + Ok(target) } - // FIXME actually we maybe should also accept unknown guidance here - _ => return Err(TypeError), - }; - let unsize = - Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), target: to_ty.clone() }; - let adjustments = match reborrow { - None => vec![unsize], - Some((deref, autoref)) => vec![deref, autoref, unsize], - }; - success(adjustments, to_ty.clone(), vec![]) + } } } -fn coerce_closure_fn_ty(closure_substs: &Substitution, safety: chalk_ir::Safety) -> Ty { - let closure_sig = ClosureSubst(closure_substs).sig_ty().clone(); - match closure_sig.kind(Interner) { - TyKind::Function(fn_ty) => TyKind::Function(FnPointer { - num_binders: fn_ty.num_binders, - sig: FnSig { safety, abi: FnAbi::Rust, variadic: fn_ty.sig.variadic }, - substitution: fn_ty.substitution.clone(), - }) - .intern(Interner), - _ => TyKind::Error.intern(Interner), - } +/// CoerceMany encapsulates the pattern you should use when you have +/// many expressions that are all getting coerced to a common +/// type. This arises, for example, when you have a match (the result +/// of each arm is coerced to a common type). 
It also arises in less +/// obvious places, such as when you have many `break foo` expressions +/// that target the same loop, or the various `return` expressions in +/// a function. +/// +/// The basic protocol is as follows: +/// +/// - Instantiate the `CoerceMany` with an initial `expected_ty`. +/// This will also serve as the "starting LUB". The expectation is +/// that this type is something which all of the expressions *must* +/// be coercible to. Use a fresh type variable if needed. +/// - For each expression whose result is to be coerced, invoke `coerce()` with. +/// - In some cases we wish to coerce "non-expressions" whose types are implicitly +/// unit. This happens for example if you have a `break` with no expression, +/// or an `if` with no `else`. In that case, invoke `coerce_forced_unit()`. +/// - `coerce()` and `coerce_forced_unit()` may report errors. They hide this +/// from you so that you don't have to worry your pretty head about it. +/// But if an error is reported, the final type will be `err`. +/// - Invoking `coerce()` may cause us to go and adjust the "adjustments" on +/// previously coerced expressions. +/// - When all done, invoke `complete()`. This will return the LUB of +/// all your expressions. +/// - WARNING: I don't believe this final type is guaranteed to be +/// related to your initial `expected_ty` in any particular way, +/// although it will typically be a subtype, so you should check it. +/// - Invoking `complete()` may cause us to go and adjust the "adjustments" on +/// previously coerced expressions. +/// +/// Example: +/// +/// ```ignore (illustrative) +/// let mut coerce = CoerceMany::new(expected_ty); +/// for expr in exprs { +/// let expr_ty = fcx.check_expr_with_expectation(expr, expected); +/// coerce.coerce(fcx, &cause, expr, expr_ty); +/// } +/// let final_ty = coerce.complete(fcx); +/// ``` +#[derive(Debug, Clone)] +pub(crate) struct CoerceMany<'db, 'exprs> { + expected_ty: Ty<'db>, + final_ty: Option>, + expressions: Expressions<'exprs>, + pushed: usize, } -fn safe_to_unsafe_fn_ty(fn_ty: FnPointer) -> FnPointer { - FnPointer { - num_binders: fn_ty.num_binders, - sig: FnSig { safety: chalk_ir::Safety::Unsafe, ..fn_ty.sig }, - substitution: fn_ty.substitution, - } +/// The type of a `CoerceMany` that is storing up the expressions into +/// a buffer. We use this for things like `break`. +pub(crate) type DynamicCoerceMany<'db> = CoerceMany<'db, 'db>; + +#[derive(Debug, Clone)] +enum Expressions<'exprs> { + Dynamic(SmallVec<[ExprId; 4]>), + UpFront(&'exprs [ExprId]), } -fn coerce_mutabilities(from: Mutability, to: Mutability) -> Result<(), TypeError> { - match (from, to) { - (Mutability::Mut, Mutability::Mut | Mutability::Not) - | (Mutability::Not, Mutability::Not) => Ok(()), - (Mutability::Not, Mutability::Mut) => Err(TypeError), +impl<'db, 'exprs> CoerceMany<'db, 'exprs> { + /// The usual case; collect the set of expressions dynamically. + /// If the full set of coercion sites is known before hand, + /// consider `with_coercion_sites()` instead to avoid allocation. + pub(crate) fn new(expected_ty: Ty<'db>) -> Self { + Self::make(expected_ty, Expressions::Dynamic(SmallVec::new())) + } + + /// As an optimization, you can create a `CoerceMany` with a + /// preexisting slice of expressions. In this case, you are + /// expected to pass each element in the slice to `coerce(...)` in + /// order. This is used with arrays in particular to avoid + /// needlessly cloning the slice. 
+ pub(crate) fn with_coercion_sites( + expected_ty: Ty<'db>, + coercion_sites: &'exprs [ExprId], + ) -> Self { + Self::make(expected_ty, Expressions::UpFront(coercion_sites)) + } + + fn make(expected_ty: Ty<'db>, expressions: Expressions<'exprs>) -> Self { + CoerceMany { expected_ty, final_ty: None, expressions, pushed: 0 } + } + + /// Returns the "expected type" with which this coercion was + /// constructed. This represents the "downward propagated" type + /// that was given to us at the start of typing whatever construct + /// we are typing (e.g., the match expression). + /// + /// Typically, this is used as the expected type when + /// type-checking each of the alternative expressions whose types + /// we are trying to merge. + pub(crate) fn expected_ty(&self) -> Ty<'db> { + self.expected_ty + } + + /// Returns the current "merged type", representing our best-guess + /// at the LUB of the expressions we've seen so far (if any). This + /// isn't *final* until you call `self.complete()`, which will return + /// the merged type. + pub(crate) fn merged_ty(&self) -> Ty<'db> { + self.final_ty.unwrap_or(self.expected_ty) + } + + /// Indicates that the value generated by `expression`, which is + /// of type `expression_ty`, is one of the possibilities that we + /// could coerce from. This will record `expression`, and later + /// calls to `coerce` may come back and add adjustments and things + /// if necessary. + pub(crate) fn coerce( + &mut self, + icx: &mut InferenceContext<'db>, + cause: &ObligationCause, + expression: ExprId, + expression_ty: Ty<'db>, + ) { + self.coerce_inner(icx, cause, expression, expression_ty, false, false) + } + + /// Indicates that one of the inputs is a "forced unit". This + /// occurs in a case like `if foo { ... };`, where the missing else + /// generates a "forced unit". Another example is a `loop { break; + /// }`, where the `break` has no argument expression. We treat + /// these cases slightly differently for error-reporting + /// purposes. Note that these tend to correspond to cases where + /// the `()` expression is implicit in the source, and hence we do + /// not take an expression argument. + /// + /// The `augment_error` gives you a chance to extend the error + /// message, in case any results (e.g., we use this to suggest + /// removing a `;`). + pub(crate) fn coerce_forced_unit( + &mut self, + icx: &mut InferenceContext<'db>, + expr: ExprId, + cause: &ObligationCause, + label_unit_as_expected: bool, + ) { + self.coerce_inner( + icx, + cause, + expr, + icx.result.standard_types.unit.to_nextsolver(icx.table.interner), + true, + label_unit_as_expected, + ) + } + + /// The inner coercion "engine". If `expression` is `None`, this + /// is a forced-unit case, and hence `expression_ty` must be + /// `Nil`. + pub(crate) fn coerce_inner( + &mut self, + icx: &mut InferenceContext<'db>, + cause: &ObligationCause, + expression: ExprId, + mut expression_ty: Ty<'db>, + force_unit: bool, + label_expression_as_expected: bool, + ) { + // Incorporate whatever type inference information we have + // until now; in principle we might also want to process + // pending obligations, but doing so should only improve + // compatibility (hopefully that is true) by helping us + // uncover never types better. 
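// Roughly what this protocol looks like from the source side (the trait and
// types below are made up for the sketch):
trait Draw { fn draw(&self); }
struct Circle;
struct Square;
impl Draw for Circle { fn draw(&self) {} }
impl Draw for Square { fn draw(&self) {} }
fn pick_shape<'a>(flag: bool, c: &'a Circle, s: &'a Square) -> &'a dyn Draw {
    // Both arms are coercion sites: each is coerced towards the expected
    // `&dyn Draw`, and later arms are merged against the running LUB
    // rather than against the first arm's concrete type.
    if flag { c } else { s }
}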
+ if expression_ty.is_ty_var() { + expression_ty = icx.shallow_resolve(expression_ty); + } + + let (expected, found) = if label_expression_as_expected { + // In the case where this is a "forced unit", like + // `break`, we want to call the `()` "expected" + // since it is implied by the syntax. + // (Note: not all force-units work this way.)" + (expression_ty, self.merged_ty()) + } else { + // Otherwise, the "expected" type for error + // reporting is the current unification type, + // which is basically the LUB of the expressions + // we've seen so far (combined with the expected + // type) + (self.merged_ty(), expression_ty) + }; + + // Handle the actual type unification etc. + let result = if !force_unit { + if self.pushed == 0 { + // Special-case the first expression we are coercing. + // To be honest, I'm not entirely sure why we do this. + // We don't allow two-phase borrows, see comment in try_find_coercion_lub for why + icx.coerce( + expression.into(), + expression_ty, + self.expected_ty, + AllowTwoPhase::No, + CoerceNever::Yes, + ) + } else { + match self.expressions { + Expressions::Dynamic(ref exprs) => icx.try_find_coercion_lub( + exprs, + self.merged_ty(), + expression, + expression_ty, + ), + Expressions::UpFront(coercion_sites) => icx.try_find_coercion_lub( + &coercion_sites[0..self.pushed], + self.merged_ty(), + expression, + expression_ty, + ), + } + } + } else { + // this is a hack for cases where we default to `()` because + // the expression etc has been omitted from the source. An + // example is an `if let` without an else: + // + // if let Some(x) = ... { } + // + // we wind up with a second match arm that is like `_ => + // ()`. That is the case we are considering here. We take + // a different path to get the right "expected, found" + // message and so forth (and because we know that + // `expression_ty` will be unit). + // + // Another example is `break` with no argument expression. + assert!(expression_ty.is_unit(), "if let hack without unit type"); + icx.table + .infer_ctxt + .at(cause, icx.table.param_env) + .eq( + // needed for tests/ui/type-alias-impl-trait/issue-65679-inst-opaque-ty-from-val-twice.rs + DefineOpaqueTypes::Yes, + expected, + found, + ) + .map(|infer_ok| { + icx.table.register_infer_ok(infer_ok); + expression_ty + }) + }; + + debug!(?result); + match result { + Ok(v) => { + self.final_ty = Some(v); + match self.expressions { + Expressions::Dynamic(ref mut buffer) => buffer.push(expression), + Expressions::UpFront(coercion_sites) => { + // if the user gave us an array to validate, check that we got + // the next expression in the list, as expected + assert_eq!(coercion_sites[self.pushed], expression); + } + } + } + Err(_coercion_error) => { + // Mark that we've failed to coerce the types here to suppress + // any superfluous errors we might encounter while trying to + // emit or provide suggestions on how to fix the initial error. 
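// The forced-unit cases mentioned above, at the source level (illustrative):
fn forced_unit(n: i32) {
    // `if let` without `else`: the hidden arm is `_ => ()`, so the whole
    // expression is `()` and the `then` block must coerce to `()` too.
    let _unit: () = if let Some(x) = Some(n) { let _ = x; };
    // A bare `break` is the other forced-unit case: every `break` out of
    // this loop then has to agree on `()`.
    loop {
        if n > 0 {
            break;
        }
        break;
    }
}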
+ icx.set_tainted_by_errors(); + + self.final_ty = Some(Ty::new_error(icx.table.interner, ErrorGuaranteed)); + + icx.result.type_mismatches.insert( + expression.into(), + if label_expression_as_expected { + TypeMismatch { + expected: found.to_chalk(icx.table.interner), + actual: expected.to_chalk(icx.table.interner), + } + } else { + TypeMismatch { + expected: expected.to_chalk(icx.table.interner), + actual: found.to_chalk(icx.table.interner), + } + }, + ); + } + } + + self.pushed += 1; + } + + pub(crate) fn complete(self, icx: &mut InferenceContext<'db>) -> Ty<'db> { + if let Some(final_ty) = self.final_ty { + final_ty + } else { + // If we only had inputs that were of type `!` (or no + // inputs at all), then the final type is `!`. + assert_eq!(self.pushed, 0); + icx.result.standard_types.never.to_nextsolver(icx.table.interner) + } } } -pub(super) fn auto_deref_adjust_steps(autoderef: &Autoderef<'_, '_>) -> Vec { - let steps = autoderef.steps(); - let targets = - steps.iter().skip(1).map(|(_, ty)| ty.clone()).chain(iter::once(autoderef.final_ty())); - steps - .iter() - .map(|(kind, _source)| match kind { - // We do not know what kind of deref we require at this point yet - AutoderefKind::Overloaded => Some(OverloadedDeref(None)), - AutoderefKind::Builtin => None, - }) - .zip(targets) - .map(|(autoderef, target)| Adjustment { kind: Adjust::Deref(autoderef), target }) - .collect() +pub fn could_coerce( + db: &dyn HirDatabase, + env: Arc, + tys: &crate::Canonical<(crate::Ty, crate::Ty)>, +) -> bool { + coerce(db, env, tys).is_ok() +} + +fn coerce<'db>( + db: &'db dyn HirDatabase, + env: Arc, + tys: &crate::Canonical<(crate::Ty, crate::Ty)>, +) -> Result<(Vec, crate::Ty), TypeError>> { + let mut table = InferenceTable::new(db, env); + let vars = table.fresh_subst(tys.binders.as_slice(Interner)); + let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner); + let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner); + + let cause = ObligationCause::new(); + // FIXME: Target features. 
+ let target_features = TargetFeatures::default(); + let mut coerce = Coerce { + table: &mut table, + has_errors: &mut false, + cause, + allow_two_phase: AllowTwoPhase::No, + coerce_never: true, + use_lub: false, + target_features: &mut || (&target_features, TargetFeatureIsSafeInTarget::No), + }; + let InferOk { value: (adjustments, ty), obligations } = coerce.coerce( + ty1_with_vars.to_nextsolver(coerce.table.interner), + ty2_with_vars.to_nextsolver(coerce.table.interner), + )?; + table.register_predicates(obligations); + + // default any type vars that weren't unified back to their original bound vars + // (kind of hacky) + let find_var = |iv| { + vars.iter(Interner).position(|v| match v.interned() { + chalk_ir::GenericArgData::Ty(ty) => ty.inference_var(Interner), + chalk_ir::GenericArgData::Lifetime(lt) => lt.inference_var(Interner), + chalk_ir::GenericArgData::Const(c) => c.inference_var(Interner), + } == Some(iv)) + }; + let fallback = |iv, kind, default, binder| match kind { + chalk_ir::VariableKind::Ty(_ty_kind) => find_var(iv) + .map_or(default, |i| crate::BoundVar::new(binder, i).to_ty(Interner).cast(Interner)), + chalk_ir::VariableKind::Lifetime => find_var(iv).map_or(default, |i| { + crate::BoundVar::new(binder, i).to_lifetime(Interner).cast(Interner) + }), + chalk_ir::VariableKind::Const(ty) => find_var(iv).map_or(default, |i| { + crate::BoundVar::new(binder, i).to_const(Interner, ty).cast(Interner) + }), + }; + // FIXME also map the types in the adjustments + Ok((adjustments, table.resolve_with_fallback(ty.to_chalk(table.interner), &fallback))) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index 16fc2bfc0631f..c5a51dfc4cf92 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -1,12 +1,10 @@ //! Type inference for expressions. 
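// For orientation, the expression forms below all funnel into the coercion
// machinery above (`CoerceMany` sites for `if`/`else`, `match` arms, array
// elements, and `return`); the function is an invented example:
fn coercion_sites(flag: bool, xs: &[u8; 4]) -> &[u8] {
    // `if`/`else` arms are merged through a `CoerceMany`...
    let slice: &[u8] = if flag { xs } else { &[] };
    // ...as are the elements of an array literal...
    let pair: [&[u8]; 2] = [xs, slice];
    // ...and every `return` in the body.
    if flag {
        return pair[0];
    }
    slice
}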
-use std::{ - iter::{repeat, repeat_with}, - mem, -}; +use std::{iter::repeat_with, mem}; use chalk_ir::{DebruijnIndex, Mutability, TyVariableKind, cast::Cast}; use either::Either; +use hir_def::hir::ClosureKind; use hir_def::{ BlockId, FieldId, GenericDefId, GenericParamId, ItemContainerId, Lookup, TupleFieldId, TupleId, expr_store::path::{GenericArg, GenericArgs, Path}, @@ -19,29 +17,37 @@ use hir_def::{ }; use hir_expand::name::Name; use intern::sym; +use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike, Ty as _}; use stdx::always; use syntax::ast::RangeOp; +use tracing::debug; +use crate::autoderef::overloaded_deref_ty; +use crate::next_solver::ErrorGuaranteed; +use crate::next_solver::infer::DefineOpaqueTypes; +use crate::next_solver::obligation_ctxt::ObligationCtxt; use crate::{ Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, CallableSig, DeclContext, - DeclOrigin, IncorrectGenericsLenKind, Interner, Rawness, Scalar, Substitution, - TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind, - autoderef::{Autoderef, builtin_deref, deref_by_trait}, - consteval, + DeclOrigin, IncorrectGenericsLenKind, Interner, LifetimeElisionKind, Rawness, Scalar, + Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind, consteval, generics::generics, infer::{ - BreakableKind, - coerce::{CoerceMany, CoerceNever, CoercionCause}, + AllowTwoPhase, BreakableKind, + coerce::{CoerceMany, CoerceNever}, find_continuable, pat::contains_explicit_ref_binding, }, lang_items::lang_items_for_bin_op, lower::{ - LifetimeElisionKind, ParamLoweringMode, lower_to_chalk_mutability, + ParamLoweringMode, lower_to_chalk_mutability, path::{GenericArgsLowerer, TypeLikeConst, substs_from_args_and_bindings}, }, mapping::{ToChalk, from_chalk}, method_resolution::{self, VisibleFromModule}, + next_solver::{ + infer::traits::ObligationCause, + mapping::{ChalkToNextSolver, NextSolverToChalk}, + }, primitive::{self, UintTy}, static_lifetime, to_chalk_trait_id, traits::FnTrait, @@ -49,7 +55,7 @@ use crate::{ use super::{ BreakableContext, Diverges, Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch, - cast::CastCheck, coerce::auto_deref_adjust_steps, find_breakable, + cast::CastCheck, find_breakable, }; #[derive(Clone, Copy, PartialEq, Eq)] @@ -58,7 +64,7 @@ pub(crate) enum ExprIsRead { No, } -impl InferenceContext<'_> { +impl<'db> InferenceContext<'db> { pub(crate) fn infer_expr( &mut self, tgt_expr: ExprId, @@ -97,8 +103,14 @@ impl InferenceContext<'_> { } else { CoerceNever::No }; - match self.coerce(Some(expr), &ty, &target, coerce_never) { - Ok(res) => res, + match self.coerce( + expr.into(), + ty.to_nextsolver(self.table.interner), + target.to_nextsolver(self.table.interner), + AllowTwoPhase::No, + coerce_never, + ) { + Ok(res) => res.to_chalk(self.table.interner), Err(_) => { self.result.type_mismatches.insert( expr.into(), @@ -259,8 +271,15 @@ impl InferenceContext<'_> { } if let Some(target) = expected.only_has_type(&mut self.table) { - self.coerce(Some(expr), &ty, &target, CoerceNever::Yes) - .expect("never-to-any coercion should always succeed") + self.coerce( + expr.into(), + ty.to_nextsolver(self.table.interner), + target.to_nextsolver(self.table.interner), + AllowTwoPhase::No, + CoerceNever::Yes, + ) + .expect("never-to-any coercion should always succeed") + .to_chalk(self.table.interner) } else { ty } @@ -278,6 +297,7 @@ impl InferenceContext<'_> { } } + #[tracing::instrument(level = "debug", skip(self, is_read), ret)] fn infer_expr_inner( &mut self, tgt_expr: ExprId, @@ 
-286,7 +306,9 @@ impl InferenceContext<'_> { ) -> Ty { self.db.unwind_if_revision_cancelled(); - let ty = match &self.body[tgt_expr] { + let expr = &self.body[tgt_expr]; + tracing::trace!(?expr); + let ty = match expr { Expr::Missing => self.err_ty(), &Expr::If { condition, then_branch, else_branch } => { let expected = &expected.adjust_for_branches(&mut self.table); @@ -300,27 +322,41 @@ impl InferenceContext<'_> { let then_ty = self.infer_expr_inner(then_branch, expected, ExprIsRead::Yes); let then_diverges = mem::replace(&mut self.diverges, Diverges::Maybe); - let mut coerce = CoerceMany::new(expected.coercion_target_type(&mut self.table)); - coerce.coerce(self, Some(then_branch), &then_ty, CoercionCause::Expr(then_branch)); + let mut coercion_sites = [then_branch, tgt_expr]; + if let Some(else_branch) = else_branch { + coercion_sites[1] = else_branch; + } + let mut coerce = CoerceMany::with_coercion_sites( + expected + .coercion_target_type(&mut self.table) + .to_nextsolver(self.table.interner), + &coercion_sites, + ); + coerce.coerce( + self, + &ObligationCause::new(), + then_branch, + then_ty.to_nextsolver(self.table.interner), + ); match else_branch { Some(else_branch) => { let else_ty = self.infer_expr_inner(else_branch, expected, ExprIsRead::Yes); let else_diverges = mem::replace(&mut self.diverges, Diverges::Maybe); coerce.coerce( self, - Some(else_branch), - &else_ty, - CoercionCause::Expr(else_branch), + &ObligationCause::new(), + else_branch, + else_ty.to_nextsolver(self.table.interner), ); self.diverges = condition_diverges | then_diverges & else_diverges; } None => { - coerce.coerce_forced_unit(self, CoercionCause::Expr(tgt_expr)); + coerce.coerce_forced_unit(self, tgt_expr, &ObligationCause::new(), true); self.diverges = condition_diverges; } } - coerce.complete(self) + coerce.complete(self).to_chalk(self.table.interner) } &Expr::Let { pat, expr } => { let child_is_read = if self.pat_guaranteed_to_constitute_read_for_never(pat) { @@ -373,7 +409,15 @@ impl InferenceContext<'_> { } } Expr::Closure { body, args, ret_type, arg_types, closure_kind, capture_by: _ } => self - .infer_closure(body, args, ret_type, arg_types, *closure_kind, tgt_expr, expected), + .infer_closure( + *body, + args, + *ret_type, + arg_types, + *closure_kind, + tgt_expr, + expected, + ), Expr::Call { callee, args, .. 
} => self.infer_call(tgt_expr, *callee, args, expected), Expr::MethodCall { receiver, args, method_name, generic_args } => self .infer_method_call( @@ -399,7 +443,7 @@ impl InferenceContext<'_> { let matchee_diverges = mem::replace(&mut self.diverges, Diverges::Maybe); let mut all_arms_diverge = Diverges::Always; for arm in arms.iter() { - let input_ty = self.resolve_ty_shallow(&input_ty); + let input_ty = self.table.structurally_resolve_type(&input_ty); self.infer_top_pat(arm.pat, &input_ty, None); } @@ -412,7 +456,7 @@ impl InferenceContext<'_> { } _ => self.table.new_type_var(), }; - let mut coerce = CoerceMany::new(result_ty); + let mut coerce = CoerceMany::new(result_ty.to_nextsolver(self.table.interner)); for arm in arms.iter() { if let Some(guard_expr) = arm.guard { @@ -427,12 +471,17 @@ impl InferenceContext<'_> { let arm_ty = self.infer_expr_inner(arm.expr, &expected, ExprIsRead::Yes); all_arms_diverge &= self.diverges; - coerce.coerce(self, Some(arm.expr), &arm_ty, CoercionCause::Expr(arm.expr)); + coerce.coerce( + self, + &ObligationCause::new(), + arm.expr, + arm_ty.to_nextsolver(self.table.interner), + ); } self.diverges = matchee_diverges | all_arms_diverge; - coerce.complete(self) + coerce.complete(self).to_chalk(self.table.interner) } } Expr::Path(p) => self.infer_expr_path(p, tgt_expr.into(), tgt_expr), @@ -450,7 +499,7 @@ impl InferenceContext<'_> { let val_ty = if let Some(expr) = expr { let opt_coerce_to = match find_breakable(&mut self.breakables, label) { Some(ctxt) => match &ctxt.coerce { - Some(coerce) => coerce.expected_ty(), + Some(coerce) => coerce.expected_ty().to_chalk(self.table.interner), None => { self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop { expr: tgt_expr, @@ -474,11 +523,12 @@ impl InferenceContext<'_> { match find_breakable(&mut self.breakables, label) { Some(ctxt) => match ctxt.coerce.take() { Some(mut coerce) => { - let cause = match expr { - Some(expr) => CoercionCause::Expr(expr), - None => CoercionCause::Expr(tgt_expr), - }; - coerce.coerce(self, expr, &val_ty, cause); + coerce.coerce( + self, + &ObligationCause::new(), + expr.unwrap_or(tgt_expr), + val_ty.to_nextsolver(self.table.interner), + ); // Avoiding borrowck let ctxt = find_breakable(&mut self.breakables, label) @@ -510,7 +560,13 @@ impl InferenceContext<'_> { ); } else { let unit = self.result.standard_types.unit.clone(); - let _ = self.coerce(Some(tgt_expr), &unit, &yield_ty, CoerceNever::Yes); + let _ = self.coerce( + tgt_expr.into(), + unit.to_nextsolver(self.table.interner), + yield_ty.to_nextsolver(self.table.interner), + AllowTwoPhase::No, + CoerceNever::Yes, + ); } resume_ty } else { @@ -649,7 +705,7 @@ impl InferenceContext<'_> { &Expr::Box { expr } => self.infer_expr_box(expr, expected), Expr::UnaryOp { expr, op } => { let inner_ty = self.infer_expr_inner(*expr, &Expectation::none(), ExprIsRead::Yes); - let inner_ty = self.resolve_ty_shallow(&inner_ty); + let inner_ty = self.table.structurally_resolve_type(&inner_ty); // FIXME: Note down method resolution her match op { UnaryOp::Deref => { @@ -666,11 +722,23 @@ impl InferenceContext<'_> { Substitution::empty(Interner), ); } - if let Some(derefed) = builtin_deref(self.table.db, &inner_ty, true) { - self.resolve_ty_shallow(derefed) + if let Some(derefed) = + inner_ty.to_nextsolver(self.table.interner).builtin_deref(self.db, true) + { + self.table + .structurally_resolve_type(&derefed.to_chalk(self.table.interner)) } else { - deref_by_trait(&mut self.table, inner_ty, false) - .unwrap_or_else(|| self.err_ty()) + let 
infer_ok = overloaded_deref_ty( + &self.table, + inner_ty.to_nextsolver(self.table.interner), + ); + match infer_ok { + Some(infer_ok) => self + .table + .register_infer_ok(infer_ok) + .to_chalk(self.table.interner), + None => self.err_ty(), + } } } UnaryOp::Neg => { @@ -823,10 +891,10 @@ impl InferenceContext<'_> { let index_ty = self.infer_expr(*index, &Expectation::none(), ExprIsRead::Yes); if let Some(index_trait) = self.resolve_lang_trait(LangItem::Index) { - let canonicalized = self.canonicalize(base_ty.clone()); + let canonicalized = + self.canonicalize(base_ty.clone().to_nextsolver(self.table.interner)); let receiver_adjustments = method_resolution::resolve_indexing_op( - self.db, - self.table.trait_env.clone(), + &mut self.table, canonicalized, index_trait, ); @@ -929,6 +997,7 @@ impl InferenceContext<'_> { } None => { let expected_ty = expected.to_option(&mut self.table); + tracing::debug!(?expected_ty); let opt_ty = match expected_ty.as_ref().map(|it| it.kind(Interner)) { Some(TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_))) => expected_ty, Some(TyKind::Scalar(Scalar::Char)) => { @@ -998,18 +1067,30 @@ impl InferenceContext<'_> { // allows them to be inferred based on how they are used later in the // function. if is_input { - let ty = this.resolve_ty_shallow(&ty); + let ty = this.table.structurally_resolve_type(&ty); match ty.kind(Interner) { TyKind::FnDef(def, parameters) => { let fnptr_ty = TyKind::Function( CallableSig::from_def(this.db, *def, parameters).to_fn_ptr(), ) .intern(Interner); - _ = this.coerce(Some(expr), &ty, &fnptr_ty, CoerceNever::Yes); + _ = this.coerce( + expr.into(), + ty.to_nextsolver(this.table.interner), + fnptr_ty.to_nextsolver(this.table.interner), + AllowTwoPhase::No, + CoerceNever::Yes, + ); } TyKind::Ref(mutbl, _, base_ty) => { let ptr_ty = TyKind::Raw(*mutbl, base_ty.clone()).intern(Interner); - _ = this.coerce(Some(expr), &ty, &ptr_ty, CoerceNever::Yes); + _ = this.coerce( + expr.into(), + ty.to_nextsolver(this.table.interner), + ptr_ty.to_nextsolver(this.table.interner), + AllowTwoPhase::No, + CoerceNever::Yes, + ); } _ => {} } @@ -1087,15 +1168,23 @@ impl InferenceContext<'_> { let ret_ty = self.table.new_type_var(); let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe); let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone()); - let prev_ret_coercion = self.return_coercion.replace(CoerceMany::new(ret_ty.clone())); + let prev_ret_coercion = self + .return_coercion + .replace(CoerceMany::new(ret_ty.to_nextsolver(self.table.interner))); // FIXME: We should handle async blocks like we handle closures let expected = &Expectation::has_type(ret_ty); let (_, inner_ty) = self.with_breakable_ctx(BreakableKind::Border, None, None, |this| { let ty = this.infer_block(tgt_expr, *id, statements, *tail, None, expected); if let Some(target) = expected.only_has_type(&mut this.table) { - match this.coerce(Some(tgt_expr), &ty, &target, CoerceNever::Yes) { - Ok(res) => res, + match this.coerce( + tgt_expr.into(), + ty.to_nextsolver(this.table.interner), + target.to_nextsolver(this.table.interner), + AllowTwoPhase::No, + CoerceNever::Yes, + ) { + Ok(res) => res.to_chalk(this.table.interner), Err(_) => { this.result.type_mismatches.insert( tgt_expr.into(), @@ -1204,13 +1293,21 @@ impl InferenceContext<'_> { (elem_ty, consteval::usize_const(self.db, Some(0), krate)) } Array::ElementList { elements, .. 
} => { - let mut coerce = CoerceMany::new(elem_ty); + let mut coerce = CoerceMany::with_coercion_sites( + elem_ty.to_nextsolver(self.table.interner), + elements, + ); for &expr in elements.iter() { let cur_elem_ty = self.infer_expr_inner(expr, &expected, ExprIsRead::Yes); - coerce.coerce(self, Some(expr), &cur_elem_ty, CoercionCause::Expr(expr)); + coerce.coerce( + self, + &ObligationCause::new(), + expr, + cur_elem_ty.to_nextsolver(self.table.interner), + ); } ( - coerce.complete(self), + coerce.complete(self).to_chalk(self.table.interner), consteval::usize_const(self.db, Some(elements.len() as u128), krate), ) } @@ -1249,11 +1346,17 @@ impl InferenceContext<'_> { .return_coercion .as_mut() .expect("infer_return called outside function body") - .expected_ty(); + .expected_ty() + .to_chalk(self.table.interner); let return_expr_ty = self.infer_expr_inner(expr, &Expectation::HasType(ret_ty), ExprIsRead::Yes); let mut coerce_many = self.return_coercion.take().unwrap(); - coerce_many.coerce(self, Some(expr), &return_expr_ty, CoercionCause::Expr(expr)); + coerce_many.coerce( + self, + &ObligationCause::new(), + expr, + return_expr_ty.to_nextsolver(self.table.interner), + ); self.return_coercion = Some(coerce_many); } @@ -1264,7 +1367,7 @@ impl InferenceContext<'_> { self.infer_return(expr); } else { let mut coerce = self.return_coercion.take().unwrap(); - coerce.coerce_forced_unit(self, CoercionCause::Expr(ret)); + coerce.coerce_forced_unit(self, ret, &ObligationCause::new(), true); self.return_coercion = Some(coerce); } } @@ -1281,7 +1384,7 @@ impl InferenceContext<'_> { fn infer_expr_become(&mut self, expr: ExprId) -> Ty { match &self.return_coercion { Some(return_coercion) => { - let ret_ty = return_coercion.expected_ty(); + let ret_ty = return_coercion.expected_ty().to_chalk(self.table.interner); let call_expr_ty = self.infer_expr_inner( expr, @@ -1414,15 +1517,16 @@ impl InferenceContext<'_> { None => self.err_ty(), }; - let ret_ty = self.normalize_associated_types_in(ret_ty); + let ret_ty = self.process_remote_user_written_ty(ret_ty); if self.is_builtin_binop(&lhs_ty, &rhs_ty, op) { // use knowledge of built-in binary ops, which can sometimes help inference let builtin_ret = self.enforce_builtin_binop_types(&lhs_ty, &rhs_ty, op); self.unify(&builtin_ret, &ret_ty); + builtin_ret + } else { + ret_ty } - - ret_ty } fn infer_block( @@ -1534,9 +1638,10 @@ impl InferenceContext<'_> { }; if this .coerce( - Some(expr), - &this.result.standard_types.unit.clone(), - &t, + expr.into(), + this.result.standard_types.unit.to_nextsolver(this.table.interner), + t.to_nextsolver(this.table.interner), + AllowTwoPhase::No, coerce_never, ) .is_err() @@ -1557,6 +1662,7 @@ impl InferenceContext<'_> { }); self.resolver.reset_to_guard(g); if let Some(prev_env) = prev_env { + self.table.param_env = prev_env.env.to_nextsolver(self.table.interner); self.table.trait_env = prev_env; } @@ -1568,50 +1674,49 @@ impl InferenceContext<'_> { receiver_ty: &Ty, name: &Name, ) -> Option<(Ty, Either, Vec, bool)> { - let mut autoderef = Autoderef::new(&mut self.table, receiver_ty.clone(), false, false); + let interner = self.table.interner; + let mut autoderef = self.table.autoderef(receiver_ty.to_nextsolver(self.table.interner)); let mut private_field = None; let res = autoderef.by_ref().find_map(|(derefed_ty, _)| { - let (field_id, parameters) = match derefed_ty.kind(Interner) { - TyKind::Tuple(_, substs) => { + let (field_id, parameters) = match derefed_ty.kind() { + crate::next_solver::TyKind::Tuple(substs) => { return 
name.as_tuple_index().and_then(|idx| { - substs - .as_slice(Interner) - .get(idx) - .map(|a| a.assert_ty_ref(Interner)) - .cloned() - .map(|ty| { - ( - Either::Right(TupleFieldId { - tuple: TupleId( - self.tuple_field_accesses_rev - .insert_full(substs.clone()) - .0 - as u32, - ), - index: idx as u32, - }), - ty, - ) - }) + substs.as_slice().get(idx).copied().map(|ty| { + ( + Either::Right(TupleFieldId { + tuple: TupleId( + self.tuple_field_accesses_rev + .insert_full(substs.to_chalk(interner)) + .0 as u32, + ), + index: idx as u32, + }), + ty.to_chalk(interner), + ) + }) }); } - &TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), ref parameters) => { - let local_id = s.fields(self.db).field(name)?; - let field = FieldId { parent: s.into(), local_id }; - (field, parameters.clone()) - } - &TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), ref parameters) => { - let local_id = u.fields(self.db).field(name)?; - let field = FieldId { parent: u.into(), local_id }; - (field, parameters.clone()) - } + crate::next_solver::TyKind::Adt(adt, parameters) => match adt.def_id().0 { + hir_def::AdtId::StructId(s) => { + let local_id = s.fields(self.db).field(name)?; + let field = FieldId { parent: s.into(), local_id }; + (field, parameters) + } + hir_def::AdtId::UnionId(u) => { + let local_id = u.fields(self.db).field(name)?; + let field = FieldId { parent: u.into(), local_id }; + (field, parameters) + } + hir_def::AdtId::EnumId(_) => return None, + }, _ => return None, }; + let parameters: crate::Substitution = parameters.to_chalk(interner); let is_visible = self.db.field_visibilities(field_id.parent)[field_id.local_id] .is_visible_from(self.db, self.resolver.module()); if !is_visible { if private_field.is_none() { - private_field = Some((field_id, parameters)); + private_field = Some((field_id, parameters.clone())); } return None; } @@ -1623,20 +1728,18 @@ impl InferenceContext<'_> { Some(match res { Some((field_id, ty)) => { - let adjustments = auto_deref_adjust_steps(&autoderef); - let ty = self.insert_type_vars(ty); - let ty = self.normalize_associated_types_in(ty); + let adjustments = autoderef.adjust_steps(); + let ty = self.process_remote_user_written_ty(ty); (ty, field_id, adjustments, true) } None => { let (field_id, subst) = private_field?; - let adjustments = auto_deref_adjust_steps(&autoderef); + let adjustments = autoderef.adjust_steps(); let ty = self.db.field_types(field_id.parent)[field_id.local_id] .clone() .substitute(Interner, &subst); - let ty = self.insert_type_vars(ty); - let ty = self.normalize_associated_types_in(ty); + let ty = self.process_remote_user_written_ty(ty); (ty, Either::Left(field_id), adjustments, false) } @@ -1675,7 +1778,8 @@ impl InferenceContext<'_> { None => { // no field found, lets attempt to resolve it like a function so that IDE things // work out while people are typing - let canonicalized_receiver = self.canonicalize(receiver_ty.clone()); + let canonicalized_receiver = + self.canonicalize(receiver_ty.clone().to_nextsolver(self.table.interner)); let resolved = method_resolution::lookup_method( self.db, &canonicalized_receiver, @@ -1720,11 +1824,13 @@ impl InferenceContext<'_> { expected: &Expectation, ) -> Ty { let callee_ty = self.infer_expr(callee, &Expectation::none(), ExprIsRead::Yes); - let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone(), false, true); + let interner = self.table.interner; + let mut derefs = self.table.autoderef(callee_ty.to_nextsolver(interner)); let (res, derefed_callee) = loop { let Some((callee_deref_ty, _)) = derefs.next() 
else { break (None, callee_ty.clone()); }; + let callee_deref_ty = callee_deref_ty.to_chalk(interner); if let Some(res) = derefs.table.callable_sig(&callee_deref_ty, args.len()) { break (Some(res), callee_deref_ty); } @@ -1735,28 +1841,30 @@ impl InferenceContext<'_> { derefed_callee.callable_sig(self.db).is_some_and(|sig| sig.is_varargs) || res.is_none(); let (param_tys, ret_ty) = match res { Some((func, params, ret_ty)) => { - let mut adjustments = auto_deref_adjust_steps(&derefs); - if let TyKind::Closure(c, _) = - self.table.resolve_completely(callee_ty.clone()).kind(Interner) - { - self.add_current_closure_dependency(*c); - self.deferred_closures.entry(*c).or_default().push(( - derefed_callee.clone(), - callee_ty.clone(), - params.clone(), - tgt_expr, - )); - } + let params_chalk = + params.iter().map(|param| param.to_chalk(interner)).collect::>(); + let mut adjustments = derefs.adjust_steps(); if let Some(fn_x) = func { self.write_fn_trait_method_resolution( fn_x, &derefed_callee, &mut adjustments, &callee_ty, - ¶ms, + ¶ms_chalk, tgt_expr, ); } + if let &TyKind::Closure(c, _) = + self.table.resolve_completely(callee_ty.clone()).kind(Interner) + { + self.add_current_closure_dependency(c.into()); + self.deferred_closures.entry(c.into()).or_default().push(( + derefed_callee.clone(), + callee_ty.clone(), + params_chalk, + tgt_expr, + )); + } self.write_expr_adj(callee, adjustments.into_boxed_slice()); (params, ret_ty) } @@ -1765,7 +1873,7 @@ impl InferenceContext<'_> { call_expr: tgt_expr, found: callee_ty.clone(), }); - (Vec::new(), self.err_ty()) + (Vec::new(), crate::next_solver::Ty::new_error(interner, ErrorGuaranteed)) } }; let indices_to_skip = self.check_legacy_const_generics(derefed_callee, args); @@ -1786,29 +1894,24 @@ impl InferenceContext<'_> { tgt_expr: ExprId, args: &[ExprId], callee_ty: Ty, - param_tys: &[Ty], - ret_ty: Ty, + param_tys: &[crate::next_solver::Ty<'db>], + ret_ty: crate::next_solver::Ty<'db>, indices_to_skip: &[u32], is_varargs: bool, expected: &Expectation, ) -> Ty { self.register_obligations_for_call(&callee_ty); - let expected_inputs = self.expected_inputs_for_expected_output( - expected, - ret_ty.clone(), - param_tys.to_owned(), - ); - self.check_call_arguments( tgt_expr, - args, - &expected_inputs, param_tys, + ret_ty, + expected, + args, indices_to_skip, is_varargs, ); - self.normalize_associated_types_in(ret_ty) + self.table.normalize_associated_types_in_ns(ret_ty).to_chalk(self.table.interner) } fn infer_method_call( @@ -1821,7 +1924,23 @@ impl InferenceContext<'_> { expected: &Expectation, ) -> Ty { let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none(), ExprIsRead::Yes); - let canonicalized_receiver = self.canonicalize(receiver_ty.clone()); + let receiver_ty = self.table.structurally_resolve_type(&receiver_ty); + + if matches!( + receiver_ty.kind(Interner), + TyKind::Error | TyKind::InferenceVar(_, TyVariableKind::General) + ) { + // Don't probe on error type, or on a fully unresolved infer var. + // FIXME: Emit an error if we're probing on an infer var (type annotations needed). + for &arg in args { + // Make sure we infer and record the arguments. 
+ self.infer_expr_no_expect(arg, ExprIsRead::Yes); + } + return receiver_ty; + } + + let canonicalized_receiver = + self.canonicalize(receiver_ty.clone().to_nextsolver(self.table.interner)); let resolved = method_resolution::lookup_method( self.db, @@ -1912,14 +2031,21 @@ impl InferenceContext<'_> { tgt_expr, args, callee_ty, - sig.params().get(strip_first as usize..).unwrap_or(&[]), - sig.ret().clone(), + &sig.params() + .get(strip_first as usize..) + .unwrap_or(&[]) + .iter() + .map(|param| param.to_nextsolver(self.table.interner)) + .collect::>(), + sig.ret().to_nextsolver(self.table.interner), &[], true, expected, ), None => { - self.check_call_arguments(tgt_expr, args, &[], &[], &[], true); + for &arg in args.iter() { + self.infer_expr_no_expect(arg, ExprIsRead::Yes); + } self.err_ty() } } @@ -1938,147 +2064,252 @@ impl InferenceContext<'_> { ) -> Ty { let method_ty = method_ty.substitute(Interner, &substs); self.register_obligations_for_call(&method_ty); + let interner = self.table.interner; let ((formal_receiver_ty, param_tys), ret_ty, is_varargs) = match method_ty.callable_sig(self.db) { Some(sig) => ( if !sig.params().is_empty() { - (sig.params()[0].clone(), sig.params()[1..].to_vec()) + ( + sig.params()[0].to_nextsolver(interner), + sig.params()[1..] + .iter() + .map(|param| param.to_nextsolver(interner)) + .collect(), + ) } else { - (self.err_ty(), Vec::new()) + (crate::next_solver::Ty::new_error(interner, ErrorGuaranteed), Vec::new()) }, - sig.ret().clone(), + sig.ret().to_nextsolver(interner), sig.is_varargs, ), - None => ((self.err_ty(), Vec::new()), self.err_ty(), true), + None => { + let formal_receiver_ty = self.table.next_ty_var(); + let ret_ty = self.table.next_ty_var(); + ((formal_receiver_ty, Vec::new()), ret_ty, true) + } }; - self.unify(&formal_receiver_ty, &receiver_ty); + self.table.unify_ns(formal_receiver_ty, receiver_ty.to_nextsolver(interner)); - let expected_inputs = - self.expected_inputs_for_expected_output(expected, ret_ty.clone(), param_tys.clone()); - - self.check_call_arguments(tgt_expr, args, &expected_inputs, ¶m_tys, &[], is_varargs); - self.normalize_associated_types_in(ret_ty) + self.check_call_arguments(tgt_expr, ¶m_tys, ret_ty, expected, args, &[], is_varargs); + self.table.normalize_associated_types_in_ns(ret_ty).to_chalk(interner) } - fn expected_inputs_for_expected_output( + /// Generic function that factors out common logic from function calls, + /// method calls and overloaded operators. 
+ pub(in super::super) fn check_call_arguments( &mut self, - expected_output: &Expectation, - output: Ty, - inputs: Vec, - ) -> Vec { - if let Some(expected_ty) = expected_output.only_has_type(&mut self.table) { - self.table.fudge_inference(|table| { - if table.try_unify(&expected_ty, &output).is_ok() { - table.resolve_with_fallback(inputs, &|var, kind, _, _| match kind { - chalk_ir::VariableKind::Ty(tk) => var.to_ty(Interner, tk).cast(Interner), - chalk_ir::VariableKind::Lifetime => { - var.to_lifetime(Interner).cast(Interner) - } - chalk_ir::VariableKind::Const(ty) => { - var.to_const(Interner, ty).cast(Interner) + call_expr: ExprId, + // Types (as defined in the *signature* of the target function) + formal_input_tys: &[crate::next_solver::Ty<'db>], + formal_output: crate::next_solver::Ty<'db>, + // Expected output from the parent expression or statement + expectation: &Expectation, + // The expressions for each provided argument + provided_args: &[ExprId], + skip_indices: &[u32], + // Whether the function is variadic, for example when imported from C + c_variadic: bool, + ) { + let interner = self.table.interner; + + // First, let's unify the formal method signature with the expectation eagerly. + // We use this to guide coercion inference; it's output is "fudged" which means + // any remaining type variables are assigned to new, unrelated variables. This + // is because the inference guidance here is only speculative. + let formal_output = self.table.resolve_vars_with_obligations(formal_output); + let expected_input_tys: Option> = expectation + .only_has_type(&mut self.table) + .and_then(|expected_output| { + self.table + .infer_ctxt + .fudge_inference_if_ok(|| { + let mut ocx = ObligationCtxt::new(&self.table.infer_ctxt); + + // Attempt to apply a subtyping relationship between the formal + // return type (likely containing type variables if the function + // is polymorphic) and the expected return type. + // No argument expectations are produced if unification fails. + let origin = ObligationCause::new(); + ocx.sup( + &origin, + self.table.param_env, + expected_output.to_nextsolver(interner), + formal_output, + )?; + if !ocx.select_where_possible().is_empty() { + return Err(crate::next_solver::TypeError::Mismatch); } + + // Record all the argument types, with the args + // produced from the above subtyping unification. + Ok(Some( + formal_input_tys + .iter() + .map(|&ty| self.table.infer_ctxt.resolve_vars_if_possible(ty)) + .collect(), + )) }) - } else { - Vec::new() - } + .ok() }) + .unwrap_or_default(); + + // If there are no external expectations at the call site, just use the types from the function defn + let expected_input_tys = if let Some(expected_input_tys) = &expected_input_tys { + assert_eq!(expected_input_tys.len(), formal_input_tys.len()); + expected_input_tys } else { - Vec::new() - } - } + formal_input_tys + }; - fn check_call_arguments( - &mut self, - expr: ExprId, - args: &[ExprId], - expected_inputs: &[Ty], - param_tys: &[Ty], - skip_indices: &[u32], - ignore_arg_param_mismatch: bool, - ) { - let arg_count_mismatch = - !ignore_arg_param_mismatch && args.len() != param_tys.len() + skip_indices.len(); - if arg_count_mismatch { + let minimum_input_count = expected_input_tys.len(); + let provided_arg_count = provided_args.len() - skip_indices.len(); + + // Keep track of whether we *could possibly* be satisfied, i.e. 
whether we're on the happy path + // if the wrong number of arguments were supplied, we CAN'T be satisfied, + // and if we're c_variadic, the supplied arguments must be >= the minimum count from the function + // otherwise, they need to be identical, because rust doesn't currently support variadic functions + let args_count_matches = if c_variadic { + provided_arg_count >= minimum_input_count + } else { + provided_arg_count == minimum_input_count + }; + + if !args_count_matches { self.push_diagnostic(InferenceDiagnostic::MismatchedArgCount { - call_expr: expr, - expected: param_tys.len() + skip_indices.len(), - found: args.len(), + call_expr, + expected: expected_input_tys.len() + skip_indices.len(), + found: provided_args.len(), }); + } + + // We introduce a helper function to demand that a given argument satisfy a given input + // This is more complicated than just checking type equality, as arguments could be coerced + // This version writes those types back so further type checking uses the narrowed types + let demand_compatible = |this: &mut InferenceContext<'db>, idx| { + let formal_input_ty: crate::next_solver::Ty<'db> = formal_input_tys[idx]; + let expected_input_ty: crate::next_solver::Ty<'db> = expected_input_tys[idx]; + let provided_arg = provided_args[idx]; + + debug!("checking argument {}: {:?} = {:?}", idx, provided_arg, formal_input_ty); + + // We're on the happy path here, so we'll do a more involved check and write back types + // To check compatibility, we'll do 3 things: + // 1. Unify the provided argument with the expected type + let expectation = Expectation::rvalue_hint(this, expected_input_ty.to_chalk(interner)); + + let checked_ty = this + .infer_expr_inner(provided_arg, &expectation, ExprIsRead::Yes) + .to_nextsolver(interner); + + // 2. Coerce to the most detailed type that could be coerced + // to, which is `expected_ty` if `rvalue_hint` returns an + // `ExpectHasType(expected_ty)`, or the `formal_ty` otherwise. + let coerced_ty = expectation + .only_has_type(&mut this.table) + .map(|it| it.to_nextsolver(interner)) + .unwrap_or(formal_input_ty); + + // Cause selection errors caused by resolving a single argument to point at the + // argument and not the call. This lets us customize the span pointed to in the + // fulfillment error to be more accurate. + let coerced_ty = this.table.resolve_vars_with_obligations(coerced_ty); + + let coerce_never = if this + .expr_guaranteed_to_constitute_read_for_never(provided_arg, ExprIsRead::Yes) + { + CoerceNever::Yes + } else { + CoerceNever::No + }; + let coerce_error = this + .coerce( + provided_arg.into(), + checked_ty, + coerced_ty, + AllowTwoPhase::Yes, + coerce_never, + ) + .err(); + if coerce_error.is_some() { + return Err((coerce_error, coerced_ty, checked_ty)); + } + + // 3. Check if the formal type is actually equal to the checked one + // and register any such obligations for future type checks. + let formal_ty_error = this + .table + .infer_ctxt + .at(&ObligationCause::new(), this.table.param_env) + .eq(DefineOpaqueTypes::Yes, formal_input_ty, coerced_ty); + + // If neither check failed, the types are compatible + match formal_ty_error { + Ok(crate::next_solver::infer::InferOk { obligations, value: () }) => { + this.table.register_predicates(obligations); + Ok(()) + } + Err(err) => Err((Some(err), coerced_ty, checked_ty)), + } }; - // Quoting https://github.com/rust-lang/rust/blob/6ef275e6c3cb1384ec78128eceeb4963ff788dca/src/librustc_typeck/check/mod.rs#L3325 -- + // Check the arguments. 
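// Illustrative sketch (hypothetical, standalone code; not rust-analyzer API):
// the three-step check performed by `demand_compatible` above — infer the
// provided argument against the *expected* type, coerce the result to the
// narrowed coercion target, then make sure that target still agrees with the
// *formal* parameter type — reduced to toy types so the control flow is
// easier to follow.
#[derive(Clone, Copy, PartialEq, Debug)]
enum ToyTy {
    I32,
    U8,
    Unknown, // stands in for an unresolved inference variable
}

#[derive(Debug)]
struct ToyMismatch {
    expected: ToyTy,
    found: ToyTy,
}

// Toy coercion: an unknown type coerces to anything, otherwise types must match.
fn toy_coerce(from: ToyTy, to: ToyTy) -> Result<ToyTy, ()> {
    if from == to || from == ToyTy::Unknown { Ok(to) } else { Err(()) }
}

fn toy_demand_compatible(
    checked_ty: ToyTy,  // step 1: type inferred for the provided argument expression
    expected_ty: ToyTy, // expectation propagated back from the call's return type
    formal_ty: ToyTy,   // parameter type taken from the callee's signature
) -> Result<(), ToyMismatch> {
    // Step 2: coerce to the most precise target available — the expectation if
    // there is one, otherwise the formal parameter type.
    let target = if expected_ty != ToyTy::Unknown { expected_ty } else { formal_ty };
    let coerced = toy_coerce(checked_ty, target)
        .map_err(|()| ToyMismatch { expected: target, found: checked_ty })?;
    // Step 3: the coerced type must still be equal to the formal parameter type.
    if coerced == formal_ty || formal_ty == ToyTy::Unknown {
        Ok(())
    } else {
        Err(ToyMismatch { expected: formal_ty, found: coerced })
    }
}

fn main() {
    // The expectation narrows an otherwise unconstrained argument.
    assert!(toy_demand_compatible(ToyTy::Unknown, ToyTy::I32, ToyTy::Unknown).is_ok());
    // Plain mismatch between the argument and the signature.
    if let Err(m) = toy_demand_compatible(ToyTy::U8, ToyTy::Unknown, ToyTy::I32) {
        println!("mismatch: expected {:?}, found {:?}", m.expected, m.found);
    }
}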
// We do this in a pretty awful way: first we type-check any arguments // that are not closures, then we type-check the closures. This is so // that we have more information about the types of arguments when we // type-check the functions. This isn't really the right way to do this. for check_closures in [false, true] { - let mut skip_indices = skip_indices.iter().copied().fuse().peekable(); - let param_iter = param_tys.iter().cloned().chain(repeat(self.err_ty())); - let expected_iter = expected_inputs - .iter() - .cloned() - .chain(param_iter.clone().skip(expected_inputs.len())); - for (idx, ((&arg, param_ty), expected_ty)) in - args.iter().zip(param_iter).zip(expected_iter).enumerate() - { - let is_closure = matches!(&self.body[arg], Expr::Closure { .. }); - if is_closure != check_closures { + // More awful hacks: before we check argument types, try to do + // an "opportunistic" trait resolution of any trait bounds on + // the call. This helps coercions. + if check_closures { + self.table.select_obligations_where_possible(); + } + + let mut skip_indices = skip_indices.iter().copied(); + // Check each argument, to satisfy the input it was provided for + // Visually, we're traveling down the diagonal of the compatibility matrix + for (idx, arg) in provided_args.iter().enumerate() { + if skip_indices.clone().next() == Some(idx as u32) { + skip_indices.next(); continue; } - while skip_indices.peek().is_some_and(|&i| i < idx as u32) { - skip_indices.next(); + // For this check, we do *not* want to treat async coroutine closures (async blocks) + // as proper closures. Doing so would regress type inference when feeding + // the return value of an argument-position async block to an argument-position + // closure wrapped in a block. + // See . + let is_closure = if let Expr::Closure { closure_kind, .. } = self.body[*arg] { + !matches!(closure_kind, ClosureKind::Coroutine(_)) + } else { + false + }; + if is_closure != check_closures { + continue; } - if skip_indices.peek().copied() == Some(idx as u32) { + + if idx >= minimum_input_count { + // Make sure we've checked this expr at least once. + self.infer_expr_no_expect(*arg, ExprIsRead::Yes); continue; } - // the difference between param_ty and expected here is that - // expected is the parameter when the expected *return* type is - // taken into account. So in `let _: &[i32] = identity(&[1, 2])` - // the expected type is already `&[i32]`, whereas param_ty is - // still an unbound type variable. We don't always want to force - // the parameter to coerce to the expected type (for example in - // `coerce_unsize_expected_type_4`). - let param_ty = self.normalize_associated_types_in(param_ty); - let expected_ty = self.normalize_associated_types_in(expected_ty); - let expected = Expectation::rvalue_hint(self, expected_ty); - // infer with the expected type we have... - let ty = self.infer_expr_inner(arg, &expected, ExprIsRead::Yes); - - // then coerce to either the expected type or just the formal parameter type - let coercion_target = if let Some(ty) = expected.only_has_type(&mut self.table) { - // if we are coercing to the expectation, unify with the - // formal parameter type to connect everything - self.unify(&ty, ¶m_ty); - ty - } else { - param_ty - }; - // The function signature may contain some unknown types, so we need to insert - // type vars here to avoid type mismatch false positive. 
- let coercion_target = self.insert_type_vars(coercion_target); - - // Any expression that produces a value of type `!` must have diverged, - // unless it's a place expression that isn't being read from, in which case - // diverging would be unsound since we may never actually read the `!`. - // e.g. `let _ = *never_ptr;` with `never_ptr: *const !`. - let coerce_never = - if self.expr_guaranteed_to_constitute_read_for_never(arg, ExprIsRead::Yes) { - CoerceNever::Yes - } else { - CoerceNever::No - }; - if self.coerce(Some(arg), &ty, &coercion_target, coerce_never).is_err() - && !arg_count_mismatch + if let Err((_error, expected, found)) = demand_compatible(self, idx) + && args_count_matches { + // Don't report type mismatches if there is a mismatch in args count. self.result.type_mismatches.insert( - arg.into(), - TypeMismatch { expected: coercion_target, actual: ty.clone() }, + (*arg).into(), + TypeMismatch { + expected: expected.to_chalk(interner), + actual: found.to_chalk(interner), + }, ); } } } + + if !args_count_matches {} } fn substs_for_method_call( @@ -2227,7 +2458,7 @@ impl InferenceContext<'_> { } fn register_obligations_for_call(&mut self, callable_ty: &Ty) { - let callable_ty = self.resolve_ty_shallow(callable_ty); + let callable_ty = self.table.structurally_resolve_type(callable_ty); if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind(Interner) { let def: CallableDefId = from_chalk(self.db, *fn_def); let generic_predicates = @@ -2316,9 +2547,9 @@ impl InferenceContext<'_> { /// Dereferences a single level of immutable referencing. fn deref_ty_if_possible(&mut self, ty: &Ty) -> Ty { - let ty = self.resolve_ty_shallow(ty); + let ty = self.table.structurally_resolve_type(ty); match ty.kind(Interner) { - TyKind::Ref(Mutability::Not, _, inner) => self.resolve_ty_shallow(inner), + TyKind::Ref(Mutability::Not, _, inner) => self.table.structurally_resolve_type(inner), _ => ty, } } @@ -2438,10 +2669,22 @@ impl InferenceContext<'_> { cb: impl FnOnce(&mut Self) -> T, ) -> (Option, T) { self.breakables.push({ - BreakableContext { kind, may_break: false, coerce: ty.map(CoerceMany::new), label } + BreakableContext { + kind, + may_break: false, + coerce: ty.map(|ty| CoerceMany::new(ty.to_nextsolver(self.table.interner))), + label, + } }); let res = cb(self); let ctx = self.breakables.pop().expect("breakable stack broken"); - (if ctx.may_break { ctx.coerce.map(|ctx| ctx.complete(self)) } else { None }, res) + ( + if ctx.may_break { + ctx.coerce.map(|ctx| ctx.complete(self).to_chalk(self.table.interner)) + } else { + None + }, + res, + ) } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs index 707bec0fce4ce..6e11fa942bdfb 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs @@ -10,6 +10,8 @@ use hir_def::{ use hir_expand::name::Name; use stdx::TupleExt; +use crate::infer::AllowTwoPhase; +use crate::next_solver::mapping::{ChalkToNextSolver, NextSolverToChalk}; use crate::{ DeclContext, DeclOrigin, InferenceDiagnostic, Interner, Mutability, Scalar, Substitution, Ty, TyBuilder, TyExt, TyKind, @@ -88,7 +90,7 @@ impl InferenceContext<'_> { Some(substs) => f.substitute(Interner, substs), None => f.substitute(Interner, &Substitution::empty(Interner)), }; - self.normalize_associated_types_in(expected_ty) + self.process_remote_user_written_ty(expected_ty) } None => self.err_ty(), } @@ -152,7 +154,7 @@ impl InferenceContext<'_> { Some(substs) 
=> f.substitute(Interner, substs), None => f.substitute(Interner, &Substitution::empty(Interner)), }; - self.normalize_associated_types_in(expected_ty) + self.process_remote_user_written_ty(expected_ty) } None => { self.push_diagnostic(InferenceDiagnostic::NoSuchField { @@ -190,7 +192,7 @@ impl InferenceContext<'_> { subs: &[PatId], decl: Option, ) -> Ty { - let expected = self.resolve_ty_shallow(expected); + let expected = self.table.structurally_resolve_type(expected); let expectations = match expected.as_tuple() { Some(parameters) => parameters.as_slice(Interner), _ => &[], @@ -238,7 +240,7 @@ impl InferenceContext<'_> { mut default_bm: BindingMode, decl: Option, ) -> Ty { - let mut expected = self.resolve_ty_shallow(expected); + let mut expected = self.table.structurally_resolve_type(expected); if matches!(&self.body[pat], Pat::Ref { .. }) || self.inside_assignment { cov_mark::hit!(match_ergonomics_ref); @@ -251,7 +253,7 @@ impl InferenceContext<'_> { let mut pat_adjustments = Vec::new(); while let Some((inner, _lifetime, mutability)) = expected.as_reference() { pat_adjustments.push(expected.clone()); - expected = self.resolve_ty_shallow(inner); + expected = self.table.structurally_resolve_type(inner); default_bm = match default_bm { BindingMode::Move => BindingMode::Ref(mutability), BindingMode::Ref(Mutability::Not) => BindingMode::Ref(Mutability::Not), @@ -303,16 +305,15 @@ impl InferenceContext<'_> { Pat::Path(path) => { let ty = self.infer_path(path, pat.into()).unwrap_or_else(|| self.err_ty()); let ty_inserted_vars = self.insert_type_vars_shallow(ty.clone()); - match self.table.coerce(&expected, &ty_inserted_vars, CoerceNever::Yes) { - Ok((adjustments, coerced_ty)) => { - if !adjustments.is_empty() { - self.result - .pat_adjustments - .entry(pat) - .or_default() - .extend(adjustments.into_iter().map(|adjust| adjust.target)); - } - self.write_pat_ty(pat, coerced_ty); + match self.coerce( + pat.into(), + expected.to_nextsolver(self.table.interner), + ty_inserted_vars.to_nextsolver(self.table.interner), + AllowTwoPhase::No, + CoerceNever::Yes, + ) { + Ok(coerced_ty) => { + self.write_pat_ty(pat, coerced_ty.to_chalk(self.table.interner)); return self.pat_ty_after_adjustment(pat); } Err(_) => { @@ -387,8 +388,14 @@ impl InferenceContext<'_> { ); // We are returning early to avoid the unifiability check below. let lhs_ty = self.insert_type_vars_shallow(result); - let ty = match self.coerce(None, &expected, &lhs_ty, CoerceNever::Yes) { - Ok(ty) => ty, + let ty = match self.coerce( + pat.into(), + expected.to_nextsolver(self.table.interner), + lhs_ty.to_nextsolver(self.table.interner), + AllowTwoPhase::No, + CoerceNever::Yes, + ) { + Ok(ty) => ty.to_chalk(self.table.interner), Err(_) => { self.result.type_mismatches.insert( pat.into(), @@ -494,7 +501,7 @@ impl InferenceContext<'_> { default_bm: BindingMode, decl: Option, ) -> Ty { - let expected = self.resolve_ty_shallow(expected); + let expected = self.table.structurally_resolve_type(expected); // If `expected` is an infer ty, we try to equate it to an array if the given pattern // allows it. 
See issue #16609 @@ -506,7 +513,7 @@ impl InferenceContext<'_> { self.unify(&expected, &resolved_array_ty); } - let expected = self.resolve_ty_shallow(&expected); + let expected = self.table.structurally_resolve_type(&expected); let elem_ty = match expected.kind(Interner) { TyKind::Array(st, _) | TyKind::Slice(st) => st.clone(), _ => self.err_ty(), @@ -542,7 +549,7 @@ impl InferenceContext<'_> { if let Expr::Literal(Literal::ByteString(_)) = self.body[expr] && let Some((inner, ..)) = expected.as_reference() { - let inner = self.resolve_ty_shallow(inner); + let inner = self.table.structurally_resolve_type(inner); if matches!(inner.kind(Interner), TyKind::Slice(_)) { let elem_ty = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner); let slice_ty = TyKind::Slice(elem_ty).intern(Interner); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs index bc8648ecdd943..80f7324e58b2b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs @@ -10,20 +10,20 @@ use hir_expand::name::Name; use stdx::never; use crate::{ - InferenceDiagnostic, Interner, Substitution, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, - TyKind, ValueTyDefId, + InferenceDiagnostic, Interner, LifetimeElisionKind, Substitution, TraitRef, TraitRefExt, Ty, + TyBuilder, TyExt, TyKind, ValueTyDefId, builder::ParamKind, consteval, error_lifetime, generics::generics, infer::diagnostics::InferenceTyLoweringContext as TyLoweringContext, - lower::LifetimeElisionKind, method_resolution::{self, VisibleFromModule}, + next_solver::mapping::ChalkToNextSolver, to_chalk_trait_id, }; use super::{ExprOrPatId, InferenceContext, InferenceTyDiagnosticSource}; -impl InferenceContext<'_> { +impl<'db> InferenceContext<'db> { pub(super) fn infer_path(&mut self, path: &Path, id: ExprOrPatId) -> Option { let (value_def, generic_def, substs) = match self.resolve_value_path(path, id)? { ValuePathResolution::GenericDef(value_def, generic_def, substs) => { @@ -31,13 +31,13 @@ impl InferenceContext<'_> { } ValuePathResolution::NonGeneric(ty) => return Some(ty), }; - let substs = self.insert_type_vars(substs); - let substs = self.normalize_associated_types_in(substs); + let substs = + self.process_remote_user_written_ty::<_, crate::next_solver::GenericArgs<'db>>(substs); self.add_required_obligations_for_value_path(generic_def, &substs); let ty = self.db.value_ty(value_def)?.substitute(Interner, &substs); - let ty = self.normalize_associated_types_in(ty); + let ty = self.process_remote_user_written_ty(ty); Some(ty) } @@ -173,14 +173,12 @@ impl InferenceContext<'_> { let last = path.segments().last()?; let (ty, orig_ns) = path_ctx.ty_ctx().lower_ty_ext(type_ref); - let ty = self.table.insert_type_vars(ty); - let ty = self.table.normalize_associated_types_in(ty); + let ty = self.table.process_user_written_ty(ty); path_ctx.ignore_last_segment(); let (ty, _) = path_ctx.lower_ty_relative_path(ty, orig_ns, true); drop_ctx(ctx, no_diagnostics); - let ty = self.table.insert_type_vars(ty); - let ty = self.table.normalize_associated_types_in(ty); + let ty = self.table.process_user_written_ty(ty); self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))? 
} else { let hygiene = self.body.expr_or_pat_path_hygiene(id); @@ -223,8 +221,7 @@ impl InferenceContext<'_> { return None; } - let ty = self.insert_type_vars(ty); - let ty = self.normalize_associated_types_in(ty); + let ty = self.process_user_written_ty(ty); self.resolve_ty_assoc_item(ty, last_segment.name, id) } @@ -322,7 +319,7 @@ impl InferenceContext<'_> { return Some(result); } - let canonical_ty = self.canonicalize(ty.clone()); + let canonical_ty = self.canonicalize(ty.clone().to_nextsolver(self.table.interner)); let mut not_visible = None; let res = method_resolution::iterate_method_candidates( @@ -392,7 +389,7 @@ impl InferenceContext<'_> { name: &Name, id: ExprOrPatId, ) -> Option<(ValueNs, Substitution)> { - let ty = self.resolve_ty_shallow(ty); + let ty = self.table.structurally_resolve_type(ty); let (enum_id, subst) = match ty.as_adt() { Some((AdtId::EnumId(e), subst)) => (e, subst), _ => return None, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs index c07755535f2a6..1687857ae1ac2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs @@ -1,125 +1,118 @@ //! Unification and canonicalization logic. -use std::{fmt, mem}; +use std::fmt; use chalk_ir::{ - CanonicalVarKind, FloatTy, IntTy, TyVariableKind, UniverseIndex, cast::Cast, - fold::TypeFoldable, interner::HasInterner, zip::Zip, + CanonicalVarKind, FloatTy, IntTy, TyVariableKind, cast::Cast, fold::TypeFoldable, + interner::HasInterner, }; -use chalk_solve::infer::ParameterEnaVariableExt; use either::Either; -use ena::unify::UnifyKey; use hir_def::{AdtId, lang_item::LangItem}; use hir_expand::name::Name; use intern::sym; -use rustc_hash::FxHashMap; +use rustc_hash::{FxHashMap, FxHashSet}; +use rustc_type_ir::inherent::Ty as _; +use rustc_type_ir::{ + FloatVid, IntVid, TyVid, TypeVisitableExt, + inherent::{IntoKind, Span, Term as _}, + relate::{Relate, solver_relating::RelateExt}, + solve::{Certainty, GoalSource, NoSolution}, +}; use smallvec::SmallVec; use triomphe::Arc; -use super::{InferOk, InferResult, InferenceContext, TypeError}; +use super::{InferResult, InferenceContext, TypeError}; +use crate::next_solver::ErrorGuaranteed; use crate::{ - AliasEq, AliasTy, BoundVar, Canonical, Const, ConstValue, DebruijnIndex, DomainGoal, - GenericArg, GenericArgData, Goal, GoalData, Guidance, InEnvironment, InferenceVar, Interner, - Lifetime, OpaqueTyId, ParamKind, ProjectionTy, ProjectionTyExt, Scalar, Solution, Substitution, - TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind, VariableKind, WhereClause, - consteval::unknown_const, db::HirDatabase, fold_generic_args, fold_tys_and_consts, - to_chalk_trait_id, traits::FnTrait, + AliasTy, BoundVar, Canonical, Const, ConstValue, DebruijnIndex, GenericArg, GenericArgData, + Goal, GoalData, InEnvironment, InferenceVar, Interner, Lifetime, OpaqueTyId, ParamKind, + ProjectionTy, Scalar, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind, + VariableKind, WhereClause, + consteval::unknown_const, + db::HirDatabase, + fold_generic_args, fold_tys_and_consts, + next_solver::infer::InferOk, + next_solver::{ + self, ClauseKind, DbInterner, ParamEnv, Predicate, PredicateKind, SolverDefIds, Term, + fulfill::FulfillmentCtxt, + infer::{ + DbInternerInferExt, InferCtxt, + snapshot::CombinedSnapshot, + traits::{Obligation, ObligationCause}, + }, + inspect::{InspectConfig, InspectGoal, ProofTreeVisitor}, + 
mapping::{ChalkToNextSolver, NextSolverToChalk}, + }, + to_chalk_trait_id, + traits::{ + FnTrait, NextTraitSolveResult, next_trait_solve_canonical_in_ctxt, next_trait_solve_in_ctxt, + }, }; -impl InferenceContext<'_> { - pub(super) fn canonicalize(&mut self, t: T) -> Canonical +impl<'db> InferenceContext<'db> { + pub(super) fn canonicalize(&mut self, t: T) -> rustc_type_ir::Canonical, T> where - T: TypeFoldable + HasInterner, + T: rustc_type_ir::TypeFoldable>, { self.table.canonicalize(t) } +} - pub(super) fn clauses_for_self_ty( - &mut self, - self_ty: InferenceVar, - ) -> SmallVec<[WhereClause; 4]> { - self.table.resolve_obligations_as_possible(); - - let root = self.table.var_unification_table.inference_var_root(self_ty); - let pending_obligations = mem::take(&mut self.table.pending_obligations); - let obligations = pending_obligations - .iter() - .filter_map(|obligation| match obligation.value.value.goal.data(Interner) { - GoalData::DomainGoal(DomainGoal::Holds(clause)) => { - let ty = match clause { - WhereClause::AliasEq(AliasEq { - alias: AliasTy::Projection(projection), - .. - }) => projection.self_type_parameter(self.db), - WhereClause::Implemented(trait_ref) => { - trait_ref.self_type_parameter(Interner) - } - WhereClause::TypeOutlives(to) => to.ty.clone(), - _ => return None, - }; +struct NestedObligationsForSelfTy<'a, 'db> { + ctx: &'a InferenceTable<'db>, + self_ty: TyVid, + root_cause: &'a ObligationCause, + obligations_for_self_ty: &'a mut SmallVec<[Obligation<'db, Predicate<'db>>; 4]>, +} - let uncanonical = - chalk_ir::Substitute::apply(&obligation.free_vars, ty, Interner); - if matches!( - self.resolve_ty_shallow(&uncanonical).kind(Interner), - TyKind::InferenceVar(iv, TyVariableKind::General) if *iv == root, - ) { - Some(chalk_ir::Substitute::apply( - &obligation.free_vars, - clause.clone(), - Interner, - )) - } else { - None - } - } - _ => None, - }) - .collect(); - self.table.pending_obligations = pending_obligations; +impl<'a, 'db> ProofTreeVisitor<'db> for NestedObligationsForSelfTy<'a, 'db> { + type Result = (); - obligations + fn config(&self) -> InspectConfig { + // Using an intentionally low depth to minimize the chance of future + // breaking changes in case we adapt the approach later on. This also + // avoids any hangs for exponentially growing proof trees. + InspectConfig { max_depth: 5 } } -} -#[derive(Debug, Clone)] -pub(crate) struct Canonicalized -where - T: HasInterner, -{ - pub(crate) value: Canonical, - free_vars: Vec, -} + fn visit_goal(&mut self, inspect_goal: &InspectGoal<'_, 'db>) { + // No need to walk into goal subtrees that certainly hold, since they + // wouldn't then be stalled on an infer var. + if inspect_goal.result() == Ok(Certainty::Yes) { + return; + } -impl> Canonicalized { - pub(crate) fn apply_solution( - &self, - ctx: &mut InferenceTable<'_>, - solution: Canonical, - ) { - // the solution may contain new variables, which we need to convert to new inference vars - let new_vars = Substitution::from_iter( - Interner, - solution.binders.iter(Interner).map(|k| match &k.kind { - VariableKind::Ty(TyVariableKind::General) => ctx.new_type_var().cast(Interner), - VariableKind::Ty(TyVariableKind::Integer) => ctx.new_integer_var().cast(Interner), - VariableKind::Ty(TyVariableKind::Float) => ctx.new_float_var().cast(Interner), - // Chalk can sometimes return new lifetime variables. We just replace them by errors - // for now. 
- VariableKind::Lifetime => ctx.new_lifetime_var().cast(Interner), - VariableKind::Const(ty) => ctx.new_const_var(ty.clone()).cast(Interner), - }), - ); - for (i, v) in solution.value.iter(Interner).enumerate() { - let var = &self.free_vars[i]; - if let Some(ty) = v.ty(Interner) { - // eagerly replace projections in the type; we may be getting types - // e.g. from where clauses where this hasn't happened yet - let ty = ctx.normalize_associated_types_in(new_vars.apply(ty.clone(), Interner)); - ctx.unify(var.assert_ty_ref(Interner), &ty); - } else { - let _ = ctx.try_unify(var, &new_vars.apply(v.clone(), Interner)); - } + let db = self.ctx.interner; + let goal = inspect_goal.goal(); + if self.ctx.predicate_has_self_ty(goal.predicate, self.self_ty) + // We do not push the instantiated forms of goals as it would cause any + // aliases referencing bound vars to go from having escaping bound vars to + // being able to be normalized to an inference variable. + // + // This is mostly just a hack as arbitrary nested goals could still contain + // such aliases while having a different `GoalSource`. Closure signature inference + // however can't really handle *every* higher ranked `Fn` goal also being present + // in the form of `?c: Fn<(>::Assoc)`. + // + // This also just better matches the behaviour of the old solver where we do not + // encounter instantiated forms of goals, only nested goals that referred to bound + // vars from instantiated goals. + && !matches!(inspect_goal.source(), GoalSource::InstantiateHigherRanked) + { + self.obligations_for_self_ty.push(Obligation::new( + db, + self.root_cause.clone(), + goal.param_env, + goal.predicate, + )); + } + + // If there's a unique way to prove a given goal, recurse into + // that candidate. This means that for `impl Trait for () {}` + // and a `(): Trait` goal we recurse into the impl and look at + // the nested `?0: FnOnce(u32)` goal. + if let Some(candidate) = inspect_goal.unique_applicable_candidate() { + candidate.visit_nested_no_probe(self) } } } @@ -153,7 +146,7 @@ pub fn could_unify_deeply( let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner); let ty1_with_vars = table.normalize_associated_types_in(ty1_with_vars); let ty2_with_vars = table.normalize_associated_types_in(ty2_with_vars); - table.resolve_obligations_as_possible(); + table.select_obligations_where_possible(); table.propagate_diverging_flag(); let ty1_with_vars = table.resolve_completely(ty1_with_vars); let ty2_with_vars = table.resolve_completely(ty2_with_vars); @@ -219,37 +212,118 @@ bitflags::bitflags! { } } -type ChalkInferenceTable = chalk_solve::infer::InferenceTable; - #[derive(Clone)] -pub(crate) struct InferenceTable<'a> { - pub(crate) db: &'a dyn HirDatabase, +pub(crate) struct InferenceTable<'db> { + pub(crate) db: &'db dyn HirDatabase, + pub(crate) interner: DbInterner<'db>, pub(crate) trait_env: Arc, + pub(crate) param_env: ParamEnv<'db>, pub(crate) tait_coercion_table: Option>, - var_unification_table: ChalkInferenceTable, - type_variable_table: SmallVec<[TypeVariableFlags; 16]>, - pending_obligations: Vec>>, - /// Double buffer used in [`Self::resolve_obligations_as_possible`] to cut down on - /// temporary allocations. 
- resolve_obligations_buffer: Vec>>, + pub(crate) infer_ctxt: InferCtxt<'db>, + diverging_tys: FxHashSet, + pub(super) fulfillment_cx: FulfillmentCtxt<'db>, } -pub(crate) struct InferenceTableSnapshot { - var_table_snapshot: chalk_solve::infer::InferenceSnapshot, - type_variable_table: SmallVec<[TypeVariableFlags; 16]>, - pending_obligations: Vec>>, +pub(crate) struct InferenceTableSnapshot<'db> { + ctxt_snapshot: CombinedSnapshot, + obligations: FulfillmentCtxt<'db>, + diverging_tys: FxHashSet, } -impl<'a> InferenceTable<'a> { - pub(crate) fn new(db: &'a dyn HirDatabase, trait_env: Arc) -> Self { +impl<'db> InferenceTable<'db> { + pub(crate) fn new(db: &'db dyn HirDatabase, trait_env: Arc) -> Self { + let interner = DbInterner::new_with(db, Some(trait_env.krate), trait_env.block); + let infer_ctxt = interner.infer_ctxt().build(rustc_type_ir::TypingMode::Analysis { + defining_opaque_types_and_generators: SolverDefIds::new_from_iter(interner, []), + }); InferenceTable { db, + interner, + param_env: trait_env.env.to_nextsolver(interner), trait_env, tait_coercion_table: None, - var_unification_table: ChalkInferenceTable::new(), - type_variable_table: SmallVec::new(), - pending_obligations: Vec::new(), - resolve_obligations_buffer: Vec::new(), + fulfillment_cx: FulfillmentCtxt::new(&infer_ctxt), + infer_ctxt, + diverging_tys: FxHashSet::default(), + } + } + + pub(crate) fn type_var_is_sized(&self, self_ty: TyVid) -> bool { + let Some(sized_did) = LangItem::Sized.resolve_trait(self.db, self.trait_env.krate) else { + return true; + }; + self.obligations_for_self_ty(self_ty).into_iter().any(|obligation| { + match obligation.predicate.kind().skip_binder() { + crate::next_solver::PredicateKind::Clause( + crate::next_solver::ClauseKind::Trait(data), + ) => data.def_id().0 == sized_did, + _ => false, + } + }) + } + + pub(super) fn obligations_for_self_ty( + &self, + self_ty: TyVid, + ) -> SmallVec<[Obligation<'db, Predicate<'db>>; 4]> { + let obligations = self.fulfillment_cx.pending_obligations(); + let mut obligations_for_self_ty = SmallVec::new(); + for obligation in obligations { + let mut visitor = NestedObligationsForSelfTy { + ctx: self, + self_ty, + obligations_for_self_ty: &mut obligations_for_self_ty, + root_cause: &obligation.cause, + }; + + let goal = obligation.as_goal(); + self.infer_ctxt.visit_proof_tree(goal, &mut visitor); + } + + obligations_for_self_ty.retain_mut(|obligation| { + obligation.predicate = self.infer_ctxt.resolve_vars_if_possible(obligation.predicate); + !obligation.predicate.has_placeholders() + }); + obligations_for_self_ty + } + + fn predicate_has_self_ty(&self, predicate: Predicate<'db>, expected_vid: TyVid) -> bool { + match predicate.kind().skip_binder() { + PredicateKind::Clause(ClauseKind::Trait(data)) => { + self.type_matches_expected_vid(expected_vid, data.self_ty()) + } + PredicateKind::Clause(ClauseKind::Projection(data)) => { + self.type_matches_expected_vid(expected_vid, data.projection_term.self_ty()) + } + PredicateKind::Clause(ClauseKind::ConstArgHasType(..)) + | PredicateKind::Subtype(..) + | PredicateKind::Coerce(..) + | PredicateKind::Clause(ClauseKind::RegionOutlives(..)) + | PredicateKind::Clause(ClauseKind::TypeOutlives(..)) + | PredicateKind::Clause(ClauseKind::WellFormed(..)) + | PredicateKind::DynCompatible(..) + | PredicateKind::NormalizesTo(..) + | PredicateKind::AliasRelate(..) + | PredicateKind::Clause(ClauseKind::ConstEvaluatable(..)) + | PredicateKind::ConstEquate(..) 
+ | PredicateKind::Clause(ClauseKind::HostEffect(..)) + | PredicateKind::Clause(ClauseKind::UnstableFeature(_)) + | PredicateKind::Ambiguous => false, + } + } + + fn type_matches_expected_vid( + &self, + expected_vid: TyVid, + ty: crate::next_solver::Ty<'db>, + ) -> bool { + let ty = self.shallow_resolve(ty); + + match ty.kind() { + crate::next_solver::TyKind::Infer(rustc_type_ir::TyVar(found_vid)) => { + self.infer_ctxt.root_var(expected_vid) == self.infer_ctxt.root_var(found_vid) + } + _ => false, } } @@ -260,29 +334,58 @@ impl<'a> InferenceTable<'a> { /// marked as diverging if necessary, so that resolving them gives the right /// result. pub(super) fn propagate_diverging_flag(&mut self) { - for i in 0..self.type_variable_table.len() { - if !self.type_variable_table[i].contains(TypeVariableFlags::DIVERGING) { - continue; + let mut new_tys = FxHashSet::default(); + for ty in self.diverging_tys.iter() { + match ty.kind(Interner) { + TyKind::InferenceVar(var, kind) => match kind { + TyVariableKind::General => { + let root = InferenceVar::from( + self.infer_ctxt.root_var(TyVid::from_u32(var.index())).as_u32(), + ); + if root.index() != var.index() { + new_tys.insert(TyKind::InferenceVar(root, *kind).intern(Interner)); + } + } + TyVariableKind::Integer => { + let root = InferenceVar::from( + self.infer_ctxt + .inner + .borrow_mut() + .int_unification_table() + .find(IntVid::from_usize(var.index() as usize)) + .as_u32(), + ); + if root.index() != var.index() { + new_tys.insert(TyKind::InferenceVar(root, *kind).intern(Interner)); + } + } + TyVariableKind::Float => { + let root = InferenceVar::from( + self.infer_ctxt + .inner + .borrow_mut() + .float_unification_table() + .find(FloatVid::from_usize(var.index() as usize)) + .as_u32(), + ); + if root.index() != var.index() { + new_tys.insert(TyKind::InferenceVar(root, *kind).intern(Interner)); + } + } + }, + _ => {} } - let v = InferenceVar::from(i as u32); - let root = self.var_unification_table.inference_var_root(v); - self.modify_type_variable_flag(root, |f| { - *f |= TypeVariableFlags::DIVERGING; - }); } + self.diverging_tys.extend(new_tys); } - pub(super) fn set_diverging(&mut self, iv: InferenceVar, diverging: bool) { - self.modify_type_variable_flag(iv, |f| { - f.set(TypeVariableFlags::DIVERGING, diverging); - }); + pub(super) fn set_diverging(&mut self, iv: InferenceVar, kind: TyVariableKind) { + self.diverging_tys.insert(TyKind::InferenceVar(iv, kind).intern(Interner)); } fn fallback_value(&self, iv: InferenceVar, kind: TyVariableKind) -> Ty { - let is_diverging = self - .type_variable_table - .get(iv.index() as usize) - .is_some_and(|data| data.contains(TypeVariableFlags::DIVERGING)); + let is_diverging = + self.diverging_tys.contains(&TyKind::InferenceVar(iv, kind).intern(Interner)); if is_diverging { return TyKind::Never.intern(Interner); } @@ -294,30 +397,14 @@ impl<'a> InferenceTable<'a> { .intern(Interner) } - pub(crate) fn canonicalize_with_free_vars(&mut self, t: T) -> Canonicalized - where - T: TypeFoldable + HasInterner, - { - // try to resolve obligations before canonicalizing, since this might - // result in new knowledge about variables - self.resolve_obligations_as_possible(); - let result = self.var_unification_table.canonicalize(Interner, t); - let free_vars = result - .free_vars - .into_iter() - .map(|free_var| free_var.to_generic_arg(Interner)) - .collect(); - Canonicalized { value: result.quantified, free_vars } - } - - pub(crate) fn canonicalize(&mut self, t: T) -> Canonical + pub(crate) fn canonicalize(&mut self, t: 
T) -> rustc_type_ir::Canonical, T> where - T: TypeFoldable + HasInterner, + T: rustc_type_ir::TypeFoldable>, { // try to resolve obligations before canonicalizing, since this might // result in new knowledge about variables - self.resolve_obligations_as_possible(); - self.var_unification_table.canonicalize(Interner, t).quantified + self.select_obligations_where_possible(); + self.infer_ctxt.canonicalize_response(t) } /// Recurses through the given type, normalizing associated types mentioned @@ -326,42 +413,26 @@ impl<'a> InferenceTable<'a> { /// type annotation (e.g. from a let type annotation, field type or function /// call). `make_ty` handles this already, but e.g. for field types we need /// to do it as well. - pub(crate) fn normalize_associated_types_in(&mut self, ty: T) -> T + pub(crate) fn normalize_associated_types_in(&mut self, ty: T) -> T where - T: HasInterner + TypeFoldable, + T: ChalkToNextSolver<'db, U>, + U: NextSolverToChalk<'db, T> + rustc_type_ir::TypeFoldable>, { - fold_tys_and_consts( - ty, - |e, _| match e { - Either::Left(ty) => Either::Left(match ty.kind(Interner) { - TyKind::Alias(AliasTy::Projection(proj_ty)) => { - self.normalize_projection_ty(proj_ty.clone()) - } - _ => ty, - }), - Either::Right(c) => Either::Right(match &c.data(Interner).value { - chalk_ir::ConstValue::Concrete(cc) => match &cc.interned { - crate::ConstScalar::UnevaluatedConst(c_id, subst) => { - // FIXME: Ideally here we should do everything that we do with type alias, i.e. adding a variable - // and registering an obligation. But it needs chalk support, so we handle the most basic - // case (a non associated const without generic parameters) manually. - if subst.len(Interner) == 0 { - if let Ok(eval) = self.db.const_eval(*c_id, subst.clone(), None) { - eval - } else { - unknown_const(c.data(Interner).ty.clone()) - } - } else { - unknown_const(c.data(Interner).ty.clone()) - } - } - _ => c, - }, - _ => c, - }), - }, - DebruijnIndex::INNERMOST, - ) + self.normalize_associated_types_in_ns(ty.to_nextsolver(self.interner)) + .to_chalk(self.interner) + } + + // FIXME: We should get rid of this method. We cannot deeply normalize during inference, only when finishing. + // Inference should use shallow normalization (`try_structurally_resolve_type()`) only, when needed. 
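// Illustrative sketch (hypothetical, standalone code; not rust-analyzer API):
// `normalize_associated_types_in` above now follows the bridging pattern used
// throughout this patch — convert the chalk-side value into the next-solver
// representation, do the actual work there, and convert the result back. With
// toy `OldTy`/`NewTy` representations and toy conversion traits, that shape
// looks like this:
#[derive(Debug, Clone, PartialEq)]
struct OldTy(String);
#[derive(Debug, Clone, PartialEq)]
struct NewTy(String);

trait ToNew {
    fn to_new(&self) -> NewTy;
}
trait ToOld {
    fn to_old(&self) -> OldTy;
}

impl ToNew for OldTy {
    fn to_new(&self) -> NewTy {
        NewTy(self.0.clone())
    }
}
impl ToOld for NewTy {
    fn to_old(&self) -> OldTy {
        OldTy(self.0.clone())
    }
}

// All real work happens on the new representation only...
fn normalize_new(ty: NewTy) -> NewTy {
    NewTy(ty.0.replace("<Item as Iterator>::Item", "u32"))
}

// ...while the old-style entry point merely round-trips through it, the way
// `normalize_associated_types_in` delegates to `normalize_associated_types_in_ns`.
fn normalize_old(ty: &OldTy) -> OldTy {
    normalize_new(ty.to_new()).to_old()
}

fn main() {
    let ty = OldTy("Vec<<Item as Iterator>::Item>".to_owned());
    assert_eq!(normalize_old(&ty), OldTy("Vec<u32>".to_owned()));
}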
+ pub(crate) fn normalize_associated_types_in_ns(&mut self, ty: T) -> T + where + T: rustc_type_ir::TypeFoldable> + Clone, + { + let ty = self.resolve_vars_with_obligations(ty); + self.infer_ctxt + .at(&ObligationCause::new(), self.param_env) + .deeply_normalize(ty.clone()) + .unwrap_or(ty) } /// Works almost same as [`Self::normalize_associated_types_in`], but this also resolves shallow @@ -423,51 +494,60 @@ impl<'a> InferenceTable<'a> { } pub(crate) fn normalize_projection_ty(&mut self, proj_ty: ProjectionTy) -> Ty { - let var = self.new_type_var(); - let alias_eq = AliasEq { alias: AliasTy::Projection(proj_ty), ty: var.clone() }; - let obligation = alias_eq.cast(Interner); - self.register_obligation(obligation); - var + let ty = TyKind::Alias(chalk_ir::AliasTy::Projection(proj_ty)) + .intern(Interner) + .to_nextsolver(self.interner); + self.normalize_alias_ty(ty).to_chalk(self.interner) } - fn modify_type_variable_flag(&mut self, var: InferenceVar, cb: F) - where - F: FnOnce(&mut TypeVariableFlags), - { - let idx = var.index() as usize; - if self.type_variable_table.len() <= idx { - self.extend_type_variable_table(idx); - } - if let Some(f) = self.type_variable_table.get_mut(idx) { - cb(f); - } - } - fn extend_type_variable_table(&mut self, to_index: usize) { - let count = to_index - self.type_variable_table.len() + 1; - self.type_variable_table.extend(std::iter::repeat_n(TypeVariableFlags::default(), count)); + pub(crate) fn normalize_alias_ty( + &mut self, + alias: crate::next_solver::Ty<'db>, + ) -> crate::next_solver::Ty<'db> { + let infer_term = self.infer_ctxt.next_ty_var(); + let obligation = crate::next_solver::Predicate::new( + self.interner, + crate::next_solver::Binder::dummy(crate::next_solver::PredicateKind::AliasRelate( + alias.into(), + infer_term.into(), + rustc_type_ir::AliasRelationDirection::Equate, + )), + ); + self.register_obligation(obligation); + self.resolve_vars_with_obligations(infer_term) } fn new_var(&mut self, kind: TyVariableKind, diverging: bool) -> Ty { - let var = self.var_unification_table.new_variable(UniverseIndex::ROOT); - // Chalk might have created some type variables for its own purposes that we don't know about... 
- self.extend_type_variable_table(var.index() as usize); - assert_eq!(var.index() as usize, self.type_variable_table.len() - 1); - let flags = self.type_variable_table.get_mut(var.index() as usize).unwrap(); + let var = match kind { + TyVariableKind::General => { + let var = self.infer_ctxt.next_ty_vid(); + InferenceVar::from(var.as_u32()) + } + TyVariableKind::Integer => { + let var = self.infer_ctxt.next_int_vid(); + InferenceVar::from(var.as_u32()) + } + TyVariableKind::Float => { + let var = self.infer_ctxt.next_float_vid(); + InferenceVar::from(var.as_u32()) + } + }; + + let ty = var.to_ty(Interner, kind); if diverging { - *flags |= TypeVariableFlags::DIVERGING; + self.diverging_tys.insert(ty.clone()); } - if matches!(kind, TyVariableKind::Integer) { - *flags |= TypeVariableFlags::INTEGER; - } else if matches!(kind, TyVariableKind::Float) { - *flags |= TypeVariableFlags::FLOAT; - } - var.to_ty_with_kind(Interner, kind) + ty } pub(crate) fn new_type_var(&mut self) -> Ty { self.new_var(TyVariableKind::General, false) } + pub(crate) fn next_ty_var(&mut self) -> crate::next_solver::Ty<'db> { + self.infer_ctxt.next_ty_var() + } + pub(crate) fn new_integer_var(&mut self) -> Ty { self.new_var(TyVariableKind::Integer, false) } @@ -481,15 +561,21 @@ impl<'a> InferenceTable<'a> { } pub(crate) fn new_const_var(&mut self, ty: Ty) -> Const { - let var = self.var_unification_table.new_variable(UniverseIndex::ROOT); + let var = self.infer_ctxt.next_const_vid(); + let var = InferenceVar::from(var.as_u32()); var.to_const(Interner, ty) } pub(crate) fn new_lifetime_var(&mut self) -> Lifetime { - let var = self.var_unification_table.new_variable(UniverseIndex::ROOT); + let var = self.infer_ctxt.next_region_vid(); + let var = InferenceVar::from(var.as_u32()); var.to_lifetime(Interner) } + pub(crate) fn next_region_var(&mut self) -> crate::next_solver::Region<'db> { + self.infer_ctxt.next_region_var() + } + pub(crate) fn resolve_with_fallback( &mut self, t: T, @@ -498,16 +584,18 @@ impl<'a> InferenceTable<'a> { where T: HasInterner + TypeFoldable, { - self.resolve_with_fallback_inner(&mut Vec::new(), t, &fallback) + self.resolve_with_fallback_inner(t, &fallback) } pub(crate) fn fresh_subst(&mut self, binders: &[CanonicalVarKind]) -> Substitution { Substitution::from_iter( Interner, - binders.iter().map(|kind| { - let param_infer_var = - kind.map_ref(|&ui| self.var_unification_table.new_variable(ui)); - param_infer_var.to_generic_arg(Interner) + binders.iter().map(|kind| match &kind.kind { + chalk_ir::VariableKind::Ty(ty_variable_kind) => { + self.new_var(*ty_variable_kind, false).cast(Interner) + } + chalk_ir::VariableKind::Lifetime => self.new_lifetime_var().cast(Interner), + chalk_ir::VariableKind::Const(ty) => self.new_const_var(ty.clone()).cast(Interner), }), ) } @@ -520,25 +608,40 @@ impl<'a> InferenceTable<'a> { subst.apply(canonical.value, Interner) } + pub(crate) fn instantiate_canonical_ns( + &mut self, + canonical: rustc_type_ir::Canonical, T>, + ) -> T + where + T: rustc_type_ir::TypeFoldable>, + { + self.infer_ctxt.instantiate_canonical(&canonical).0 + } + fn resolve_with_fallback_inner( &mut self, - var_stack: &mut Vec, t: T, fallback: &dyn Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg, ) -> T where T: HasInterner + TypeFoldable, { + let var_stack = &mut vec![]; t.fold_with( &mut resolve::Resolver { table: self, var_stack, fallback }, DebruijnIndex::INNERMOST, ) } - pub(crate) fn resolve_completely(&mut self, t: T) -> T + pub(crate) fn resolve_completely(&mut self, t: 
T) -> T where - T: HasInterner + TypeFoldable, + T: HasInterner + TypeFoldable + ChalkToNextSolver<'db, U>, + U: NextSolverToChalk<'db, T> + rustc_type_ir::TypeFoldable>, { + let t = self.resolve_with_fallback(t, &|_, _, d, _| d); + let t = self.normalize_associated_types_in(t); + // let t = self.resolve_opaque_tys_in(t); + // Resolve again, because maybe normalization inserted infer vars. self.resolve_with_fallback(t, &|_, _, d, _| d) } @@ -554,29 +657,26 @@ impl<'a> InferenceTable<'a> { let int_fallback = TyKind::Scalar(Scalar::Int(IntTy::I32)).intern(Interner); let float_fallback = TyKind::Scalar(Scalar::Float(FloatTy::F64)).intern(Interner); - let scalar_vars: Vec<_> = self - .type_variable_table - .iter() - .enumerate() - .filter_map(|(index, flags)| { - let kind = if flags.contains(TypeVariableFlags::INTEGER) { - TyVariableKind::Integer - } else if flags.contains(TypeVariableFlags::FLOAT) { - TyVariableKind::Float - } else { - return None; + let int_vars = self.infer_ctxt.inner.borrow_mut().int_unification_table().len(); + for v in 0..int_vars { + let var = InferenceVar::from(v as u32).to_ty(Interner, TyVariableKind::Integer); + let maybe_resolved = self.resolve_ty_shallow(&var); + if let TyKind::InferenceVar(_, kind) = maybe_resolved.kind(Interner) { + // I don't think we can ever unify these vars with float vars, but keep this here for now + let fallback = match kind { + TyVariableKind::Integer => &int_fallback, + TyVariableKind::Float => &float_fallback, + TyVariableKind::General => unreachable!(), }; - - // FIXME: This is not really the nicest way to get `InferenceVar`s. Can we get them - // without directly constructing them from `index`? - let var = InferenceVar::from(index as u32).to_ty(Interner, kind); - Some(var) - }) - .collect(); - - for var in scalar_vars { + self.unify(&var, fallback); + } + } + let float_vars = self.infer_ctxt.inner.borrow_mut().float_unification_table().len(); + for v in 0..float_vars { + let var = InferenceVar::from(v as u32).to_ty(Interner, TyVariableKind::Float); let maybe_resolved = self.resolve_ty_shallow(&var); if let TyKind::InferenceVar(_, kind) = maybe_resolved.kind(Interner) { + // I don't think we can ever unify these vars with float vars, but keep this here for now let fallback = match kind { TyVariableKind::Integer => &int_fallback, TyVariableKind::Float => &float_fallback, @@ -588,258 +688,291 @@ impl<'a> InferenceTable<'a> { } /// Unify two relatable values (e.g. `Ty`) and register new trait goals that arise from that. - #[tracing::instrument(skip_all)] - pub(crate) fn unify>(&mut self, ty1: &T, ty2: &T) -> bool { + pub(crate) fn unify, U: Relate>>( + &mut self, + ty1: &T, + ty2: &T, + ) -> bool { let result = match self.try_unify(ty1, ty2) { Ok(r) => r, Err(_) => return false, }; - self.register_infer_ok(result); + self.register_obligations(result.goals); + true + } + + pub(crate) fn unify_ns>>(&mut self, lhs: T, rhs: T) -> bool { + let Ok(infer_ok) = self.try_unify_ns(lhs, rhs) else { + return false; + }; + self.register_obligations(infer_ok.goals); true } /// Unify two relatable values (e.g. 
`Ty`) and check whether trait goals which arise from that could be fulfilled - pub(crate) fn unify_deeply>(&mut self, ty1: &T, ty2: &T) -> bool { + pub(crate) fn unify_deeply, U: Relate>>( + &mut self, + ty1: &T, + ty2: &T, + ) -> bool { let result = match self.try_unify(ty1, ty2) { Ok(r) => r, Err(_) => return false, }; - result.goals.iter().all(|goal| { - let canonicalized = self.canonicalize_with_free_vars(goal.clone()); - self.try_resolve_obligation(&canonicalized).is_some() + result.goals.into_iter().all(|goal| { + matches!(next_trait_solve_in_ctxt(&self.infer_ctxt, goal), Ok((_, Certainty::Yes))) }) } /// Unify two relatable values (e.g. `Ty`) and return new trait goals arising from it, so the /// caller needs to deal with them. - pub(crate) fn try_unify>( + pub(crate) fn try_unify, U: Relate>>( &mut self, t1: &T, t2: &T, - ) -> InferResult<()> { - match self.var_unification_table.relate( - Interner, - &self.db, - &self.trait_env.env, - chalk_ir::Variance::Invariant, - t1, - t2, - ) { - Ok(result) => Ok(InferOk { goals: result.goals, value: () }), - Err(chalk_ir::NoSolution) => Err(TypeError), + ) -> InferResult<'db, ()> { + let lhs = t1.to_nextsolver(self.interner); + let rhs = t2.to_nextsolver(self.interner); + self.try_unify_ns(lhs, rhs) + } + + /// Unify two relatable values (e.g. `Ty`) and return new trait goals arising from it, so the + /// caller needs to deal with them. + pub(crate) fn try_unify_ns>>( + &mut self, + lhs: T, + rhs: T, + ) -> InferResult<'db, ()> { + let variance = rustc_type_ir::Variance::Invariant; + let span = crate::next_solver::Span::dummy(); + match self.infer_ctxt.relate(self.param_env, lhs, variance, rhs, span) { + Ok(goals) => Ok(crate::infer::InferOk { goals, value: () }), + Err(_) => Err(TypeError), } } /// If `ty` is a type variable with known type, returns that type; /// otherwise, return ty. + #[tracing::instrument(skip(self))] pub(crate) fn resolve_ty_shallow(&mut self, ty: &Ty) -> Ty { - self.resolve_obligations_as_possible(); - self.var_unification_table.normalize_ty_shallow(Interner, ty).unwrap_or_else(|| ty.clone()) + self.shallow_resolve(ty.to_nextsolver(self.interner)).to_chalk(self.interner) + } + + pub(crate) fn shallow_resolve( + &self, + ty: crate::next_solver::Ty<'db>, + ) -> crate::next_solver::Ty<'db> { + self.infer_ctxt.shallow_resolve(ty) + } + + pub(crate) fn resolve_vars_with_obligations(&mut self, t: T) -> T + where + T: rustc_type_ir::TypeFoldable>, + { + use rustc_type_ir::TypeVisitableExt; + + if !t.has_non_region_infer() { + return t; + } + + let t = self.infer_ctxt.resolve_vars_if_possible(t); + + if !t.has_non_region_infer() { + return t; + } + + self.select_obligations_where_possible(); + self.infer_ctxt.resolve_vars_if_possible(t) + } + + pub(crate) fn structurally_resolve_type(&mut self, ty: &Ty) -> Ty { + if let TyKind::Alias(..) 
= ty.kind(Interner) { + self.structurally_normalize_ty(ty) + } else { + self.resolve_vars_with_obligations(ty.to_nextsolver(self.interner)) + .to_chalk(self.interner) + } + } + + fn structurally_normalize_ty(&mut self, ty: &Ty) -> Ty { + self.structurally_normalize_term(ty.to_nextsolver(self.interner).into()) + .expect_ty() + .to_chalk(self.interner) + } + + fn structurally_normalize_term(&mut self, term: Term<'db>) -> Term<'db> { + self.infer_ctxt + .at(&ObligationCause::new(), self.param_env) + .structurally_normalize_term(term, &mut self.fulfillment_cx) + .unwrap_or(term) } - pub(crate) fn snapshot(&mut self) -> InferenceTableSnapshot { - let var_table_snapshot = self.var_unification_table.snapshot(); - let type_variable_table = self.type_variable_table.clone(); - let pending_obligations = self.pending_obligations.clone(); - InferenceTableSnapshot { var_table_snapshot, pending_obligations, type_variable_table } + /// Try to resolve `ty` to a structural type, normalizing aliases. + /// + /// In case there is still ambiguity, the returned type may be an inference + /// variable. This is different from `structurally_resolve_type` which errors + /// in this case. + pub(crate) fn try_structurally_resolve_type( + &mut self, + ty: crate::next_solver::Ty<'db>, + ) -> crate::next_solver::Ty<'db> { + if let crate::next_solver::TyKind::Alias(..) = ty.kind() { + // We need to use a separate variable here as otherwise the temporary for + // `self.fulfillment_cx.borrow_mut()` is alive in the `Err` branch, resulting + // in a reentrant borrow, causing an ICE. + let result = self + .infer_ctxt + .at(&ObligationCause::misc(), self.param_env) + .structurally_normalize_ty(ty, &mut self.fulfillment_cx); + match result { + Ok(normalized_ty) => normalized_ty, + Err(_errors) => crate::next_solver::Ty::new_error(self.interner, ErrorGuaranteed), + } + } else { + self.resolve_vars_with_obligations(ty) + } + } + + pub(crate) fn snapshot(&mut self) -> InferenceTableSnapshot<'db> { + let ctxt_snapshot = self.infer_ctxt.start_snapshot(); + let diverging_tys = self.diverging_tys.clone(); + let obligations = self.fulfillment_cx.clone(); + InferenceTableSnapshot { ctxt_snapshot, diverging_tys, obligations } } #[tracing::instrument(skip_all)] - pub(crate) fn rollback_to(&mut self, snapshot: InferenceTableSnapshot) { - self.var_unification_table.rollback_to(snapshot.var_table_snapshot); - self.type_variable_table = snapshot.type_variable_table; - self.pending_obligations = snapshot.pending_obligations; + pub(crate) fn rollback_to(&mut self, snapshot: InferenceTableSnapshot<'db>) { + self.infer_ctxt.rollback_to(snapshot.ctxt_snapshot); + self.diverging_tys = snapshot.diverging_tys; + self.fulfillment_cx = snapshot.obligations; } #[tracing::instrument(skip_all)] - pub(crate) fn run_in_snapshot(&mut self, f: impl FnOnce(&mut InferenceTable<'_>) -> T) -> T { + pub(crate) fn run_in_snapshot( + &mut self, + f: impl FnOnce(&mut InferenceTable<'db>) -> T, + ) -> T { let snapshot = self.snapshot(); let result = f(self); self.rollback_to(snapshot); result } + pub(crate) fn commit_if_ok( + &mut self, + f: impl FnOnce(&mut InferenceTable<'db>) -> Result, + ) -> Result { + let snapshot = self.snapshot(); + let result = f(self); + match result { + Ok(_) => {} + Err(_) => { + self.rollback_to(snapshot); + } + } + result + } + /// Checks an obligation without registering it. Useful mostly to check /// whether a trait *might* be implemented before deciding to 'lock in' the /// choice (during e.g. method resolution or deref). 
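// Editor's aside, not part of the patch: a minimal, self-contained sketch of the
// snapshot/rollback pattern that `snapshot`, `rollback_to`, `run_in_snapshot`, and
// `commit_if_ok` above implement for the inference context. `Table` and `bindings`
// are invented stand-ins; the real table snapshots inference-context state and the
// fulfillment queue rather than cloning a Vec.
#[derive(Clone, Default)]
struct Table {
    bindings: Vec<Option<u32>>, // stand-in for inference-variable bindings
}

struct Snapshot(Table);

impl Table {
    fn snapshot(&self) -> Snapshot {
        Snapshot(self.clone())
    }

    fn rollback_to(&mut self, snapshot: Snapshot) {
        *self = snapshot.0;
    }

    /// Run `f` speculatively and keep its side effects only if it returns `Ok`.
    fn commit_if_ok<T, E>(&mut self, f: impl FnOnce(&mut Self) -> Result<T, E>) -> Result<T, E> {
        let snapshot = self.snapshot();
        let result = f(self);
        if result.is_err() {
            self.rollback_to(snapshot);
        }
        result
    }
}

fn main() {
    let mut table = Table { bindings: vec![None] };
    let _ = table.commit_if_ok::<(), ()>(|t| {
        t.bindings[0] = Some(1); // speculative binding
        Err(())                  // failure: the binding above is rolled back
    });
    assert_eq!(table.bindings[0], None);
}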
- pub(crate) fn try_obligation(&mut self, goal: Goal) -> Option { + #[tracing::instrument(level = "debug", skip(self))] + pub(crate) fn try_obligation(&mut self, goal: Goal) -> NextTraitSolveResult { let in_env = InEnvironment::new(&self.trait_env.env, goal); - let canonicalized = self.canonicalize(in_env); + let canonicalized = self.canonicalize(in_env.to_nextsolver(self.interner)); - self.db.trait_solve(self.trait_env.krate, self.trait_env.block, canonicalized) + next_trait_solve_canonical_in_ctxt(&self.infer_ctxt, canonicalized) } - pub(crate) fn register_obligation(&mut self, goal: Goal) { - let in_env = InEnvironment::new(&self.trait_env.env, goal); - self.register_obligation_in_env(in_env) + #[tracing::instrument(level = "debug", skip(self))] + pub(crate) fn solve_obligation(&mut self, goal: Goal) -> Result { + let goal = InEnvironment::new(&self.trait_env.env, goal); + let goal = goal.to_nextsolver(self.interner); + let result = next_trait_solve_in_ctxt(&self.infer_ctxt, goal); + result.map(|m| m.1) } - fn register_obligation_in_env(&mut self, goal: InEnvironment) { - let canonicalized = self.canonicalize_with_free_vars(goal); - let solution = self.try_resolve_obligation(&canonicalized); - if matches!(solution, Some(Solution::Ambig(_))) { - self.pending_obligations.push(canonicalized); - } - } - - pub(crate) fn register_infer_ok(&mut self, infer_ok: InferOk) { - infer_ok.goals.into_iter().for_each(|goal| self.register_obligation_in_env(goal)); + pub(crate) fn register_obligation(&mut self, predicate: Predicate<'db>) { + let goal = next_solver::Goal { + param_env: self.trait_env.env.to_nextsolver(self.interner), + predicate, + }; + self.register_obligation_in_env(goal) } - pub(crate) fn resolve_obligations_as_possible(&mut self) { - let _span = tracing::info_span!("resolve_obligations_as_possible").entered(); - let mut changed = true; - let mut obligations = mem::take(&mut self.resolve_obligations_buffer); - while mem::take(&mut changed) { - mem::swap(&mut self.pending_obligations, &mut obligations); - - for canonicalized in obligations.drain(..) { - if !self.check_changed(&canonicalized) { - self.pending_obligations.push(canonicalized); - continue; - } - changed = true; - let uncanonical = chalk_ir::Substitute::apply( - &canonicalized.free_vars, - canonicalized.value.value, - Interner, + #[tracing::instrument(level = "debug", skip(self))] + fn register_obligation_in_env( + &mut self, + goal: next_solver::Goal<'db, next_solver::Predicate<'db>>, + ) { + let result = next_trait_solve_in_ctxt(&self.infer_ctxt, goal); + tracing::debug!(?result); + match result { + Ok((_, Certainty::Yes)) => {} + Err(rustc_type_ir::solve::NoSolution) => {} + Ok((_, Certainty::Maybe { .. 
})) => { + self.fulfillment_cx.register_predicate_obligation( + &self.infer_ctxt, + Obligation::new( + self.interner, + ObligationCause::new(), + goal.param_env, + goal.predicate, + ), ); - self.register_obligation_in_env(uncanonical); } } - self.resolve_obligations_buffer = obligations; - self.resolve_obligations_buffer.clear(); } - pub(crate) fn fudge_inference>( - &mut self, - f: impl FnOnce(&mut Self) -> T, - ) -> T { - use chalk_ir::fold::TypeFolder; - - #[derive(chalk_derive::FallibleTypeFolder)] - #[has_interner(Interner)] - struct VarFudger<'a, 'b> { - table: &'a mut InferenceTable<'b>, - highest_known_var: InferenceVar, - } - impl TypeFolder for VarFudger<'_, '_> { - fn as_dyn(&mut self) -> &mut dyn TypeFolder { - self - } - - fn interner(&self) -> Interner { - Interner - } + pub(crate) fn register_infer_ok(&mut self, infer_ok: InferOk<'db, T>) -> T { + let InferOk { value, obligations } = infer_ok; + self.register_predicates(obligations); + value + } - fn fold_inference_ty( - &mut self, - var: chalk_ir::InferenceVar, - kind: TyVariableKind, - _outer_binder: chalk_ir::DebruijnIndex, - ) -> chalk_ir::Ty { - if var < self.highest_known_var { - var.to_ty(Interner, kind) - } else { - self.table.new_type_var() - } - } + pub(crate) fn register_obligations( + &mut self, + obligations: Vec>>, + ) { + obligations.into_iter().for_each(|goal| self.register_obligation_in_env(goal)); + } - fn fold_inference_lifetime( - &mut self, - var: chalk_ir::InferenceVar, - _outer_binder: chalk_ir::DebruijnIndex, - ) -> chalk_ir::Lifetime { - if var < self.highest_known_var { - var.to_lifetime(Interner) - } else { - self.table.new_lifetime_var() - } - } + pub(crate) fn select_obligations_where_possible(&mut self) { + self.fulfillment_cx.select_where_possible(&self.infer_ctxt); + } - fn fold_inference_const( - &mut self, - ty: chalk_ir::Ty, - var: chalk_ir::InferenceVar, - _outer_binder: chalk_ir::DebruijnIndex, - ) -> chalk_ir::Const { - if var < self.highest_known_var { - var.to_const(Interner, ty) - } else { - self.table.new_const_var(ty) - } - } + pub(super) fn register_predicate( + &mut self, + obligation: crate::next_solver::infer::traits::PredicateObligation<'db>, + ) { + if obligation.has_escaping_bound_vars() { + panic!("escaping bound vars in predicate {:?}", obligation); } - let snapshot = self.snapshot(); - let highest_known_var = self.new_type_var().inference_var(Interner).expect("inference_var"); - let result = f(self); - self.rollback_to(snapshot); - result - .fold_with(&mut VarFudger { table: self, highest_known_var }, DebruijnIndex::INNERMOST) - } - - /// This checks whether any of the free variables in the `canonicalized` - /// have changed (either been unified with another variable, or with a - /// value). If this is not the case, we don't need to try to solve the goal - /// again -- it'll give the same result as last time. 
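// Editor's aside, not part of the patch: `register_obligation_in_env` above tries to
// solve a goal eagerly; goals that are proven (or unprovable) need no bookkeeping, while
// ambiguous ones are parked in the fulfillment context to be retried once more inference
// variables are known. A toy version of that flow, with all names invented:
enum Certainty {
    Yes,   // the goal holds
    Maybe, // not enough information yet
}

#[derive(Default)]
struct Fulfillment {
    pending: Vec<String>, // stand-in for pending predicate obligations
}

// Toy "solver": treat goals that mention an inference variable (`?`) as ambiguous.
fn solve(goal: &str) -> Result<Certainty, ()> {
    if goal.contains('?') { Ok(Certainty::Maybe) } else { Ok(Certainty::Yes) }
}

fn register(fulfillment: &mut Fulfillment, goal: &str) {
    match solve(goal) {
        Ok(Certainty::Yes) | Err(()) => {} // solved or unprovable: nothing to queue
        Ok(Certainty::Maybe) => fulfillment.pending.push(goal.to_owned()),
    }
}

fn main() {
    let mut fulfillment = Fulfillment::default();
    register(&mut fulfillment, "u32: Copy");
    register(&mut fulfillment, "Vec<?0>: IntoIterator");
    assert_eq!(fulfillment.pending, ["Vec<?0>: IntoIterator"]);
}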
- fn check_changed(&mut self, canonicalized: &Canonicalized>) -> bool { - canonicalized.free_vars.iter().any(|var| { - let iv = match var.data(Interner) { - GenericArgData::Ty(ty) => ty.inference_var(Interner), - GenericArgData::Lifetime(lt) => lt.inference_var(Interner), - GenericArgData::Const(c) => c.inference_var(Interner), - } - .expect("free var is not inference var"); - if self.var_unification_table.probe_var(iv).is_some() { - return true; - } - let root = self.var_unification_table.inference_var_root(iv); - iv != root - }) + self.fulfillment_cx.register_predicate_obligation(&self.infer_ctxt, obligation); } - fn try_resolve_obligation( - &mut self, - canonicalized: &Canonicalized>, - ) -> Option> { - let solution = self.db.trait_solve( - self.trait_env.krate, - self.trait_env.block, - canonicalized.value.clone(), - ); - - match &solution { - Some(Solution::Unique(canonical_subst)) => { - canonicalized.apply_solution( - self, - Canonical { - binders: canonical_subst.binders.clone(), - // FIXME: handle constraints - value: canonical_subst.value.subst.clone(), - }, - ); - } - Some(Solution::Ambig(Guidance::Definite(substs))) => { - canonicalized.apply_solution(self, substs.clone()); - } - Some(_) => { - // FIXME use this when trying to resolve everything at the end - } - None => { - // FIXME obligation cannot be fulfilled => diagnostic - } - } - solution + pub(super) fn register_predicates(&mut self, obligations: I) + where + I: IntoIterator>, + { + obligations.into_iter().for_each(|obligation| { + self.register_predicate(obligation); + }); } pub(crate) fn callable_sig( &mut self, ty: &Ty, num_args: usize, - ) -> Option<(Option, Vec, Ty)> { + ) -> Option<(Option, Vec>, crate::next_solver::Ty<'db>)> + { match ty.callable_sig(self.db) { - Some(sig) => Some((None, sig.params().to_vec(), sig.ret().clone())), + Some(sig) => Some(( + None, + sig.params().iter().map(|param| param.to_nextsolver(self.interner)).collect(), + sig.ret().to_nextsolver(self.interner), + )), None => { let (f, args_ty, return_ty) = self.callable_sig_from_fn_trait(ty, num_args)?; Some((Some(f), args_ty, return_ty)) @@ -851,7 +984,7 @@ impl<'a> InferenceTable<'a> { &mut self, ty: &Ty, num_args: usize, - ) -> Option<(FnTrait, Vec, Ty)> { + ) -> Option<(FnTrait, Vec>, crate::next_solver::Ty<'db>)> { for (fn_trait_name, output_assoc_name, subtraits) in [ (FnTrait::FnOnce, sym::Output, &[FnTrait::Fn, FnTrait::FnMut][..]), (FnTrait::AsyncFnMut, sym::CallRefFuture, &[FnTrait::AsyncFn]), @@ -871,7 +1004,7 @@ impl<'a> InferenceTable<'a> { ParamKind::Lifetime => unreachable!("Tuple with lifetime parameter"), ParamKind::Const(_) => unreachable!("Tuple with const parameter"), }; - arg_tys.push(arg.clone()); + arg_tys.push(arg.to_nextsolver(self.interner)); arg.cast(Interner) }) .build(); @@ -890,30 +1023,16 @@ impl<'a> InferenceTable<'a> { .fill_with_unknown() .build(); - let trait_env = self.trait_env.env.clone(); - let obligation = InEnvironment { - goal: trait_ref.clone().cast(Interner), - environment: trait_env.clone(), - }; - let canonical = self.canonicalize(obligation.clone()); - if self.db.trait_solve(krate, self.trait_env.block, canonical.cast(Interner)).is_some() - { - self.register_obligation(obligation.goal); - let return_ty = self.normalize_projection_ty(projection); + let goal: Goal = trait_ref.clone().cast(Interner); + if !self.try_obligation(goal.clone()).no_solution() { + self.register_obligation(goal.to_nextsolver(self.interner)); + let return_ty = + 
self.normalize_projection_ty(projection).to_nextsolver(self.interner); for &fn_x in subtraits { let fn_x_trait = fn_x.get_id(self.db, krate)?; trait_ref.trait_id = to_chalk_trait_id(fn_x_trait); - let obligation: chalk_ir::InEnvironment> = - InEnvironment { - goal: trait_ref.clone().cast(Interner), - environment: trait_env.clone(), - }; - let canonical = self.canonicalize(obligation.clone()); - if self - .db - .trait_solve(krate, self.trait_env.block, canonical.cast(Interner)) - .is_some() - { + let goal = trait_ref.clone().cast(Interner); + if !self.try_obligation(goal).no_solution() { return Some((fn_x, arg_tys, return_ty)); } } @@ -947,13 +1066,38 @@ impl<'a> InferenceTable<'a> { match ty.kind(Interner) { TyKind::Error => self.new_type_var(), TyKind::InferenceVar(..) => { - let ty_resolved = self.resolve_ty_shallow(&ty); + let ty_resolved = self.structurally_resolve_type(&ty); if ty_resolved.is_unknown() { self.new_type_var() } else { ty } } _ => ty, } } + /// Whenever you lower a user-written type, you should call this. + pub(crate) fn process_user_written_ty(&mut self, ty: T) -> T + where + T: HasInterner + TypeFoldable + ChalkToNextSolver<'db, U>, + U: NextSolverToChalk<'db, T> + rustc_type_ir::TypeFoldable>, + { + self.process_remote_user_written_ty(ty) + // FIXME: Register a well-formed obligation. + } + + /// The difference of this method from `process_user_written_ty()` is that this method doesn't register a well-formed obligation, + /// while `process_user_written_ty()` should (but doesn't currently). + pub(crate) fn process_remote_user_written_ty(&mut self, ty: T) -> T + where + T: HasInterner + TypeFoldable + ChalkToNextSolver<'db, U>, + U: NextSolverToChalk<'db, T> + rustc_type_ir::TypeFoldable>, + { + let ty = self.insert_type_vars(ty); + // See https://github.com/rust-lang/rust/blob/cdb45c87e2cd43495379f7e867e3cc15dcee9f93/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs#L487-L495: + // Even though the new solver only lazily normalizes usually, here we eagerly normalize so that not everything needs + // to normalize before inspecting the `TyKind`. + // FIXME(next-solver): We should not deeply normalize here, only shallowly. + self.normalize_associated_types_in(ty) + } + /// Replaces ConstScalar::Unknown by a new type var, so we can maybe still infer it. 
pub(super) fn insert_const_vars_shallow(&mut self, c: Const) -> Const { let data = c.data(Interner); @@ -1032,13 +1176,16 @@ impl<'a> InferenceTable<'a> { substitution: Substitution::from1(Interner, ty), }); let goal = GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(sized_pred)).intern(Interner); - matches!(self.try_obligation(goal), Some(Solution::Unique(_))) + self.try_obligation(goal).certain() } } impl fmt::Debug for InferenceTable<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("InferenceTable").field("num_vars", &self.type_variable_table.len()).finish() + f.debug_struct("InferenceTable") + .field("name", &self.infer_ctxt.inner.borrow().type_variable_storage) + .field("fulfillment_cx", &self.fulfillment_cx) + .finish() } } @@ -1047,11 +1194,19 @@ mod resolve { use crate::{ ConcreteConst, Const, ConstData, ConstScalar, ConstValue, DebruijnIndex, GenericArg, InferenceVar, Interner, Lifetime, Ty, TyVariableKind, VariableKind, + next_solver::mapping::NextSolverToChalk, }; use chalk_ir::{ cast::Cast, fold::{TypeFoldable, TypeFolder}, }; + use rustc_type_ir::{FloatVid, IntVid, TyVid}; + + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub(super) enum VarKind { + Ty(TyVariableKind), + Const, + } #[derive(chalk_derive::FallibleTypeFolder)] #[has_interner(Interner)] @@ -1061,7 +1216,7 @@ mod resolve { F: Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg, > { pub(super) table: &'a mut InferenceTable<'b>, - pub(super) var_stack: &'a mut Vec, + pub(super) var_stack: &'a mut Vec<(InferenceVar, VarKind)>, pub(super) fallback: F, } impl TypeFolder for Resolver<'_, '_, F> @@ -1082,25 +1237,91 @@ mod resolve { kind: TyVariableKind, outer_binder: DebruijnIndex, ) -> Ty { - let var = self.table.var_unification_table.inference_var_root(var); - if self.var_stack.contains(&var) { - // recursive type - let default = self.table.fallback_value(var, kind).cast(Interner); - return (self.fallback)(var, VariableKind::Ty(kind), default, outer_binder) - .assert_ty_ref(Interner) - .clone(); - } - if let Some(known_ty) = self.table.var_unification_table.probe_var(var) { - // known_ty may contain other variables that are known by now - self.var_stack.push(var); - let result = known_ty.fold_with(self, outer_binder); - self.var_stack.pop(); - result.assert_ty_ref(Interner).clone() - } else { - let default = self.table.fallback_value(var, kind).cast(Interner); - (self.fallback)(var, VariableKind::Ty(kind), default, outer_binder) - .assert_ty_ref(Interner) - .clone() + match kind { + TyVariableKind::General => { + let vid = self.table.infer_ctxt.root_var(TyVid::from(var.index())); + let var = InferenceVar::from(vid.as_u32()); + if self.var_stack.contains(&(var, VarKind::Ty(kind))) { + // recursive type + let default = self.table.fallback_value(var, kind).cast(Interner); + return (self.fallback)(var, VariableKind::Ty(kind), default, outer_binder) + .assert_ty_ref(Interner) + .clone(); + } + if let Ok(known_ty) = self.table.infer_ctxt.probe_ty_var(vid) { + let known_ty: Ty = known_ty.to_chalk(self.table.interner); + // known_ty may contain other variables that are known by now + self.var_stack.push((var, VarKind::Ty(kind))); + let result = known_ty.fold_with(self, outer_binder); + self.var_stack.pop(); + result + } else { + let default = self.table.fallback_value(var, kind).cast(Interner); + (self.fallback)(var, VariableKind::Ty(kind), default, outer_binder) + .assert_ty_ref(Interner) + .clone() + } + } + TyVariableKind::Integer => { + let vid = self + .table + 
.infer_ctxt + .inner + .borrow_mut() + .int_unification_table() + .find(IntVid::from(var.index())); + let var = InferenceVar::from(vid.as_u32()); + if self.var_stack.contains(&(var, VarKind::Ty(kind))) { + // recursive type + let default = self.table.fallback_value(var, kind).cast(Interner); + return (self.fallback)(var, VariableKind::Ty(kind), default, outer_binder) + .assert_ty_ref(Interner) + .clone(); + } + if let Some(known_ty) = self.table.infer_ctxt.resolve_int_var(vid) { + let known_ty: Ty = known_ty.to_chalk(self.table.interner); + // known_ty may contain other variables that are known by now + self.var_stack.push((var, VarKind::Ty(kind))); + let result = known_ty.fold_with(self, outer_binder); + self.var_stack.pop(); + result + } else { + let default = self.table.fallback_value(var, kind).cast(Interner); + (self.fallback)(var, VariableKind::Ty(kind), default, outer_binder) + .assert_ty_ref(Interner) + .clone() + } + } + TyVariableKind::Float => { + let vid = self + .table + .infer_ctxt + .inner + .borrow_mut() + .float_unification_table() + .find(FloatVid::from(var.index())); + let var = InferenceVar::from(vid.as_u32()); + if self.var_stack.contains(&(var, VarKind::Ty(kind))) { + // recursive type + let default = self.table.fallback_value(var, kind).cast(Interner); + return (self.fallback)(var, VariableKind::Ty(kind), default, outer_binder) + .assert_ty_ref(Interner) + .clone(); + } + if let Some(known_ty) = self.table.infer_ctxt.resolve_float_var(vid) { + let known_ty: Ty = known_ty.to_chalk(self.table.interner); + // known_ty may contain other variables that are known by now + self.var_stack.push((var, VarKind::Ty(kind))); + let result = known_ty.fold_with(self, outer_binder); + self.var_stack.pop(); + result + } else { + let default = self.table.fallback_value(var, kind).cast(Interner); + (self.fallback)(var, VariableKind::Ty(kind), default, outer_binder) + .assert_ty_ref(Interner) + .clone() + } + } } } @@ -1110,25 +1331,30 @@ mod resolve { var: InferenceVar, outer_binder: DebruijnIndex, ) -> Const { - let var = self.table.var_unification_table.inference_var_root(var); + let vid = self + .table + .infer_ctxt + .root_const_var(rustc_type_ir::ConstVid::from_u32(var.index())); + let var = InferenceVar::from(vid.as_u32()); let default = ConstData { ty: ty.clone(), value: ConstValue::Concrete(ConcreteConst { interned: ConstScalar::Unknown }), } .intern(Interner) .cast(Interner); - if self.var_stack.contains(&var) { + if self.var_stack.contains(&(var, VarKind::Const)) { // recursive return (self.fallback)(var, VariableKind::Const(ty), default, outer_binder) .assert_const_ref(Interner) .clone(); } - if let Some(known_ty) = self.table.var_unification_table.probe_var(var) { + if let Ok(known_const) = self.table.infer_ctxt.probe_const_var(vid) { + let known_const: Const = known_const.to_chalk(self.table.interner); // known_ty may contain other variables that are known by now - self.var_stack.push(var); - let result = known_ty.fold_with(self, outer_binder); + self.var_stack.push((var, VarKind::Const)); + let result = known_const.fold_with(self, outer_binder); self.var_stack.pop(); - result.assert_const_ref(Interner).clone() + result } else { (self.fallback)(var, VariableKind::Const(ty), default, outer_binder) .assert_const_ref(Interner) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs index fecb3f4242a92..57ef5523b4332 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs +++ 
b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs @@ -2,11 +2,10 @@ //! representation of the various objects Chalk deals with (types, goals etc.). use crate::{ - AliasTy, CanonicalVarKind, CanonicalVarKinds, ClosureId, Const, ConstData, ConstScalar, - Constraint, Constraints, FnAbi, FnDefId, GenericArg, GenericArgData, Goal, GoalData, Goals, - InEnvironment, Lifetime, LifetimeData, OpaqueTy, OpaqueTyId, ProgramClause, ProgramClauseData, - ProgramClauses, ProjectionTy, QuantifiedWhereClause, QuantifiedWhereClauses, Substitution, Ty, - TyData, TyKind, VariableKind, VariableKinds, chalk_db, tls, + AliasTy, CanonicalVarKind, CanonicalVarKinds, ClosureId, Const, ConstData, ConstScalar, FnAbi, + FnDefId, GenericArg, GenericArgData, Goal, GoalData, InEnvironment, Lifetime, LifetimeData, + OpaqueTy, OpaqueTyId, ProgramClause, ProjectionTy, QuantifiedWhereClause, + QuantifiedWhereClauses, Substitution, Ty, TyKind, VariableKind, chalk_db, tls, }; use chalk_ir::{ProgramClauseImplication, SeparatorTraitRef, Variance}; use hir_def::TypeAliasId; @@ -15,11 +14,19 @@ use smallvec::SmallVec; use std::fmt; use triomphe::Arc; +type TyData = chalk_ir::TyData; +type VariableKinds = chalk_ir::VariableKinds; +type Goals = chalk_ir::Goals; +type ProgramClauseData = chalk_ir::ProgramClauseData; +type Constraint = chalk_ir::Constraint; +type Constraints = chalk_ir::Constraints; +type ProgramClauses = chalk_ir::ProgramClauses; + #[derive(Debug, Copy, Clone, Hash, PartialOrd, Ord, PartialEq, Eq)] pub struct Interner; -#[derive(PartialEq, Eq, Hash)] -pub struct InternedWrapper(T); +#[derive(PartialEq, Eq, Hash, PartialOrd, Ord, Clone)] +pub struct InternedWrapper(pub(crate) T); impl fmt::Debug for InternedWrapper { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -27,6 +34,9 @@ impl fmt::Debug for InternedWrapper { } } +#[derive(PartialEq, Eq, Hash, PartialOrd, Ord, Clone)] +pub struct InternedWrapperNoDebug(pub(crate) T); + impl std::ops::Deref for InternedWrapper { type Target = T; @@ -124,6 +134,7 @@ impl chalk_ir::interner::Interner for Interner { fmt: &mut fmt::Formatter<'_>, ) -> Option { tls::with_current_program(|prog| Some(prog?.debug_projection_ty(proj, fmt))) + .or_else(|| Some(fmt.write_str("ProjectionTy"))) } fn debug_opaque_ty(opaque_ty: &OpaqueTy, fmt: &mut fmt::Formatter<'_>) -> Option { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs index 107da6a5af6d6..f21673c732e40 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs @@ -2,33 +2,40 @@ use std::fmt; -use chalk_ir::{AdtId, FloatTy, IntTy, TyKind, UintTy}; use hir_def::{ - LocalFieldId, StructId, - layout::{ - Float, Integer, LayoutCalculator, LayoutCalculatorError, LayoutData, Primitive, - ReprOptions, Scalar, StructKind, TargetDataLayout, WrappingRange, - }, + AdtId, LocalFieldId, StructId, + layout::{LayoutCalculatorError, LayoutData}, }; use la_arena::{Idx, RawIdx}; -use rustc_abi::AddressSpace; -use rustc_index::IndexVec; +use rustc_abi::{ + AddressSpace, Float, Integer, LayoutCalculator, Primitive, ReprOptions, Scalar, StructKind, + TargetDataLayout, WrappingRange, +}; +use rustc_index::IndexVec; +use rustc_type_ir::{ + FloatTy, IntTy, UintTy, + inherent::{IntoKind, SliceLike}, +}; use triomphe::Arc; +use crate::utils::ClosureSubst; use crate::{ - Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, - consteval::try_const_usize, - db::{HirDatabase, InternedClosure}, - 
infer::normalize, - utils::ClosureSubst, + Interner, TraitEnvironment, + consteval_nextsolver::try_const_usize, + db::HirDatabase, + next_solver::{ + DbInterner, GenericArgs, ParamEnv, Ty, TyKind, TypingMode, + infer::{DbInternerInferExt, traits::ObligationCause}, + mapping::{ChalkToNextSolver, convert_args_for_result}, + }, }; pub(crate) use self::adt::layout_of_adt_cycle_result; pub use self::{adt::layout_of_adt_query, target::target_data_layout_query}; -mod adt; -mod target; +pub(crate) mod adt; +pub(crate) mod target; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct RustcEnumVariantIdx(pub usize); @@ -119,11 +126,12 @@ impl<'a> LayoutCx<'a> { } } -fn layout_of_simd_ty( - db: &dyn HirDatabase, +// FIXME: move this to the `rustc_abi`. +fn layout_of_simd_ty<'db>( + db: &'db dyn HirDatabase, id: StructId, repr_packed: bool, - subst: &Substitution, + args: &GenericArgs<'db>, env: Arc, dl: &TargetDataLayout, ) -> Result, LayoutError> { @@ -132,115 +140,119 @@ fn layout_of_simd_ty( // * #[repr(simd)] struct S([T; 4]) // // where T is a primitive scalar (integer/float/pointer). - let fields = db.field_types(id.into()); + let fields = db.field_types_ns(id.into()); let mut fields = fields.iter(); let Some(TyKind::Array(e_ty, e_len)) = fields .next() .filter(|_| fields.next().is_none()) - .map(|f| f.1.clone().substitute(Interner, subst).kind(Interner).clone()) + .map(|f| (*f.1).instantiate(DbInterner::new_with(db, None, None), args).kind()) else { return Err(LayoutError::InvalidSimdType); }; - let e_len = try_const_usize(db, &e_len).ok_or(LayoutError::HasErrorConst)? as u64; + let e_len = try_const_usize(db, e_len).ok_or(LayoutError::HasErrorConst)? as u64; let e_ly = db.layout_of_ty(e_ty, env)?; let cx = LayoutCx::new(dl); Ok(Arc::new(cx.calc.simd_type(e_ly, e_len, repr_packed)?)) } -pub fn layout_of_ty_query( - db: &dyn HirDatabase, - ty: Ty, +pub fn layout_of_ty_query<'db>( + db: &'db dyn HirDatabase, + ty: Ty<'db>, trait_env: Arc, ) -> Result, LayoutError> { let krate = trait_env.krate; + let interner = DbInterner::new_with(db, Some(krate), trait_env.block); let Ok(target) = db.target_data_layout(krate) else { return Err(LayoutError::TargetLayoutNotAvailable); }; let dl = &*target; let cx = LayoutCx::new(dl); - let ty = normalize(db, trait_env.clone(), ty); - let kind = ty.kind(Interner); - let result = match kind { - TyKind::Adt(AdtId(def), subst) => { - if let hir_def::AdtId::StructId(s) = def { - let data = db.struct_signature(*s); - let repr = data.repr.unwrap_or_default(); - if repr.simd() { - return layout_of_simd_ty(db, *s, repr.packed(), subst, trait_env, &target); + let infer_ctxt = interner.infer_ctxt().build(TypingMode::PostAnalysis); + let cause = ObligationCause::dummy(); + let ty = infer_ctxt.at(&cause, ParamEnv::empty()).deeply_normalize(ty).unwrap_or(ty); + let result = match ty.kind() { + TyKind::Adt(def, args) => { + match def.inner().id { + hir_def::AdtId::StructId(s) => { + let data = db.struct_signature(s); + let repr = data.repr.unwrap_or_default(); + if repr.simd() { + return layout_of_simd_ty(db, s, repr.packed(), &args, trait_env, &target); + } } - }; - return db.layout_of_adt(*def, subst.clone(), trait_env); + _ => {} + } + return db.layout_of_adt(def.inner().id, args, trait_env); } - TyKind::Scalar(s) => match s { - chalk_ir::Scalar::Bool => Layout::scalar( - dl, - Scalar::Initialized { - value: Primitive::Int(Integer::I8, false), - valid_range: WrappingRange { start: 0, end: 1 }, - }, - ), - chalk_ir::Scalar::Char => Layout::scalar( + TyKind::Bool => 
Layout::scalar( + dl, + Scalar::Initialized { + value: Primitive::Int(Integer::I8, false), + valid_range: WrappingRange { start: 0, end: 1 }, + }, + ), + TyKind::Char => Layout::scalar( + dl, + Scalar::Initialized { + value: Primitive::Int(Integer::I32, false), + valid_range: WrappingRange { start: 0, end: 0x10FFFF }, + }, + ), + TyKind::Int(i) => Layout::scalar( + dl, + scalar_unit( dl, - Scalar::Initialized { - value: Primitive::Int(Integer::I32, false), - valid_range: WrappingRange { start: 0, end: 0x10FFFF }, - }, - ), - chalk_ir::Scalar::Int(i) => Layout::scalar( - dl, - scalar_unit( - dl, - Primitive::Int( - match i { - IntTy::Isize => dl.ptr_sized_integer(), - IntTy::I8 => Integer::I8, - IntTy::I16 => Integer::I16, - IntTy::I32 => Integer::I32, - IntTy::I64 => Integer::I64, - IntTy::I128 => Integer::I128, - }, - true, - ), + Primitive::Int( + match i { + IntTy::Isize => dl.ptr_sized_integer(), + IntTy::I8 => Integer::I8, + IntTy::I16 => Integer::I16, + IntTy::I32 => Integer::I32, + IntTy::I64 => Integer::I64, + IntTy::I128 => Integer::I128, + }, + true, ), ), - chalk_ir::Scalar::Uint(i) => Layout::scalar( + ), + TyKind::Uint(i) => Layout::scalar( + dl, + scalar_unit( dl, - scalar_unit( - dl, - Primitive::Int( - match i { - UintTy::Usize => dl.ptr_sized_integer(), - UintTy::U8 => Integer::I8, - UintTy::U16 => Integer::I16, - UintTy::U32 => Integer::I32, - UintTy::U64 => Integer::I64, - UintTy::U128 => Integer::I128, - }, - false, - ), + Primitive::Int( + match i { + UintTy::Usize => dl.ptr_sized_integer(), + UintTy::U8 => Integer::I8, + UintTy::U16 => Integer::I16, + UintTy::U32 => Integer::I32, + UintTy::U64 => Integer::I64, + UintTy::U128 => Integer::I128, + }, + false, ), ), - chalk_ir::Scalar::Float(f) => Layout::scalar( + ), + TyKind::Float(f) => Layout::scalar( + dl, + scalar_unit( dl, - scalar_unit( - dl, - Primitive::Float(match f { - FloatTy::F16 => Float::F16, - FloatTy::F32 => Float::F32, - FloatTy::F64 => Float::F64, - FloatTy::F128 => Float::F128, - }), - ), + Primitive::Float(match f { + FloatTy::F16 => Float::F16, + FloatTy::F32 => Float::F32, + FloatTy::F64 => Float::F64, + FloatTy::F128 => Float::F128, + }), ), - }, - TyKind::Tuple(len, tys) => { - let kind = if *len == 0 { StructKind::AlwaysSized } else { StructKind::MaybeUnsized }; + ), + TyKind::Tuple(tys) => { + let kind = + if tys.len() == 0 { StructKind::AlwaysSized } else { StructKind::MaybeUnsized }; let fields = tys - .iter(Interner) - .map(|k| db.layout_of_ty(k.assert_ty_ref(Interner).clone(), trait_env.clone())) + .iter() + .map(|k| db.layout_of_ty(k, trait_env.clone())) .collect::, _>>()?; let fields = fields.iter().map(|it| &**it).collect::>(); let fields = fields.iter().collect::>(); @@ -248,11 +260,11 @@ pub fn layout_of_ty_query( } TyKind::Array(element, count) => { let count = try_const_usize(db, count).ok_or(LayoutError::HasErrorConst)? as u64; - let element = db.layout_of_ty(element.clone(), trait_env)?; + let element = db.layout_of_ty(element, trait_env)?; cx.calc.array_like::<_, _, ()>(&element, Some(count))? } TyKind::Slice(element) => { - let element = db.layout_of_ty(element.clone(), trait_env)?; + let element = db.layout_of_ty(element, trait_env)?; cx.calc.array_like::<_, _, ()>(&element, None)? } TyKind::Str => { @@ -260,18 +272,21 @@ pub fn layout_of_ty_query( cx.calc.array_like::<_, _, ()>(&Layout::scalar(dl, element), None)? } // Potentially-wide pointers. 
- TyKind::Ref(_, _, pointee) | TyKind::Raw(_, pointee) => { + TyKind::Ref(_, pointee, _) | TyKind::RawPtr(pointee, _) => { let mut data_ptr = scalar_unit(dl, Primitive::Pointer(AddressSpace::ZERO)); - if matches!(ty.kind(Interner), TyKind::Ref(..)) { + if matches!(ty.kind(), TyKind::Ref(..)) { data_ptr.valid_range_mut().start = 1; } + // FIXME(next-solver) // let pointee = tcx.normalize_erasing_regions(param_env, pointee); // if pointee.is_sized(tcx.at(DUMMY_SP), param_env) { - // return Ok(tcx.mk_layout(LayoutData::scalar(cx, data_ptr))); + // return Ok(tcx.mk_layout(LayoutS::scalar(cx, data_ptr))); // } - let mut unsized_part = struct_tail_erasing_lifetimes(db, pointee.clone()); + let unsized_part = struct_tail_erasing_lifetimes(db, pointee); + // FIXME(next-solver) + /* if let TyKind::AssociatedType(id, subst) = unsized_part.kind(Interner) { unsized_part = TyKind::Alias(chalk_ir::AliasTy::Projection(ProjectionTy { associated_ty_id: *id, @@ -280,11 +295,12 @@ pub fn layout_of_ty_query( .intern(Interner); } unsized_part = normalize(db, trait_env, unsized_part); - let metadata = match unsized_part.kind(Interner) { + */ + let metadata = match unsized_part.kind() { TyKind::Slice(_) | TyKind::Str => { scalar_unit(dl, Primitive::Int(dl.ptr_sized_integer(), false)) } - TyKind::Dyn(..) => { + TyKind::Dynamic(..) => { let mut vtable = scalar_unit(dl, Primitive::Pointer(AddressSpace::ZERO)); vtable.valid_range_mut().start = 1; vtable @@ -299,97 +315,87 @@ pub fn layout_of_ty_query( LayoutData::scalar_pair(dl, data_ptr, metadata) } TyKind::Never => LayoutData::never_type(dl), - TyKind::FnDef(..) | TyKind::Dyn(_) | TyKind::Foreign(_) => { - let sized = matches!(kind, TyKind::FnDef(..)); - LayoutData::unit(dl, sized) - } - TyKind::Function(_) => { + TyKind::FnDef(..) => LayoutData::unit(dl, true), + TyKind::Dynamic(..) | TyKind::Foreign(_) => LayoutData::unit(dl, false), + TyKind::FnPtr(..) => { let mut ptr = scalar_unit(dl, Primitive::Pointer(dl.instruction_address_space)); ptr.valid_range_mut().start = 1; Layout::scalar(dl, ptr) } - TyKind::OpaqueType(opaque_ty_id, _) => { - let impl_trait_id = db.lookup_intern_impl_trait_id((*opaque_ty_id).into()); - match impl_trait_id { - crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => { - let infer = db.infer(func.into()); - return db.layout_of_ty(infer.type_of_rpit[idx].clone(), trait_env); - } - crate::ImplTraitId::TypeAliasImplTrait(..) => { - return Err(LayoutError::NotImplemented); - } - crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => { - return Err(LayoutError::NotImplemented); - } - } - } - TyKind::Closure(c, subst) => { - let InternedClosure(def, _) = db.lookup_intern_closure((*c).into()); - let infer = db.infer(def); - let (captures, _) = infer.closure_info(c); + TyKind::Closure(id, args) => { + let def = db.lookup_intern_closure(id.0); + let infer = db.infer(def.0); + let (captures, _) = infer.closure_info(&id.0.into()); let fields = captures .iter() .map(|it| { - db.layout_of_ty( - it.ty.clone().substitute(Interner, ClosureSubst(subst).parent_subst()), - trait_env.clone(), - ) + let ty = it + .ty + .clone() + .substitute( + Interner, + &ClosureSubst(&convert_args_for_result(interner, args.inner())) + .parent_subst(db), + ) + .to_nextsolver(interner); + db.layout_of_ty(ty, trait_env.clone()) }) .collect::, _>>()?; let fields = fields.iter().map(|it| &**it).collect::>(); let fields = fields.iter().collect::>(); cx.calc.univariant(&fields, &ReprOptions::default(), StructKind::AlwaysSized)? 
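// Editor's aside, not part of the patch: a quick standard-Rust check of the scalar-pair
// layout that the `Ref`/`RawPtr` arm above computes. References to slices and trait
// objects are two words wide (data pointer plus length or vtable pointer); thin
// references are a single word.
use std::mem::size_of;

trait Draw {
    fn draw(&self);
}

fn main() {
    let word = size_of::<usize>();
    assert_eq!(size_of::<&u8>(), word);           // thin pointer: data pointer only
    assert_eq!(size_of::<&[u8]>(), 2 * word);     // wide pointer: data pointer + length
    assert_eq!(size_of::<&dyn Draw>(), 2 * word); // wide pointer: data pointer + vtable pointer
}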
} - TyKind::Coroutine(_, _) | TyKind::CoroutineWitness(_, _) => { + + TyKind::Coroutine(_, _) + | TyKind::CoroutineWitness(_, _) + | TyKind::CoroutineClosure(_, _) => { + return Err(LayoutError::NotImplemented); + } + + TyKind::Pat(_, _) | TyKind::UnsafeBinder(_) => { return Err(LayoutError::NotImplemented); } - TyKind::Error => return Err(LayoutError::HasErrorType), - TyKind::AssociatedType(id, subst) => { - // Try again with `TyKind::Alias` to normalize the associated type. - // Usually we should not try to normalize `TyKind::AssociatedType`, but layout calculation is used - // in monomorphized MIR where this is okay. If outside monomorphization, this will lead to cycle, - // which we will recover from with an error. - let ty = TyKind::Alias(chalk_ir::AliasTy::Projection(ProjectionTy { - associated_ty_id: *id, - substitution: subst.clone(), - })) - .intern(Interner); - return db.layout_of_ty(ty, trait_env); + + TyKind::Error(_) => return Err(LayoutError::HasErrorType), + TyKind::Placeholder(_) + | TyKind::Bound(..) + | TyKind::Infer(..) + | TyKind::Param(..) + | TyKind::Alias(..) => { + return Err(LayoutError::HasPlaceholder); } - TyKind::Alias(_) - | TyKind::Placeholder(_) - | TyKind::BoundVar(_) - | TyKind::InferenceVar(_, _) => return Err(LayoutError::HasPlaceholder), }; Ok(Arc::new(result)) } -pub(crate) fn layout_of_ty_cycle_result( +pub(crate) fn layout_of_ty_cycle_result<'db>( _: &dyn HirDatabase, - _: Ty, + _: Ty<'db>, _: Arc, ) -> Result, LayoutError> { Err(LayoutError::RecursiveTypeWithoutIndirection) } -fn struct_tail_erasing_lifetimes(db: &dyn HirDatabase, pointee: Ty) -> Ty { - match pointee.kind(Interner) { - &TyKind::Adt(AdtId(hir_def::AdtId::StructId(i)), ref subst) => { - let data = i.fields(db); +fn struct_tail_erasing_lifetimes<'a>(db: &'a dyn HirDatabase, pointee: Ty<'a>) -> Ty<'a> { + match pointee.kind() { + TyKind::Adt(def, args) => { + let struct_id = match def.inner().id { + AdtId::StructId(id) => id, + _ => return pointee, + }; + let data = struct_id.fields(db); let mut it = data.fields().iter().rev(); match it.next() { Some((f, _)) => { - let last_field_ty = field_ty(db, i.into(), f, subst); + let last_field_ty = field_ty(db, struct_id.into(), f, &args); struct_tail_erasing_lifetimes(db, last_field_ty) } None => pointee, } } - TyKind::Tuple(_, subst) => { - if let Some(last_field_ty) = - subst.iter(Interner).last().and_then(|arg| arg.ty(Interner)) - { - struct_tail_erasing_lifetimes(db, last_field_ty.clone()) + TyKind::Tuple(tys) => { + if let Some(last_field_ty) = tys.iter().last() { + struct_tail_erasing_lifetimes(db, last_field_ty) } else { pointee } @@ -398,13 +404,13 @@ fn struct_tail_erasing_lifetimes(db: &dyn HirDatabase, pointee: Ty) -> Ty { } } -fn field_ty( - db: &dyn HirDatabase, +fn field_ty<'a>( + db: &'a dyn HirDatabase, def: hir_def::VariantId, fd: LocalFieldId, - subst: &Substitution, -) -> Ty { - db.field_types(def)[fd].clone().substitute(Interner, subst) + args: &GenericArgs<'a>, +) -> Ty<'a> { + db.field_types_ns(def)[fd].instantiate(DbInterner::new_with(db, None, None), args) } fn scalar_unit(dl: &TargetDataLayout, value: Primitive) -> Scalar { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs index 3f310c26ec14a..9a746ca888589 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs @@ -4,26 +4,25 @@ use std::{cmp, ops::Bound}; use hir_def::{ AdtId, VariantId, - layout::{Integer, ReprOptions, 
TargetDataLayout}, signatures::{StructFlags, VariantFields}, }; use intern::sym; +use rustc_abi::{Integer, ReprOptions, TargetDataLayout}; use rustc_index::IndexVec; use smallvec::SmallVec; use triomphe::Arc; use crate::{ - Substitution, TraitEnvironment, + TraitEnvironment, db::HirDatabase, - layout::{Layout, LayoutError, field_ty}, + layout::{Layout, LayoutCx, LayoutError, field_ty}, + next_solver::GenericArgs, }; -use super::LayoutCx; - -pub fn layout_of_adt_query( - db: &dyn HirDatabase, +pub fn layout_of_adt_query<'db>( + db: &'db dyn HirDatabase, def: AdtId, - subst: Substitution, + args: GenericArgs<'db>, trait_env: Arc, ) -> Result, LayoutError> { let krate = trait_env.krate; @@ -35,7 +34,7 @@ pub fn layout_of_adt_query( let handle_variant = |def: VariantId, var: &VariantFields| { var.fields() .iter() - .map(|(fd, _)| db.layout_of_ty(field_ty(db, def, fd, &subst), trait_env.clone())) + .map(|(fd, _)| db.layout_of_ty(field_ty(db, def, fd, &args), trait_env.clone())) .collect::, _>>() }; let (variants, repr, is_special_no_niche) = match def { @@ -96,6 +95,15 @@ pub fn layout_of_adt_query( Ok(Arc::new(result)) } +pub(crate) fn layout_of_adt_cycle_result<'db>( + _: &'db dyn HirDatabase, + _def: AdtId, + _args: GenericArgs<'db>, + _trait_env: Arc, +) -> Result, LayoutError> { + Err(LayoutError::RecursiveTypeWithoutIndirection) +} + fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound, Bound) { let attrs = db.attrs(def.into()); let get = |name| { @@ -120,15 +128,6 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound, (get(sym::rustc_layout_scalar_valid_range_start), get(sym::rustc_layout_scalar_valid_range_end)) } -pub(crate) fn layout_of_adt_cycle_result( - _: &dyn HirDatabase, - _: AdtId, - _: Substitution, - _: Arc, -) -> Result, LayoutError> { - Err(LayoutError::RecursiveTypeWithoutIndirection) -} - /// Finds the appropriate Integer type and signedness for the given /// signed discriminant range and `#[repr]` attribute. /// N.B.: `u128` values above `i128::MAX` will be treated as signed, but diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs index 82d0ed4f19470..8a7d93d50c05b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs @@ -1,6 +1,6 @@ //! 
Target dependent parameters needed for layouts -use base_db::Crate; +use base_db::{Crate, target::TargetLoadError}; use hir_def::layout::TargetDataLayout; use rustc_abi::{AddressSpace, AlignFromBytesError, TargetDataLayoutErrors}; use triomphe::Arc; @@ -10,9 +10,9 @@ use crate::db::HirDatabase; pub fn target_data_layout_query( db: &dyn HirDatabase, krate: Crate, -) -> Result, Arc> { - match &krate.workspace_data(db).data_layout { - Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it, AddressSpace::ZERO) { +) -> Result, TargetLoadError> { + match &krate.workspace_data(db).target { + Ok(target) => match TargetDataLayout::parse_from_llvm_datalayout_string(&target.data_layout, AddressSpace::ZERO) { Ok(it) => Ok(Arc::new(it)), Err(e) => { Err(match e { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs index b3bc226ec93c7..523ddad94666b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs @@ -1,3 +1,4 @@ +use base_db::target::TargetData; use chalk_ir::{AdtId, TyKind}; use either::Either; use hir_def::db::DefDatabase; @@ -11,13 +12,15 @@ use crate::{ Interner, Substitution, db::HirDatabase, layout::{Layout, LayoutError}, + next_solver::{DbInterner, mapping::ChalkToNextSolver}, + setup_tracing, test_db::TestDB, }; mod closure; -fn current_machine_data_layout() -> String { - project_model::toolchain_info::target_data_layout::get( +fn current_machine_target_data() -> TargetData { + project_model::toolchain_info::target_data::get( QueryConfig::Rustc(&Sysroot::empty(), &std::env::current_dir().unwrap()), None, &FxHashMap::default(), @@ -29,7 +32,9 @@ fn eval_goal( #[rust_analyzer::rust_fixture] ra_fixture: &str, minicore: &str, ) -> Result, LayoutError> { - let target_data_layout = current_machine_data_layout(); + let _tracing = setup_tracing(); + let target_data = current_machine_target_data(); + let target_data_layout = target_data.data_layout; let ra_fixture = format!( "//- target_data_layout: {target_data_layout}\n{minicore}//- /main.rs crate:test\n{ra_fixture}", ); @@ -83,13 +88,16 @@ fn eval_goal( db.ty(ty_id.into()).substitute(Interner, &Substitution::empty(Interner)) } }; - db.layout_of_ty( - goal_ty, - db.trait_environment(match adt_or_type_alias_id { - Either::Left(adt) => hir_def::GenericDefId::AdtId(adt), - Either::Right(ty) => hir_def::GenericDefId::TypeAliasId(ty), - }), - ) + salsa::attach(&db, || { + let interner = DbInterner::new_with(&db, None, None); + db.layout_of_ty( + goal_ty.to_nextsolver(interner), + db.trait_environment(match adt_or_type_alias_id { + Either::Left(adt) => hir_def::GenericDefId::AdtId(adt), + Either::Right(ty) => hir_def::GenericDefId::TypeAliasId(ty), + }), + ) + }) } /// A version of `eval_goal` for types that can not be expressed in ADTs, like closures and `impl Trait` @@ -97,7 +105,9 @@ fn eval_expr( #[rust_analyzer::rust_fixture] ra_fixture: &str, minicore: &str, ) -> Result, LayoutError> { - let target_data_layout = current_machine_data_layout(); + let _tracing = setup_tracing(); + let target_data = current_machine_target_data(); + let target_data_layout = target_data.data_layout; let ra_fixture = format!( "//- target_data_layout: {target_data_layout}\n{minicore}//- /main.rs crate:test\nfn main(){{let goal = {{{ra_fixture}}};}}", ); @@ -125,7 +135,10 @@ fn eval_expr( .0; let infer = db.infer(function_id.into()); let goal_ty = infer.type_of_binding[b].clone(); - 
db.layout_of_ty(goal_ty, db.trait_environment(function_id.into())) + salsa::attach(&db, || { + let interner = DbInterner::new_with(&db, None, None); + db.layout_of_ty(goal_ty.to_nextsolver(interner), db.trait_environment(function_id.into())) + }) } #[track_caller] diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs index e787fd9b1e584..451622ef7472b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs @@ -21,6 +21,27 @@ extern crate rustc_pattern_analysis; #[cfg(not(feature = "in-rust-tree"))] extern crate ra_ap_rustc_pattern_analysis as rustc_pattern_analysis; +#[cfg(feature = "in-rust-tree")] +extern crate rustc_ast_ir; + +#[cfg(not(feature = "in-rust-tree"))] +extern crate ra_ap_rustc_ast_ir as rustc_ast_ir; + +#[cfg(feature = "in-rust-tree")] +extern crate rustc_type_ir; + +#[cfg(not(feature = "in-rust-tree"))] +extern crate ra_ap_rustc_type_ir as rustc_type_ir; + +#[cfg(feature = "in-rust-tree")] +extern crate rustc_next_trait_solver; + +#[cfg(not(feature = "in-rust-tree"))] +extern crate ra_ap_rustc_next_trait_solver as rustc_next_trait_solver; + +#[cfg(feature = "in-rust-tree")] +extern crate rustc_data_structures as ena; + mod builder; mod chalk_db; mod chalk_ext; @@ -29,13 +50,16 @@ mod infer; mod inhabitedness; mod interner; mod lower; +mod lower_nextsolver; mod mapping; +pub mod next_solver; mod target_feature; mod tls; mod utils; pub mod autoderef; pub mod consteval; +pub mod consteval_nextsolver; pub mod db; pub mod diagnostics; pub mod display; @@ -57,7 +81,7 @@ mod variance; use std::hash::Hash; use chalk_ir::{ - NoSolution, + NoSolution, VariableKinds, fold::{Shift, TypeFoldable}, interner::HasInterner, }; @@ -69,6 +93,7 @@ use intern::{Symbol, sym}; use la_arena::{Arena, Idx}; use mir::{MirEvalError, VTableMap}; use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet}; +use rustc_type_ir::inherent::SliceLike; use syntax::ast::{ConstArg, make}; use traits::FnTrait; use triomphe::Arc; @@ -79,6 +104,10 @@ use crate::{ display::{DisplayTarget, HirDisplay}, generics::Generics, infer::unify::InferenceTable, + next_solver::{ + DbInterner, + mapping::{ChalkToNextSolver, convert_ty_for_result}, + }, }; pub use autoderef::autoderef; @@ -89,23 +118,27 @@ pub use infer::{ Adjust, Adjustment, AutoBorrow, BindingMode, InferenceDiagnostic, InferenceResult, InferenceTyDiagnosticSource, OverloadedDeref, PointerCast, cast::CastError, - closure::{CaptureKind, CapturedItem}, + closure::analysis::{CaptureKind, CapturedItem}, could_coerce, could_unify, could_unify_deeply, }; pub use interner::Interner; pub use lower::{ ImplTraitLoweringMode, LifetimeElisionKind, ParamLoweringMode, TyDefId, TyLoweringContext, - ValueTyDefId, associated_type_shorthand_candidates, diagnostics::*, + ValueTyDefId, diagnostics::*, }; +pub use lower_nextsolver::associated_type_shorthand_candidates; pub use mapping::{ ToChalk, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx, lt_from_placeholder_idx, lt_to_placeholder_idx, to_assoc_type_id, to_chalk_trait_id, - to_foreign_def_id, to_placeholder_idx, + to_foreign_def_id, to_placeholder_idx, to_placeholder_idx_no_index, }; pub use method_resolution::check_orphan_rules; pub use target_feature::TargetFeatures; pub use traits::TraitEnvironment; -pub use utils::{Unsafety, all_super_traits, direct_super_traits, is_fn_unsafe_to_call}; +pub use utils::{ + TargetFeatureIsSafeInTarget, Unsafety, all_super_traits, 
direct_super_traits, + is_fn_unsafe_to_call, target_feature_is_safe_in_target, +}; pub use variance::Variance; pub use chalk_ir::{ @@ -121,9 +154,9 @@ pub type ClosureId = chalk_ir::ClosureId; pub type OpaqueTyId = chalk_ir::OpaqueTyId; pub type PlaceholderIndex = chalk_ir::PlaceholderIndex; -pub type VariableKind = chalk_ir::VariableKind; -pub type VariableKinds = chalk_ir::VariableKinds; pub type CanonicalVarKinds = chalk_ir::CanonicalVarKinds; + +pub(crate) type VariableKind = chalk_ir::VariableKind; /// Represents generic parameters and an item bound by them. When the item has parent, the binders /// also contain the generic parameters for its parent. See chalk's documentation for details. /// @@ -145,71 +178,64 @@ pub type GenericArgData = chalk_ir::GenericArgData; pub type Ty = chalk_ir::Ty; pub type TyKind = chalk_ir::TyKind; pub type TypeFlags = chalk_ir::TypeFlags; -pub type DynTy = chalk_ir::DynTy; +pub(crate) type DynTy = chalk_ir::DynTy; pub type FnPointer = chalk_ir::FnPointer; -// pub type FnSubst = chalk_ir::FnSubst; // a re-export so we don't lose the tuple constructor -pub use chalk_ir::FnSubst; -pub type ProjectionTy = chalk_ir::ProjectionTy; +pub(crate) use chalk_ir::FnSubst; // a re-export so we don't lose the tuple constructor + pub type AliasTy = chalk_ir::AliasTy; -pub type OpaqueTy = chalk_ir::OpaqueTy; -pub type InferenceVar = chalk_ir::InferenceVar; -pub type Lifetime = chalk_ir::Lifetime; -pub type LifetimeData = chalk_ir::LifetimeData; -pub type LifetimeOutlives = chalk_ir::LifetimeOutlives; +pub type ProjectionTy = chalk_ir::ProjectionTy; +pub(crate) type OpaqueTy = chalk_ir::OpaqueTy; +pub(crate) type InferenceVar = chalk_ir::InferenceVar; + +pub(crate) type Lifetime = chalk_ir::Lifetime; +pub(crate) type LifetimeData = chalk_ir::LifetimeData; +pub(crate) type LifetimeOutlives = chalk_ir::LifetimeOutlives; -pub type Const = chalk_ir::Const; -pub type ConstData = chalk_ir::ConstData; pub type ConstValue = chalk_ir::ConstValue; -pub type ConcreteConst = chalk_ir::ConcreteConst; -pub type ChalkTraitId = chalk_ir::TraitId; +pub type Const = chalk_ir::Const; +pub(crate) type ConstData = chalk_ir::ConstData; +pub(crate) type ConcreteConst = chalk_ir::ConcreteConst; + pub type TraitRef = chalk_ir::TraitRef; pub type QuantifiedWhereClause = Binders; -pub type QuantifiedWhereClauses = chalk_ir::QuantifiedWhereClauses; pub type Canonical = chalk_ir::Canonical; -pub type FnSig = chalk_ir::FnSig; +pub(crate) type ChalkTraitId = chalk_ir::TraitId; +pub(crate) type QuantifiedWhereClauses = chalk_ir::QuantifiedWhereClauses; + +pub(crate) type FnSig = chalk_ir::FnSig; pub type InEnvironment = chalk_ir::InEnvironment; -pub type Environment = chalk_ir::Environment; -pub type DomainGoal = chalk_ir::DomainGoal; -pub type Goal = chalk_ir::Goal; pub type AliasEq = chalk_ir::AliasEq; -pub type Solution = chalk_solve::Solution; -pub type Constraint = chalk_ir::Constraint; -pub type Constraints = chalk_ir::Constraints; -pub type ConstrainedSubst = chalk_ir::ConstrainedSubst; -pub type Guidance = chalk_solve::Guidance; pub type WhereClause = chalk_ir::WhereClause; -pub type CanonicalVarKind = chalk_ir::CanonicalVarKind; -pub type GoalData = chalk_ir::GoalData; -pub type Goals = chalk_ir::Goals; -pub type ProgramClauseData = chalk_ir::ProgramClauseData; -pub type ProgramClause = chalk_ir::ProgramClause; -pub type ProgramClauses = chalk_ir::ProgramClauses; -pub type TyData = chalk_ir::TyData; -pub type Variances = chalk_ir::Variances; +pub(crate) type DomainGoal = chalk_ir::DomainGoal; 
+pub(crate) type Goal = chalk_ir::Goal; + +pub(crate) type CanonicalVarKind = chalk_ir::CanonicalVarKind; +pub(crate) type GoalData = chalk_ir::GoalData; +pub(crate) type ProgramClause = chalk_ir::ProgramClause; /// A constant can have reference to other things. Memory map job is holding /// the necessary bits of memory of the const eval session to keep the constant /// meaningful. #[derive(Debug, Default, Clone, PartialEq, Eq)] -pub enum MemoryMap { +pub enum MemoryMap<'db> { #[default] Empty, Simple(Box<[u8]>), - Complex(Box), + Complex(Box>), } #[derive(Debug, Default, Clone, PartialEq, Eq)] -pub struct ComplexMemoryMap { +pub struct ComplexMemoryMap<'db> { memory: IndexMap, FxBuildHasher>, - vtable: VTableMap, + vtable: VTableMap<'db>, } -impl ComplexMemoryMap { +impl ComplexMemoryMap<'_> { fn insert(&mut self, addr: usize, val: Box<[u8]>) { match self.memory.entry(addr) { Entry::Occupied(mut e) => { @@ -224,8 +250,8 @@ impl ComplexMemoryMap { } } -impl MemoryMap { - pub fn vtable_ty(&self, id: usize) -> Result<&Ty, MirEvalError> { +impl<'db> MemoryMap<'db> { + pub fn vtable_ty(&self, id: usize) -> Result, MirEvalError> { match self { MemoryMap::Empty | MemoryMap::Simple(_) => Err(MirEvalError::InvalidVTableId(id)), MemoryMap::Complex(cm) => cm.vtable.ty(id), @@ -275,10 +301,11 @@ impl MemoryMap { } } +// FIXME(next-solver): add a lifetime to this /// A concrete constant value #[derive(Debug, Clone, PartialEq, Eq)] pub enum ConstScalar { - Bytes(Box<[u8]>, MemoryMap), + Bytes(Box<[u8]>, MemoryMap<'static>), // FIXME: this is a hack to get around chalk not being able to represent unevaluatable // constants UnevaluatedConst(GeneralConstId, Substitution), @@ -299,6 +326,30 @@ impl Hash for ConstScalar { } } +/// A concrete constant value +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ConstScalarNs<'db> { + Bytes(Box<[u8]>, MemoryMap<'db>), + // FIXME: this is a hack to get around chalk not being able to represent unevaluatable + // constants + UnevaluatedConst(GeneralConstId, Substitution), + /// Case of an unknown value that rustc might know but we don't + // FIXME: this is a hack to get around chalk not being able to represent unevaluatable + // constants + // https://github.com/rust-lang/rust-analyzer/pull/8813#issuecomment-840679177 + // https://rust-lang.zulipchat.com/#narrow/stream/144729-wg-traits/topic/Handling.20non.20evaluatable.20constants'.20equality/near/238386348 + Unknown, +} + +impl Hash for ConstScalarNs<'_> { + fn hash(&self, state: &mut H) { + core::mem::discriminant(self).hash(state); + if let ConstScalarNs::Bytes(b, _) = self { + b.hash(state) + } + } +} + /// Return an index of a parameter in the generic type parameter list by it's id. 
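// Editor's aside, not part of the patch: the `Hash` impls for `ConstScalar` and
// `ConstScalarNs` above hash the enum discriminant plus only the byte payload and skip
// the memory map. A stand-alone sketch of that pattern, with all names invented:
use std::hash::{Hash, Hasher};

enum Scalar {
    Bytes(Box<[u8]>, Vec<u8>), // the second field stands in for the memory map
    Unknown,
}

impl Hash for Scalar {
    fn hash<H: Hasher>(&self, state: &mut H) {
        core::mem::discriminant(self).hash(state);
        if let Scalar::Bytes(bytes, _memory_map) = self {
            bytes.hash(state); // the memory map is intentionally left out of the hash
        }
    }
}

fn main() {
    use std::collections::hash_map::DefaultHasher;
    let mut hasher = DefaultHasher::new();
    Scalar::Bytes(Box::from(&[1u8, 2][..]), Vec::new()).hash(&mut hasher);
    Scalar::Unknown.hash(&mut hasher);
    println!("combined hash: {:x}", hasher.finish());
}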
pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option { generics::generics(db, id.parent).type_or_const_param_idx(id) @@ -311,30 +362,11 @@ where Binders::empty(Interner, value.shifted_in_from(Interner, DebruijnIndex::ONE)) } -pub(crate) fn make_type_and_const_binders>( - which_is_const: impl Iterator>, - value: T, -) -> Binders { - Binders::new( - VariableKinds::from_iter( - Interner, - which_is_const.map(|x| { - if let Some(ty) = x { - chalk_ir::VariableKind::Const(ty) - } else { - chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General) - } - }), - ), - value, - ) -} - pub(crate) fn make_single_type_binders>( value: T, ) -> Binders { Binders::new( - VariableKinds::from_iter( + chalk_ir::VariableKinds::from_iter( Interner, std::iter::once(chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)), ), @@ -353,7 +385,7 @@ pub(crate) fn make_binders>( pub(crate) fn variable_kinds_from_iter( db: &dyn HirDatabase, iter: impl Iterator, -) -> VariableKinds { +) -> VariableKinds { VariableKinds::from_iter( Interner, iter.map(|x| match x { @@ -565,6 +597,27 @@ impl CallableSig { abi: fn_ptr.sig.abi, } } + pub fn from_fn_sig_and_header<'db>( + interner: DbInterner<'db>, + sig: crate::next_solver::Binder<'db, rustc_type_ir::FnSigTys>>, + header: rustc_type_ir::FnHeader>, + ) -> CallableSig { + CallableSig { + // FIXME: what to do about lifetime params? -> return PolyFnSig + params_and_return: Arc::from_iter( + sig.skip_binder() + .inputs_and_output + .iter() + .map(|t| convert_ty_for_result(interner, t)), + ), + is_varargs: header.c_variadic, + safety: match header.safety { + next_solver::abi::Safety::Safe => chalk_ir::Safety::Safe, + next_solver::abi::Safety::Unsafe => chalk_ir::Safety::Unsafe, + }, + abi: header.abi, + } + } pub fn to_fn_ptr(&self) -> FnPointer { FnPointer { @@ -913,23 +966,15 @@ pub fn callable_sig_from_fn_trait( ) .build(); - let block = trait_env.block; - let trait_env = trait_env.env.clone(); - let obligation = - InEnvironment { goal: trait_ref.clone().cast(Interner), environment: trait_env.clone() }; - let canonical = table.canonicalize(obligation.clone()); - if db.trait_solve(krate, block, canonical.cast(Interner)).is_some() { - table.register_obligation(obligation.goal); + let goal: Goal = trait_ref.clone().cast(Interner); + let pred = goal.to_nextsolver(table.interner); + if !table.try_obligation(goal).no_solution() { + table.register_obligation(pred); let return_ty = table.normalize_projection_ty(projection); for fn_x in [FnTrait::Fn, FnTrait::FnMut, FnTrait::FnOnce] { let fn_x_trait = fn_x.get_id(db, krate)?; trait_ref.trait_id = to_chalk_trait_id(fn_x_trait); - let obligation: chalk_ir::InEnvironment> = InEnvironment { - goal: trait_ref.clone().cast(Interner), - environment: trait_env.clone(), - }; - let canonical = table.canonicalize(obligation.clone()); - if db.trait_solve(krate, block, canonical.cast(Interner)).is_some() { + if !table.try_obligation(trait_ref.clone().cast(Interner)).no_solution() { let ret_ty = table.resolve_completely(return_ty); let args_ty = table.resolve_completely(args_ty); let params = args_ty @@ -963,7 +1008,7 @@ struct PlaceholderCollector<'db> { impl PlaceholderCollector<'_> { fn collect(&mut self, idx: PlaceholderIndex) { - let id = from_placeholder_idx(self.db, idx); + let id = from_placeholder_idx(self.db, idx).0; self.placeholders.insert(id); } } @@ -985,7 +1030,7 @@ impl TypeVisitor for PlaceholderCollector<'_> { outer_binder: DebruijnIndex, ) -> std::ops::ControlFlow { let has_placeholder_bits = 
TypeFlags::HAS_TY_PLACEHOLDER | TypeFlags::HAS_CT_PLACEHOLDER; - let TyData { kind, flags } = ty.data(Interner); + let chalk_ir::TyData { kind, flags } = ty.data(Interner); if let TyKind::Placeholder(idx) = kind { self.collect(*idx); @@ -1045,3 +1090,25 @@ pub(crate) enum DeclOrigin { pub(crate) struct DeclContext { pub(crate) origin: DeclOrigin, } + +pub fn setup_tracing() -> Option { + use std::env; + use std::sync::LazyLock; + use tracing_subscriber::{Registry, layer::SubscriberExt}; + use tracing_tree::HierarchicalLayer; + + static ENABLE: LazyLock = LazyLock::new(|| env::var("CHALK_DEBUG").is_ok()); + if !*ENABLE { + return None; + } + + let filter: tracing_subscriber::filter::Targets = + env::var("CHALK_DEBUG").ok().and_then(|it| it.parse().ok()).unwrap_or_default(); + let layer = HierarchicalLayer::default() + .with_indent_lines(true) + .with_ansi(false) + .with_indent_amount(2) + .with_writer(std::io::stderr); + let subscriber = Registry::default().with(filter).with(layer); + Some(tracing::subscriber::set_default(subscriber)) +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index afee9606bd5f8..4d5172fd4f24f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -46,9 +46,9 @@ use stdx::{impl_from, never}; use triomphe::{Arc, ThinArc}; use crate::{ - AliasTy, Binders, BoundVar, CallableSig, Const, DebruijnIndex, DynTy, FnAbi, FnPointer, FnSig, - FnSubst, ImplTrait, ImplTraitId, ImplTraits, Interner, Lifetime, LifetimeData, - LifetimeOutlives, PolyFnSig, ProgramClause, QuantifiedWhereClause, QuantifiedWhereClauses, + AliasTy, Binders, BoundVar, CallableSig, Const, DebruijnIndex, DomainGoal, DynTy, FnAbi, + FnPointer, FnSig, FnSubst, ImplTrait, ImplTraitId, ImplTraits, Interner, Lifetime, + LifetimeData, LifetimeOutlives, PolyFnSig, QuantifiedWhereClause, QuantifiedWhereClauses, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyKind, WhereClause, all_super_traits, consteval::{intern_const_ref, path_to_const, unknown_const, unknown_const_as_generic}, @@ -81,7 +81,7 @@ impl ImplTraitLoweringState { } } -pub(crate) struct PathDiagnosticCallbackData(TypeRefId); +pub(crate) struct PathDiagnosticCallbackData(pub(crate) TypeRefId); #[derive(Debug, Clone)] pub enum LifetimeElisionKind { @@ -299,6 +299,29 @@ impl<'a> TyLoweringContext<'a> { const_type, self.resolver.krate(), ), + hir_def::hir::Expr::UnaryOp { expr: inner_expr, op: hir_def::hir::UnaryOp::Neg } => { + if let hir_def::hir::Expr::Literal(literal) = &self.store[*inner_expr] { + // Only handle negation for signed integers and floats + match literal { + hir_def::hir::Literal::Int(_, _) | hir_def::hir::Literal::Float(_, _) => { + if let Some(negated_literal) = literal.clone().negate() { + intern_const_ref( + self.db, + &negated_literal.into(), + const_type, + self.resolver.krate(), + ) + } else { + unknown_const(const_type) + } + } + // For unsigned integers, chars, bools, etc., negation is not meaningful + _ => unknown_const(const_type), + } + } else { + unknown_const(const_type) + } + } _ => unknown_const(const_type), } } @@ -340,7 +363,13 @@ impl<'a> TyLoweringContext<'a> { res = Some(TypeNs::GenericParam(type_param_id)); match self.type_param_mode { ParamLoweringMode::Placeholder => { - TyKind::Placeholder(to_placeholder_idx(self.db, type_param_id.into())) + let generics = self.generics(); + let idx = generics.type_or_const_param_idx(type_param_id.into()).unwrap(); + 
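// Placeholders now also carry the parameter's position within the item's
// generics: `to_placeholder_idx` takes `(db, id, idx as u32)` (and
// `lt_to_placeholder_idx` the lifetime's index likewise), so lowering first
// looks `idx` up via `Generics::type_or_const_param_idx`, as above.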
TyKind::Placeholder(to_placeholder_idx( + self.db, + type_param_id.into(), + idx as u32, + )) } ParamLoweringMode::Variable => { let idx = @@ -777,7 +806,9 @@ impl<'a> TyLoweringContext<'a> { LifetimeNs::Static => static_lifetime(), LifetimeNs::LifetimeParam(id) => match self.type_param_mode { ParamLoweringMode::Placeholder => { - LifetimeData::Placeholder(lt_to_placeholder_idx(self.db, id)) + let generics = self.generics(); + let idx = generics.lifetime_idx(id).unwrap(); + LifetimeData::Placeholder(lt_to_placeholder_idx(self.db, id, idx as u32)) } ParamLoweringMode::Variable => { let idx = match self.generics().lifetime_idx(id) { @@ -804,15 +835,6 @@ pub(crate) fn callable_item_signature_query(db: &dyn HirDatabase, def: CallableD } } -pub fn associated_type_shorthand_candidates( - db: &dyn HirDatabase, - def: GenericDefId, - res: TypeNs, - mut cb: impl FnMut(&Name, TypeAliasId) -> Option, -) -> Option { - named_associated_type_shorthand_candidates(db, def, res, None, |name, _, id| cb(name, id)) -} - fn named_associated_type_shorthand_candidates( db: &dyn HirDatabase, // If the type parameter is defined in an impl and we're in a method, there @@ -889,7 +911,7 @@ fn named_associated_type_shorthand_candidates( pub(crate) type Diagnostics = Option>; -fn create_diagnostics(diagnostics: Vec) -> Diagnostics { +pub(crate) fn create_diagnostics(diagnostics: Vec) -> Diagnostics { (!diagnostics.is_empty()).then(|| ThinArc::from_header_and_iter((), diagnostics.into_iter())) } @@ -1105,8 +1127,9 @@ pub(crate) fn trait_environment_query( traits_in_scope .push((tr.self_type_parameter(Interner).clone(), tr.hir_trait_id())); } - let program_clause: chalk_ir::ProgramClause = pred.cast(Interner); - clauses.push(program_clause.into_from_env_clause(Interner)); + let program_clause: Binders = + pred.map(|pred| pred.into_from_env_goal(Interner).cast(Interner)); + clauses.push(program_clause); } } } @@ -1119,7 +1142,10 @@ pub(crate) fn trait_environment_query( let substs = TyBuilder::placeholder_subst(db, trait_id); let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution: substs }; let pred = WhereClause::Implemented(trait_ref); - clauses.push(pred.cast::(Interner).into_from_env_clause(Interner)); + clauses.push(Binders::empty( + Interner, + pred.cast::(Interner).into_from_env_goal(Interner), + )); } let subst = generics.placeholder_subst(db); @@ -1128,15 +1154,30 @@ pub(crate) fn trait_environment_query( if let Some(implicitly_sized_clauses) = implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver) { - clauses.extend( - implicitly_sized_clauses.map(|pred| { - pred.cast::(Interner).into_from_env_clause(Interner) - }), - ); + clauses.extend(implicitly_sized_clauses.map(|pred| { + Binders::empty( + Interner, + pred.into_from_env_goal(Interner).cast::(Interner), + ) + })); }; } - let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses); + let clauses = chalk_ir::ProgramClauses::from_iter( + Interner, + clauses.into_iter().map(|g| { + chalk_ir::ProgramClause::new( + Interner, + chalk_ir::ProgramClauseData(g.map(|g| chalk_ir::ProgramClauseImplication { + consequence: g, + conditions: chalk_ir::Goals::empty(Interner), + constraints: chalk_ir::Constraints::empty(Interner), + priority: chalk_ir::ClausePriority::High, + })), + ) + }), + ); + let env = chalk_ir::Environment { clauses }; TraitEnvironment::new(resolver.krate(), None, traits_in_scope.into_boxed_slice(), env) } @@ -1160,24 +1201,8 @@ pub(crate) fn generic_predicates_query( 
generic_predicates_filtered_by(db, def, |_, _| true).0 } -pub(crate) fn generic_predicates_without_parent_query( - db: &dyn HirDatabase, - def: GenericDefId, -) -> GenericPredicates { - db.generic_predicates_without_parent_with_diagnostics(def).0 -} - -/// Resolve the where clause(s) of an item with generics, -/// except the ones inherited from the parent -pub(crate) fn generic_predicates_without_parent_with_diagnostics_query( - db: &dyn HirDatabase, - def: GenericDefId, -) -> (GenericPredicates, Diagnostics) { - generic_predicates_filtered_by(db, def, |_, d| d == def) -} - /// Resolve the where clause(s) of an item with generics, -/// except the ones inherited from the parent +/// with a given filter fn generic_predicates_filtered_by( db: &dyn HirDatabase, def: GenericDefId, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs index 9519c38eeddfd..b0132e4dcbc46 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs @@ -220,13 +220,15 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { }; return (ty, None); } - TypeNs::TraitAliasId(_) => { - // FIXME(trait_alias): Implement trait alias. - return (TyKind::Error.intern(Interner), None); - } TypeNs::GenericParam(param_id) => match self.ctx.type_param_mode { ParamLoweringMode::Placeholder => { - TyKind::Placeholder(to_placeholder_idx(self.ctx.db, param_id.into())) + let generics = self.ctx.generics(); + let idx = generics.type_or_const_param_idx(param_id.into()).unwrap(); + TyKind::Placeholder(to_placeholder_idx( + self.ctx.db, + param_id.into(), + idx as u32, + )) } ParamLoweringMode::Variable => { let idx = match self.ctx.generics().type_or_const_param_idx(param_id.into()) { @@ -311,8 +313,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { TypeNs::AdtId(_) | TypeNs::EnumVariantId(_) | TypeNs::TypeAliasId(_) - | TypeNs::TraitId(_) - | TypeNs::TraitAliasId(_) => {} + | TypeNs::TraitId(_) => {} } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver.rs new file mode 100644 index 0000000000000..0076446a958b0 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver.rs @@ -0,0 +1,1932 @@ +//! Methods for lowering the HIR to types. There are two main cases here: +//! +//! - Lowering a type reference like `&usize` or `Option` to a +//! type: The entry point for this is `TyLoweringContext::lower_ty`. +//! - Building the type for an item: This happens through the `ty` query. +//! +//! This usually involves resolving names, collecting generic arguments etc. 
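// A minimal, self-contained sketch (toy types, not this module's real API) of the
// first case above: lowering a syntactic type reference to a semantic type is a
// recursive walk over the `TypeRef` tree, which is the shape that
// `lower_ty`/`lower_ty_ext` below follow against `rustc_type_ir`'s `TyKind`.
#[derive(Debug)]
enum ToyTypeRef {
    Never,
    Slice(Box<ToyTypeRef>),
    Ref(Box<ToyTypeRef>),
    Tuple(Vec<ToyTypeRef>),
}

#[derive(Debug, PartialEq)]
enum ToyTy {
    Never,
    Slice(Box<ToyTy>),
    Ref(Box<ToyTy>),
    Tuple(Vec<ToyTy>),
}

fn toy_lower(type_ref: &ToyTypeRef) -> ToyTy {
    match type_ref {
        ToyTypeRef::Never => ToyTy::Never,
        ToyTypeRef::Slice(inner) => ToyTy::Slice(Box::new(toy_lower(inner))),
        ToyTypeRef::Ref(inner) => ToyTy::Ref(Box::new(toy_lower(inner))),
        ToyTypeRef::Tuple(inner) => ToyTy::Tuple(inner.iter().map(toy_lower).collect()),
    }
}

fn toy_lower_demo() {
    // `&[!]`: a reference to a slice of the never type.
    let tr = ToyTypeRef::Ref(Box::new(ToyTypeRef::Slice(Box::new(ToyTypeRef::Never))));
    assert_eq!(
        toy_lower(&tr),
        ToyTy::Ref(Box::new(ToyTy::Slice(Box::new(ToyTy::Never))))
    );
}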
+#![allow(unused)] +// FIXME(next-solver): this should get removed as things get moved to rustc_type_ir from chalk_ir +pub(crate) mod path; + +use std::{ + cell::OnceCell, + iter, mem, + ops::{self, Deref, Not as _}, +}; + +use base_db::Crate; +use either::Either; +use hir_def::item_tree::FieldsShape; +use hir_def::{ + AdtId, AssocItemId, CallableDefId, ConstParamId, EnumVariantId, FunctionId, GenericDefId, + GenericParamId, ImplId, ItemContainerId, LocalFieldId, Lookup, StructId, TraitId, TypeAliasId, + TypeOrConstParamId, VariantId, + expr_store::{ + ExpressionStore, + path::{GenericArg, Path}, + }, + hir::generics::{TypeOrConstParamData, WherePredicate}, + lang_item::LangItem, + resolver::{HasResolver, LifetimeNs, Resolver, TypeNs}, + signatures::{FunctionSignature, TraitFlags, TypeAliasFlags}, + type_ref::{ + ConstRef, LifetimeRefId, LiteralConstRef, PathId, TraitBoundModifier, + TraitRef as HirTraitRef, TypeBound, TypeRef, TypeRefId, + }, +}; +use hir_def::{ConstId, StaticId}; +use hir_expand::name::Name; +use intern::sym; +use la_arena::{Arena, ArenaMap, Idx}; +use path::{PathDiagnosticCallback, PathLoweringContext, builtin}; +use rustc_ast_ir::Mutability; +use rustc_hash::FxHashSet; +use rustc_pattern_analysis::Captures; +use rustc_type_ir::{ + AliasTyKind, ConstKind, DebruijnIndex, ExistentialPredicate, ExistentialProjection, + ExistentialTraitRef, FnSig, OutlivesPredicate, + TyKind::{self}, + TypeVisitableExt, + inherent::{GenericArg as _, GenericArgs as _, IntoKind as _, Region as _, SliceLike, Ty as _}, +}; +use salsa::plumbing::AsId; +use smallvec::{SmallVec, smallvec}; +use stdx::never; +use triomphe::Arc; + +use crate::ValueTyDefId; +use crate::{ + FnAbi, ImplTraitId, Interner, ParamKind, TyDefId, TyLoweringDiagnostic, + TyLoweringDiagnosticKind, + consteval_nextsolver::{intern_const_ref, path_to_const, unknown_const_as_generic}, + db::HirDatabase, + generics::{Generics, generics, trait_self_param_idx}, + lower::{Diagnostics, PathDiagnosticCallbackData, create_diagnostics}, + next_solver::{ + AdtDef, AliasTy, Binder, BoundExistentialPredicates, BoundRegionKind, BoundTyKind, + BoundVarKind, BoundVarKinds, Clause, Clauses, Const, DbInterner, EarlyBinder, + EarlyParamRegion, ErrorGuaranteed, GenericArgs, PolyFnSig, Predicate, Region, SolverDefId, + TraitPredicate, TraitRef, Ty, Tys, abi::Safety, mapping::ChalkToNextSolver, + }, +}; + +#[derive(PartialEq, Eq, Debug, Hash)] +pub struct ImplTraits<'db> { + pub(crate) impl_traits: Arena>, +} + +#[derive(PartialEq, Eq, Debug, Hash)] +pub(crate) struct ImplTrait<'db> { + pub(crate) predicates: Vec>, +} + +pub(crate) type ImplTraitIdx<'db> = Idx>; + +#[derive(Debug, Default)] +struct ImplTraitLoweringState<'db> { + /// When turning `impl Trait` into opaque types, we have to collect the + /// bounds at the same time to get the IDs correct (without becoming too + /// complicated). + mode: ImplTraitLoweringMode, + // This is structured as a struct with fields and not as an enum because it helps with the borrow checker. + opaque_type_data: Arena>, + param_and_variable_counter: u16, +} +impl<'db> ImplTraitLoweringState<'db> { + fn new(mode: ImplTraitLoweringMode) -> ImplTraitLoweringState<'db> { + Self { mode, opaque_type_data: Arena::new(), param_and_variable_counter: 0 } + } +} + +#[derive(Debug, Clone)] +pub(crate) enum LifetimeElisionKind<'db> { + /// Create a new anonymous lifetime parameter and reference it. 
+ /// + /// If `report_in_path`, report an error when encountering lifetime elision in a path: + /// ```compile_fail + /// struct Foo<'a> { x: &'a () } + /// async fn foo(x: Foo) {} + /// ``` + /// + /// Note: the error should not trigger when the elided lifetime is in a pattern or + /// expression-position path: + /// ``` + /// struct Foo<'a> { x: &'a () } + /// async fn foo(Foo { x: _ }: Foo<'_>) {} + /// ``` + AnonymousCreateParameter { report_in_path: bool }, + + /// Replace all anonymous lifetimes by provided lifetime. + Elided(Region<'db>), + + /// Give a hard error when either `&` or `'_` is written. Used to + /// rule out things like `where T: Foo<'_>`. Does not imply an + /// error on default object bounds (e.g., `Box`). + AnonymousReportError, + + /// Resolves elided lifetimes to `'static` if there are no other lifetimes in scope, + /// otherwise give a warning that the previous behavior of introducing a new early-bound + /// lifetime is a bug and will be removed (if `only_lint` is enabled). + StaticIfNoLifetimeInScope { only_lint: bool }, + + /// Signal we cannot find which should be the anonymous lifetime. + ElisionFailure, + + /// Infer all elided lifetimes. + Infer, +} + +impl<'db> LifetimeElisionKind<'db> { + #[inline] + pub(crate) fn for_const( + interner: DbInterner<'db>, + const_parent: ItemContainerId, + ) -> LifetimeElisionKind<'db> { + match const_parent { + ItemContainerId::ExternBlockId(_) | ItemContainerId::ModuleId(_) => { + LifetimeElisionKind::Elided(Region::new_static(interner)) + } + ItemContainerId::ImplId(_) => { + LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: true } + } + ItemContainerId::TraitId(_) => { + LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: false } + } + } + } + + #[inline] + pub(crate) fn for_fn_params(data: &FunctionSignature) -> LifetimeElisionKind<'db> { + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: data.is_async() } + } + + #[inline] + pub(crate) fn for_fn_ret(interner: DbInterner<'db>) -> LifetimeElisionKind<'db> { + // FIXME: We should use the elided lifetime here, or `ElisionFailure`. + LifetimeElisionKind::Elided(Region::error(interner)) + } +} + +#[derive(Debug)] +pub(crate) struct TyLoweringContext<'db, 'a> { + pub db: &'db dyn HirDatabase, + interner: DbInterner<'db>, + resolver: &'a Resolver<'db>, + store: &'a ExpressionStore, + def: GenericDefId, + generics: OnceCell, + in_binders: DebruijnIndex, + impl_trait_mode: ImplTraitLoweringState<'db>, + /// Tracks types with explicit `?Sized` bounds. 
+ pub(crate) unsized_types: FxHashSet>, + pub(crate) diagnostics: Vec, + lifetime_elision: LifetimeElisionKind<'db>, +} + +impl<'db, 'a> TyLoweringContext<'db, 'a> { + pub(crate) fn new( + db: &'db dyn HirDatabase, + resolver: &'a Resolver<'db>, + store: &'a ExpressionStore, + def: GenericDefId, + lifetime_elision: LifetimeElisionKind<'db>, + ) -> Self { + let impl_trait_mode = ImplTraitLoweringState::new(ImplTraitLoweringMode::Disallowed); + let in_binders = DebruijnIndex::ZERO; + Self { + db, + interner: DbInterner::new_with(db, Some(resolver.krate()), None), + resolver, + def, + generics: Default::default(), + store, + in_binders, + impl_trait_mode, + unsized_types: FxHashSet::default(), + diagnostics: Vec::new(), + lifetime_elision, + } + } + + pub(crate) fn set_lifetime_elision(&mut self, lifetime_elision: LifetimeElisionKind<'db>) { + self.lifetime_elision = lifetime_elision; + } + + pub(crate) fn with_debruijn( + &mut self, + debruijn: DebruijnIndex, + f: impl FnOnce(&mut TyLoweringContext<'db, '_>) -> T, + ) -> T { + let old_debruijn = mem::replace(&mut self.in_binders, debruijn); + let result = f(self); + self.in_binders = old_debruijn; + result + } + + pub(crate) fn with_shifted_in( + &mut self, + debruijn: DebruijnIndex, + f: impl FnOnce(&mut TyLoweringContext<'db, '_>) -> T, + ) -> T { + self.with_debruijn(self.in_binders.shifted_in(debruijn.as_u32()), f) + } + + pub(crate) fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self { + Self { impl_trait_mode: ImplTraitLoweringState::new(impl_trait_mode), ..self } + } + + pub(crate) fn impl_trait_mode(&mut self, impl_trait_mode: ImplTraitLoweringMode) -> &mut Self { + self.impl_trait_mode = ImplTraitLoweringState::new(impl_trait_mode); + self + } + + pub(crate) fn push_diagnostic(&mut self, type_ref: TypeRefId, kind: TyLoweringDiagnosticKind) { + self.diagnostics.push(TyLoweringDiagnostic { source: type_ref, kind }); + } +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Default)] +pub(crate) enum ImplTraitLoweringMode { + /// `impl Trait` gets lowered into an opaque type that doesn't unify with + /// anything except itself. This is used in places where values flow 'out', + /// i.e. for arguments of the function we're currently checking, and return + /// types of functions we're calling. + Opaque, + /// `impl Trait` is disallowed and will be an error. 
+ #[default] + Disallowed, +} + +impl<'db, 'a> TyLoweringContext<'db, 'a> { + pub(crate) fn lower_ty(&mut self, type_ref: TypeRefId) -> Ty<'db> { + self.lower_ty_ext(type_ref).0 + } + + pub(crate) fn lower_const(&mut self, const_ref: &ConstRef, const_type: Ty<'db>) -> Const<'db> { + let const_ref = &self.store[const_ref.expr]; + match const_ref { + hir_def::hir::Expr::Path(path) => { + path_to_const(self.db, self.resolver, path, || self.generics(), const_type) + .unwrap_or_else(|| unknown_const(const_type)) + } + hir_def::hir::Expr::Literal(literal) => intern_const_ref( + self.db, + &match *literal { + hir_def::hir::Literal::Float(_, _) + | hir_def::hir::Literal::String(_) + | hir_def::hir::Literal::ByteString(_) + | hir_def::hir::Literal::CString(_) => LiteralConstRef::Unknown, + hir_def::hir::Literal::Char(c) => LiteralConstRef::Char(c), + hir_def::hir::Literal::Bool(b) => LiteralConstRef::Bool(b), + hir_def::hir::Literal::Int(val, _) => LiteralConstRef::Int(val), + hir_def::hir::Literal::Uint(val, _) => LiteralConstRef::UInt(val), + }, + const_type, + self.resolver.krate(), + ), + hir_def::hir::Expr::UnaryOp { expr: inner_expr, op: hir_def::hir::UnaryOp::Neg } => { + if let hir_def::hir::Expr::Literal(literal) = &self.store[*inner_expr] { + // Only handle negation for signed integers and floats + match literal { + hir_def::hir::Literal::Int(_, _) | hir_def::hir::Literal::Float(_, _) => { + if let Some(negated_literal) = literal.clone().negate() { + intern_const_ref( + self.db, + &negated_literal.into(), + const_type, + self.resolver.krate(), + ) + } else { + unknown_const(const_type) + } + } + // For unsigned integers, chars, bools, etc., negation is not meaningful + _ => unknown_const(const_type), + } + } else { + unknown_const(const_type) + } + } + _ => unknown_const(const_type), + } + } + + pub(crate) fn lower_path_as_const(&mut self, path: &Path, const_type: Ty<'db>) -> Const<'db> { + path_to_const(self.db, self.resolver, path, || self.generics(), const_type) + .unwrap_or_else(|| unknown_const(const_type)) + } + + fn generics(&self) -> &Generics { + self.generics.get_or_init(|| generics(self.db, self.def)) + } + + #[tracing::instrument(skip(self), ret)] + pub(crate) fn lower_ty_ext(&mut self, type_ref_id: TypeRefId) -> (Ty<'db>, Option) { + let interner = self.interner; + let mut res = None; + let type_ref = &self.store[type_ref_id]; + tracing::debug!(?type_ref); + let ty = match type_ref { + TypeRef::Never => Ty::new(interner, TyKind::Never), + TypeRef::Tuple(inner) => { + let inner_tys = inner.iter().map(|&tr| self.lower_ty(tr)); + Ty::new_tup_from_iter(interner, inner_tys) + } + TypeRef::Path(path) => { + let (ty, res_) = + self.lower_path(path, PathId::from_type_ref_unchecked(type_ref_id)); + res = res_; + ty + } + &TypeRef::TypeParam(type_param_id) => { + res = Some(TypeNs::GenericParam(type_param_id)); + + let generics = self.generics(); + let (idx, data) = + generics.type_or_const_param(type_param_id.into()).expect("matching generics"); + let type_data = match data { + TypeOrConstParamData::TypeParamData(ty) => ty, + _ => unreachable!(), + }; + Ty::new_param( + self.interner, + type_param_id, + idx as u32, + type_data + .name + .as_ref() + .map_or_else(|| sym::MISSING_NAME.clone(), |d| d.symbol().clone()), + ) + } + &TypeRef::RawPtr(inner, mutability) => { + let inner_ty = self.lower_ty(inner); + Ty::new(interner, TyKind::RawPtr(inner_ty, lower_mutability(mutability))) + } + TypeRef::Array(array) => { + let inner_ty = self.lower_ty(array.ty); + let const_len = 
self.lower_const(&array.len, Ty::new_usize(interner)); + Ty::new_array_with_const_len(interner, inner_ty, const_len) + } + &TypeRef::Slice(inner) => { + let inner_ty = self.lower_ty(inner); + Ty::new_slice(interner, inner_ty) + } + TypeRef::Reference(ref_) => { + let inner_ty = self.lower_ty(ref_.ty); + // FIXME: It should infer the eldided lifetimes instead of stubbing with error + let lifetime = ref_ + .lifetime + .map_or_else(|| Region::error(interner), |lr| self.lower_lifetime(lr)); + Ty::new_ref(interner, lifetime, inner_ty, lower_mutability(ref_.mutability)) + } + TypeRef::Placeholder => Ty::new_error(interner, ErrorGuaranteed), + TypeRef::Fn(fn_) => { + let substs = self.with_shifted_in( + DebruijnIndex::from_u32(1), + |ctx: &mut TyLoweringContext<'_, '_>| { + Tys::new_from_iter( + interner, + fn_.params.iter().map(|&(_, tr)| ctx.lower_ty(tr)), + ) + }, + ); + Ty::new_fn_ptr( + interner, + Binder::dummy(FnSig { + abi: fn_.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol), + safety: if fn_.is_unsafe { Safety::Unsafe } else { Safety::Safe }, + c_variadic: fn_.is_varargs, + inputs_and_output: substs, + }), + ) + } + TypeRef::DynTrait(bounds) => self.lower_dyn_trait(bounds), + TypeRef::ImplTrait(bounds) => { + match self.impl_trait_mode.mode { + ImplTraitLoweringMode::Opaque => { + let origin = match self.resolver.generic_def() { + Some(GenericDefId::FunctionId(it)) => Either::Left(it), + Some(GenericDefId::TypeAliasId(it)) => Either::Right(it), + _ => panic!( + "opaque impl trait lowering must be in function or type alias" + ), + }; + + // this dance is to make sure the data is in the right + // place even if we encounter more opaque types while + // lowering the bounds + let idx = self + .impl_trait_mode + .opaque_type_data + .alloc(ImplTrait { predicates: Vec::default() }); + + // FIXME(next-solver): this from_raw/into_raw dance isn't nice, but it's minimal + let impl_trait_id = origin.either( + |f| ImplTraitId::ReturnTypeImplTrait(f, Idx::from_raw(idx.into_raw())), + |a| ImplTraitId::TypeAliasImplTrait(a, Idx::from_raw(idx.into_raw())), + ); + let opaque_ty_id: SolverDefId = + self.db.intern_impl_trait_id(impl_trait_id).into(); + + // We don't want to lower the bounds inside the binders + // we're currently in, because they don't end up inside + // those binders. E.g. when we have `impl Trait>`, the `impl OtherTrait` can't refer + // to the self parameter from `impl Trait`, and the + // bounds aren't actually stored nested within each + // other, but separately. So if the `T` refers to a type + // parameter of the outer function, it's just one binder + // away instead of two. + let actual_opaque_type_data = self + .with_debruijn(DebruijnIndex::ZERO, |ctx| { + ctx.lower_impl_trait(opaque_ty_id, bounds, self.resolver.krate()) + }); + self.impl_trait_mode.opaque_type_data[idx] = actual_opaque_type_data; + + let args = GenericArgs::identity_for_item(self.interner, opaque_ty_id); + Ty::new_alias( + self.interner, + AliasTyKind::Opaque, + AliasTy::new_from_args(self.interner, opaque_ty_id, args), + ) + } + ImplTraitLoweringMode::Disallowed => { + // FIXME: report error + Ty::new_error(self.interner, ErrorGuaranteed) + } + } + } + TypeRef::Error => Ty::new_error(self.interner, ErrorGuaranteed), + }; + (ty, res) + } + + /// This is only for `generic_predicates_for_param`, where we can't just + /// lower the self types of the predicates since that could lead to cycles. + /// So we just check here if the `type_ref` resolves to a generic param, and which. 
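/// For example, with `fn f<T>() where T: Default, Vec<T>: Default`, the target
/// `T` resolves to a generic param and its id is returned here, while a target
/// like `Vec<T>` (or any multi-segment path) yields `None`.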
+ fn lower_ty_only_param(&self, type_ref: TypeRefId) -> Option { + let type_ref = &self.store[type_ref]; + let path = match type_ref { + TypeRef::Path(path) => path, + &TypeRef::TypeParam(idx) => return Some(idx.into()), + _ => return None, + }; + if path.type_anchor().is_some() { + return None; + } + if path.segments().len() > 1 { + return None; + } + let resolution = match self.resolver.resolve_path_in_type_ns(self.db, path) { + Some((it, None, _)) => it, + _ => return None, + }; + match resolution { + TypeNs::GenericParam(param_id) => Some(param_id.into()), + _ => None, + } + } + + #[inline] + fn on_path_diagnostic_callback(type_ref: TypeRefId) -> PathDiagnosticCallback<'static, 'db> { + PathDiagnosticCallback { + data: Either::Left(PathDiagnosticCallbackData(type_ref)), + callback: |data, this, diag| { + let type_ref = data.as_ref().left().unwrap().0; + this.push_diagnostic(type_ref, TyLoweringDiagnosticKind::PathDiagnostic(diag)) + }, + } + } + + #[inline] + fn at_path(&mut self, path_id: PathId) -> PathLoweringContext<'_, 'a, 'db> { + PathLoweringContext::new( + self, + Self::on_path_diagnostic_callback(path_id.type_ref()), + &self.store[path_id], + ) + } + + pub(crate) fn lower_path(&mut self, path: &Path, path_id: PathId) -> (Ty<'db>, Option) { + // Resolve the path (in type namespace) + if let Some(type_ref) = path.type_anchor() { + let (ty, res) = self.lower_ty_ext(type_ref); + let mut ctx = self.at_path(path_id); + return ctx.lower_ty_relative_path(ty, res); + } + + let mut ctx = self.at_path(path_id); + let (resolution, remaining_index) = match ctx.resolve_path_in_type_ns() { + Some(it) => it, + None => return (Ty::new_error(self.interner, ErrorGuaranteed), None), + }; + + if matches!(resolution, TypeNs::TraitId(_)) && remaining_index.is_none() { + // trait object type without dyn + let bound = TypeBound::Path(path_id, TraitBoundModifier::None); + let ty = self.lower_dyn_trait(&[bound]); + return (ty, None); + } + + ctx.lower_partly_resolved_path(resolution, false) + } + + fn lower_trait_ref_from_path( + &mut self, + path_id: PathId, + explicit_self_ty: Ty<'db>, + ) -> Option<(TraitRef<'db>, PathLoweringContext<'_, 'a, 'db>)> { + let mut ctx = self.at_path(path_id); + let resolved = match ctx.resolve_path_in_type_ns_fully()? { + // FIXME(trait_alias): We need to handle trait alias here. + TypeNs::TraitId(tr) => tr, + _ => return None, + }; + Some((ctx.lower_trait_ref_from_resolved_path(resolved, explicit_self_ty), ctx)) + } + + fn lower_trait_ref( + &mut self, + trait_ref: &HirTraitRef, + explicit_self_ty: Ty<'db>, + ) -> Option> { + self.lower_trait_ref_from_path(trait_ref.path, explicit_self_ty).map(|it| it.0) + } + + pub(crate) fn lower_where_predicate<'b>( + &'b mut self, + where_predicate: &'b WherePredicate, + ignore_bindings: bool, + generics: &Generics, + predicate_filter: PredicateFilter, + ) -> impl Iterator> + use<'a, 'b, 'db> { + match where_predicate { + WherePredicate::ForLifetime { target, bound, .. 
} + | WherePredicate::TypeBound { target, bound } => { + if let PredicateFilter::SelfTrait = predicate_filter { + let target_type = &self.store[*target]; + let self_type = 'is_self: { + if let TypeRef::Path(path) = target_type + && path.is_self_type() + { + break 'is_self true; + } + if let TypeRef::TypeParam(param) = target_type + && generics[param.local_id()].is_trait_self() + { + break 'is_self true; + } + false + }; + if !self_type { + return Either::Left(Either::Left(iter::empty())); + } + } + let self_ty = self.lower_ty(*target); + Either::Left(Either::Right(self.lower_type_bound(bound, self_ty, ignore_bindings))) + } + &WherePredicate::Lifetime { bound, target } => { + Either::Right(iter::once(Clause(Predicate::new( + self.interner, + Binder::dummy(rustc_type_ir::PredicateKind::Clause( + rustc_type_ir::ClauseKind::RegionOutlives(OutlivesPredicate( + self.lower_lifetime(bound), + self.lower_lifetime(target), + )), + )), + )))) + } + } + .into_iter() + } + + pub(crate) fn lower_type_bound<'b>( + &'b mut self, + bound: &'b TypeBound, + self_ty: Ty<'db>, + ignore_bindings: bool, + ) -> impl Iterator> + use<'b, 'a, 'db> { + let interner = self.interner; + let mut assoc_bounds = None; + let mut clause = None; + match bound { + &TypeBound::Path(path, TraitBoundModifier::None) | &TypeBound::ForLifetime(_, path) => { + // FIXME Don't silently drop the hrtb lifetimes here + if let Some((trait_ref, mut ctx)) = self.lower_trait_ref_from_path(path, self_ty) { + // FIXME(sized-hierarchy): Remove this bound modifications once we have implemented + // sized-hierarchy correctly. + let meta_sized = LangItem::MetaSized + .resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate()); + let pointee_sized = LangItem::PointeeSized + .resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate()); + if meta_sized.is_some_and(|it| it == trait_ref.def_id.0) { + // Ignore this bound + } else if pointee_sized.is_some_and(|it| it == trait_ref.def_id.0) { + // Regard this as `?Sized` bound + ctx.ty_ctx().unsized_types.insert(self_ty); + } else { + if !ignore_bindings { + assoc_bounds = ctx.assoc_type_bindings_from_type_bound(trait_ref); + } + clause = Some(Clause(Predicate::new( + interner, + Binder::dummy(rustc_type_ir::PredicateKind::Clause( + rustc_type_ir::ClauseKind::Trait(TraitPredicate { + trait_ref, + polarity: rustc_type_ir::PredicatePolarity::Positive, + }), + )), + ))); + } + } + } + &TypeBound::Path(path, TraitBoundModifier::Maybe) => { + let sized_trait = LangItem::Sized.resolve_trait(self.db, self.resolver.krate()); + // Don't lower associated type bindings as the only possible relaxed trait bound + // `?Sized` has no of them. + // If we got another trait here ignore the bound completely. 
+ let trait_id = self + .lower_trait_ref_from_path(path, self_ty) + .map(|(trait_ref, _)| trait_ref.def_id.0); + if trait_id == sized_trait { + self.unsized_types.insert(self_ty); + } + } + &TypeBound::Lifetime(l) => { + let lifetime = self.lower_lifetime(l); + clause = Some(Clause(Predicate::new( + self.interner, + Binder::dummy(rustc_type_ir::PredicateKind::Clause( + rustc_type_ir::ClauseKind::TypeOutlives(OutlivesPredicate( + self_ty, lifetime, + )), + )), + ))); + } + TypeBound::Use(_) | TypeBound::Error => {} + } + clause.into_iter().chain(assoc_bounds.into_iter().flatten()) + } + + fn lower_dyn_trait(&mut self, bounds: &[TypeBound]) -> Ty<'db> { + let interner = self.interner; + // FIXME: we should never create non-existential predicates in the first place + // For now, use an error type so we don't run into dummy binder issues + let self_ty = Ty::new_error(interner, ErrorGuaranteed); + // INVARIANT: The principal trait bound, if present, must come first. Others may be in any + // order but should be in the same order for the same set but possibly different order of + // bounds in the input. + // INVARIANT: If this function returns `DynTy`, there should be at least one trait bound. + // These invariants are utilized by `TyExt::dyn_trait()` and chalk. + let mut lifetime = None; + let bounds = self.with_shifted_in(DebruijnIndex::from_u32(1), |ctx| { + let mut lowered_bounds: Vec< + rustc_type_ir::Binder, ExistentialPredicate>>, + > = Vec::new(); + for b in bounds { + let db = ctx.db; + ctx.lower_type_bound(b, self_ty, false).for_each(|b| { + if let Some(bound) = b + .kind() + .map_bound(|c| match c { + rustc_type_ir::ClauseKind::Trait(t) => { + let id = t.def_id(); + let is_auto = + db.trait_signature(id.0).flags.contains(TraitFlags::AUTO); + if is_auto { + Some(ExistentialPredicate::AutoTrait(t.def_id())) + } else { + Some(ExistentialPredicate::Trait( + ExistentialTraitRef::new_from_args( + interner, + t.def_id(), + GenericArgs::new_from_iter( + interner, + t.trait_ref.args.iter().skip(1), + ), + ), + )) + } + } + rustc_type_ir::ClauseKind::Projection(p) => { + Some(ExistentialPredicate::Projection( + ExistentialProjection::new_from_args( + interner, + p.def_id(), + GenericArgs::new_from_iter( + interner, + p.projection_term.args.iter().skip(1), + ), + p.term, + ), + )) + } + rustc_type_ir::ClauseKind::TypeOutlives(outlives_predicate) => { + lifetime = Some(outlives_predicate.1); + None + } + rustc_type_ir::ClauseKind::RegionOutlives(_) + | rustc_type_ir::ClauseKind::ConstArgHasType(_, _) + | rustc_type_ir::ClauseKind::WellFormed(_) + | rustc_type_ir::ClauseKind::ConstEvaluatable(_) + | rustc_type_ir::ClauseKind::HostEffect(_) + | rustc_type_ir::ClauseKind::UnstableFeature(_) => unreachable!(), + }) + .transpose() + { + lowered_bounds.push(bound); + } + }) + } + + let mut multiple_regular_traits = false; + let mut multiple_same_projection = false; + lowered_bounds.sort_unstable_by(|lhs, rhs| { + use std::cmp::Ordering; + match ((*lhs).skip_binder(), (*rhs).skip_binder()) { + (ExistentialPredicate::Trait(_), ExistentialPredicate::Trait(_)) => { + multiple_regular_traits = true; + // Order doesn't matter - we error + Ordering::Equal + } + ( + ExistentialPredicate::AutoTrait(lhs_id), + ExistentialPredicate::AutoTrait(rhs_id), + ) => lhs_id.0.cmp(&rhs_id.0), + (ExistentialPredicate::Trait(_), _) => Ordering::Less, + (_, ExistentialPredicate::Trait(_)) => Ordering::Greater, + (ExistentialPredicate::AutoTrait(_), _) => Ordering::Less, + (_, ExistentialPredicate::AutoTrait(_)) => 
Ordering::Greater, + ( + ExistentialPredicate::Projection(lhs), + ExistentialPredicate::Projection(rhs), + ) => { + let lhs_id = match lhs.def_id { + SolverDefId::TypeAliasId(id) => id, + _ => unreachable!(), + }; + let rhs_id = match rhs.def_id { + SolverDefId::TypeAliasId(id) => id, + _ => unreachable!(), + }; + // We only compare the `associated_ty_id`s. We shouldn't have + // multiple bounds for an associated type in the correct Rust code, + // and if we do, we error out. + if lhs_id == rhs_id { + multiple_same_projection = true; + } + lhs_id.as_id().index().cmp(&rhs_id.as_id().index()) + } + } + }); + + if multiple_regular_traits || multiple_same_projection { + return None; + } + + if !lowered_bounds.first().map_or(false, |b| { + matches!( + b.as_ref().skip_binder(), + ExistentialPredicate::Trait(_) | ExistentialPredicate::AutoTrait(_) + ) + }) { + return None; + } + + // As multiple occurrences of the same auto traits *are* permitted, we deduplicate the + // bounds. We shouldn't have repeated elements besides auto traits at this point. + lowered_bounds.dedup(); + + Some(BoundExistentialPredicates::new_from_iter(interner, lowered_bounds)) + }); + + if let Some(bounds) = bounds { + let region = match lifetime { + Some(it) => match it.kind() { + rustc_type_ir::RegionKind::ReBound(db, var) => Region::new_bound( + self.interner, + db.shifted_out_to_binder(DebruijnIndex::from_u32(2)), + var, + ), + _ => it, + }, + None => Region::new_static(self.interner), + }; + Ty::new_dynamic(self.interner, bounds, region) + } else { + // FIXME: report error + // (additional non-auto traits, associated type rebound, or no resolved trait) + Ty::new_error(self.interner, ErrorGuaranteed) + } + } + + fn lower_impl_trait( + &mut self, + def_id: SolverDefId, + bounds: &[TypeBound], + krate: Crate, + ) -> ImplTrait<'db> { + let interner = self.interner; + cov_mark::hit!(lower_rpit); + let args = GenericArgs::identity_for_item(interner, def_id); + let self_ty = Ty::new_alias( + self.interner, + rustc_type_ir::AliasTyKind::Opaque, + AliasTy::new_from_args(interner, def_id, args), + ); + let predicates = self.with_shifted_in(DebruijnIndex::from_u32(1), |ctx| { + let mut predicates = Vec::new(); + for b in bounds { + predicates.extend(ctx.lower_type_bound(b, self_ty, false)); + } + + if !ctx.unsized_types.contains(&self_ty) { + let sized_trait = LangItem::Sized.resolve_trait(self.db, krate); + let sized_clause = sized_trait.map(|trait_id| { + let trait_ref = TraitRef::new_from_args( + interner, + trait_id.into(), + GenericArgs::new_from_iter(interner, [self_ty.into()]), + ); + Clause(Predicate::new( + interner, + Binder::dummy(rustc_type_ir::PredicateKind::Clause( + rustc_type_ir::ClauseKind::Trait(TraitPredicate { + trait_ref, + polarity: rustc_type_ir::PredicatePolarity::Positive, + }), + )), + )) + }); + predicates.extend(sized_clause); + } + predicates.shrink_to_fit(); + predicates + }); + ImplTrait { predicates } + } + + pub(crate) fn lower_lifetime(&self, lifetime: LifetimeRefId) -> Region<'db> { + match self.resolver.resolve_lifetime(&self.store[lifetime]) { + Some(resolution) => match resolution { + LifetimeNs::Static => Region::new_static(self.interner), + LifetimeNs::LifetimeParam(id) => { + let idx = match self.generics().lifetime_idx(id) { + None => return Region::error(self.interner), + Some(idx) => idx, + }; + Region::new_early_param( + self.interner, + EarlyParamRegion { index: idx as u32, id }, + ) + } + }, + None => Region::error(self.interner), + } + } +} + +pub(crate) fn lower_mutability(m: 
hir_def::type_ref::Mutability) -> Mutability { + match m { + hir_def::type_ref::Mutability::Shared => Mutability::Not, + hir_def::type_ref::Mutability::Mut => Mutability::Mut, + } +} + +fn unknown_const(_ty: Ty<'_>) -> Const<'_> { + Const::new(DbInterner::conjure(), ConstKind::Error(ErrorGuaranteed)) +} + +pub(crate) fn impl_trait_query<'db>( + db: &'db dyn HirDatabase, + impl_id: ImplId, +) -> Option>> { + db.impl_trait_with_diagnostics_ns(impl_id).map(|it| it.0) +} + +pub(crate) fn impl_trait_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + impl_id: ImplId, +) -> Option<(EarlyBinder<'db, TraitRef<'db>>, Diagnostics)> { + let impl_data = db.impl_signature(impl_id); + let resolver = impl_id.resolver(db); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &impl_data.store, + impl_id.into(), + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true }, + ); + let self_ty = db.impl_self_ty_ns(impl_id).skip_binder(); + let target_trait = impl_data.target_trait.as_ref()?; + let trait_ref = EarlyBinder::bind(ctx.lower_trait_ref(target_trait, self_ty)?); + Some((trait_ref, create_diagnostics(ctx.diagnostics))) +} + +pub(crate) fn return_type_impl_traits<'db>( + db: &'db dyn HirDatabase, + def: hir_def::FunctionId, +) -> Option>>> { + // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe + let data = db.function_signature(def); + let resolver = def.resolver(db); + let mut ctx_ret = + TyLoweringContext::new(db, &resolver, &data.store, def.into(), LifetimeElisionKind::Infer) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); + if let Some(ret_type) = data.ret_type { + let _ret = ctx_ret.lower_ty(ret_type); + } + let return_type_impl_traits = + ImplTraits { impl_traits: ctx_ret.impl_trait_mode.opaque_type_data }; + if return_type_impl_traits.impl_traits.is_empty() { + None + } else { + Some(Arc::new(EarlyBinder::bind(return_type_impl_traits))) + } +} + +pub(crate) fn type_alias_impl_traits<'db>( + db: &'db dyn HirDatabase, + def: hir_def::TypeAliasId, +) -> Option>>> { + let data = db.type_alias_signature(def); + let resolver = def.resolver(db); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &data.store, + def.into(), + LifetimeElisionKind::AnonymousReportError, + ) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); + if let Some(type_ref) = data.ty { + let _ty = ctx.lower_ty(type_ref); + } + let type_alias_impl_traits = ImplTraits { impl_traits: ctx.impl_trait_mode.opaque_type_data }; + if type_alias_impl_traits.impl_traits.is_empty() { + None + } else { + Some(Arc::new(EarlyBinder::bind(type_alias_impl_traits))) + } +} + +/// Build the declared type of an item. This depends on the namespace; e.g. for +/// `struct Foo(usize)`, we have two types: The type of the struct itself, and +/// the constructor function `(usize) -> Foo` which lives in the values +/// namespace. +pub(crate) fn ty_query<'db>(db: &'db dyn HirDatabase, def: TyDefId) -> EarlyBinder<'db, Ty<'db>> { + let interner = DbInterner::new_with(db, None, None); + match def { + TyDefId::BuiltinType(it) => EarlyBinder::bind(builtin(interner, it)), + TyDefId::AdtId(it) => EarlyBinder::bind(Ty::new_adt( + interner, + AdtDef::new(it, interner), + GenericArgs::identity_for_item(interner, it.into()), + )), + TyDefId::TypeAliasId(it) => db.type_for_type_alias_with_diagnostics_ns(it).0, + } +} + +/// Build the declared type of a function. This should not need to look at the +/// function body. 
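/// For example, `fn foo(x: u32) -> bool` gets the zero-sized fn-def type here
/// (`FnDef(foo, [])` in `rustc_type_ir` terms), not the fn-pointer type
/// `fn(u32) -> bool`; generic functions get their identity `GenericArgs`.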
+fn type_for_fn<'db>(db: &'db dyn HirDatabase, def: FunctionId) -> EarlyBinder<'db, Ty<'db>> { + let interner = DbInterner::new_with(db, None, None); + EarlyBinder::bind(Ty::new_fn_def( + interner, + CallableDefId::FunctionId(def).into(), + GenericArgs::identity_for_item(interner, def.into()), + )) +} + +/// Build the declared type of a const. +fn type_for_const<'db>(db: &'db dyn HirDatabase, def: ConstId) -> EarlyBinder<'db, Ty<'db>> { + let resolver = def.resolver(db); + let data = db.const_signature(def); + let parent = def.loc(db).container; + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &data.store, + def.into(), + LifetimeElisionKind::AnonymousReportError, + ); + ctx.set_lifetime_elision(LifetimeElisionKind::for_const(ctx.interner, parent)); + EarlyBinder::bind(ctx.lower_ty(data.type_ref)) +} + +/// Build the declared type of a static. +fn type_for_static<'db>(db: &'db dyn HirDatabase, def: StaticId) -> EarlyBinder<'db, Ty<'db>> { + let resolver = def.resolver(db); + let module = resolver.module(); + let interner = DbInterner::new_with(db, Some(module.krate()), module.containing_block()); + let data = db.static_signature(def); + let parent = def.loc(db).container; + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &data.store, + def.into(), + LifetimeElisionKind::AnonymousReportError, + ); + ctx.set_lifetime_elision(LifetimeElisionKind::Elided(Region::new_static(ctx.interner))); + EarlyBinder::bind(ctx.lower_ty(data.type_ref)) +} + +/// Build the type of a tuple struct constructor. +fn type_for_struct_constructor<'db>( + db: &'db dyn HirDatabase, + def: StructId, +) -> Option>> { + let struct_data = def.fields(db); + match struct_data.shape { + FieldsShape::Record => None, + FieldsShape::Unit => Some(type_for_adt(db, def.into())), + FieldsShape::Tuple => { + let interner = DbInterner::new_with(db, None, None); + Some(EarlyBinder::bind(Ty::new_fn_def( + interner, + CallableDefId::StructId(def).into(), + GenericArgs::identity_for_item(interner, def.into()), + ))) + } + } +} + +/// Build the type of a tuple enum variant constructor. 
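/// For `enum E { V(u32), W }`: `E::V` gets a fn-def type (conceptually
/// `fn(u32) -> E`), the unit variant `W` gets the enum's ADT type, and record
/// variants return `None` since they have no value constructor.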
+fn type_for_enum_variant_constructor<'db>( + db: &'db dyn HirDatabase, + def: EnumVariantId, +) -> Option>> { + let struct_data = def.fields(db); + match struct_data.shape { + FieldsShape::Record => None, + FieldsShape::Unit => Some(type_for_adt(db, def.loc(db).parent.into())), + FieldsShape::Tuple => { + let interner = DbInterner::new_with(db, None, None); + Some(EarlyBinder::bind(Ty::new_fn_def( + interner, + CallableDefId::EnumVariantId(def).into(), + GenericArgs::identity_for_item(interner, def.loc(db).parent.into()), + ))) + } + } +} + +pub(crate) fn value_ty_query<'db>( + db: &'db dyn HirDatabase, + def: ValueTyDefId, +) -> Option>> { + match def { + ValueTyDefId::FunctionId(it) => Some(type_for_fn(db, it)), + ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it), + ValueTyDefId::UnionId(it) => Some(type_for_adt(db, it.into())), + ValueTyDefId::EnumVariantId(it) => type_for_enum_variant_constructor(db, it), + ValueTyDefId::ConstId(it) => Some(type_for_const(db, it)), + ValueTyDefId::StaticId(it) => Some(type_for_static(db, it)), + } +} + +pub(crate) fn type_for_type_alias_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + t: TypeAliasId, +) -> (EarlyBinder<'db, Ty<'db>>, Diagnostics) { + let type_alias_data = db.type_alias_signature(t); + let mut diags = None; + let resolver = t.resolver(db); + let interner = DbInterner::new_with(db, Some(resolver.krate()), None); + let inner = if type_alias_data.flags.contains(TypeAliasFlags::IS_EXTERN) { + EarlyBinder::bind(Ty::new_foreign(interner, t.into())) + } else { + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &type_alias_data.store, + t.into(), + LifetimeElisionKind::AnonymousReportError, + ) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); + let res = EarlyBinder::bind( + type_alias_data + .ty + .map(|type_ref| ctx.lower_ty(type_ref)) + .unwrap_or_else(|| Ty::new_error(interner, ErrorGuaranteed)), + ); + diags = create_diagnostics(ctx.diagnostics); + res + }; + (inner, diags) +} + +pub(crate) fn type_for_type_alias_with_diagnostics_cycle_result<'db>( + db: &'db dyn HirDatabase, + _adt: TypeAliasId, +) -> (EarlyBinder<'db, Ty<'db>>, Diagnostics) { + (EarlyBinder::bind(Ty::new_error(DbInterner::new_with(db, None, None), ErrorGuaranteed)), None) +} + +pub(crate) fn impl_self_ty_query<'db>( + db: &'db dyn HirDatabase, + impl_id: ImplId, +) -> EarlyBinder<'db, Ty<'db>> { + db.impl_self_ty_with_diagnostics_ns(impl_id).0 +} + +pub(crate) fn impl_self_ty_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + impl_id: ImplId, +) -> (EarlyBinder<'db, Ty<'db>>, Diagnostics) { + let resolver = impl_id.resolver(db); + let interner = DbInterner::new_with(db, Some(resolver.krate()), None); + + let impl_data = db.impl_signature(impl_id); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &impl_data.store, + impl_id.into(), + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true }, + ); + let ty = ctx.lower_ty(impl_data.self_ty); + assert!(!ty.has_escaping_bound_vars()); + (EarlyBinder::bind(ty), create_diagnostics(ctx.diagnostics)) +} + +pub(crate) fn impl_self_ty_with_diagnostics_cycle_result( + db: &dyn HirDatabase, + _impl_id: ImplId, +) -> (EarlyBinder<'_, Ty<'_>>, Diagnostics) { + (EarlyBinder::bind(Ty::new_error(DbInterner::new_with(db, None, None), ErrorGuaranteed)), None) +} + +pub(crate) fn const_param_ty_query<'db>(db: &'db dyn HirDatabase, def: ConstParamId) -> Ty<'db> { + db.const_param_ty_with_diagnostics_ns(def).0 +} + +// returns None if def is a type arg +pub(crate) fn 
const_param_ty_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + def: ConstParamId, +) -> (Ty<'db>, Diagnostics) { + let (parent_data, store) = db.generic_params_and_store(def.parent()); + let data = &parent_data[def.local_id()]; + let resolver = def.parent().resolver(db); + let interner = DbInterner::new_with(db, Some(resolver.krate()), None); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &store, + def.parent(), + LifetimeElisionKind::AnonymousReportError, + ); + let ty = match data { + TypeOrConstParamData::TypeParamData(_) => { + never!(); + Ty::new_error(interner, ErrorGuaranteed) + } + TypeOrConstParamData::ConstParamData(d) => ctx.lower_ty(d.ty), + }; + (ty, create_diagnostics(ctx.diagnostics)) +} + +pub(crate) fn field_types_query<'db>( + db: &'db dyn HirDatabase, + variant_id: VariantId, +) -> Arc>>> { + db.field_types_with_diagnostics_ns(variant_id).0 +} + +/// Build the type of all specific fields of a struct or enum variant. +pub(crate) fn field_types_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + variant_id: VariantId, +) -> (Arc>>>, Diagnostics) { + let var_data = variant_id.fields(db); + let fields = var_data.fields(); + if fields.is_empty() { + return (Arc::new(ArenaMap::default()), None); + } + + let (resolver, def): (_, GenericDefId) = match variant_id { + VariantId::StructId(it) => (it.resolver(db), it.into()), + VariantId::UnionId(it) => (it.resolver(db), it.into()), + VariantId::EnumVariantId(it) => (it.resolver(db), it.lookup(db).parent.into()), + }; + let mut res = ArenaMap::default(); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &var_data.store, + def, + LifetimeElisionKind::AnonymousReportError, + ); + for (field_id, field_data) in var_data.fields().iter() { + res.insert(field_id, EarlyBinder::bind(ctx.lower_ty(field_data.type_ref))); + } + (Arc::new(res), create_diagnostics(ctx.diagnostics)) +} + +/// This query exists only to be used when resolving short-hand associated types +/// like `T::Item`. +/// +/// See the analogous query in rustc and its comment: +/// +/// This is a query mostly to handle cycles somewhat gracefully; e.g. the +/// following bounds are disallowed: `T: Foo, U: Foo`, but +/// these are fine: `T: Foo, U: Foo<()>`. +#[tracing::instrument(skip(db), ret)] +pub(crate) fn generic_predicates_for_param_query<'db>( + db: &'db dyn HirDatabase, + def: GenericDefId, + param_id: TypeOrConstParamId, + assoc_name: Option, +) -> GenericPredicates<'db> { + let generics = generics(db, def); + let interner = DbInterner::new_with(db, None, None); + let resolver = def.resolver(db); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + generics.store(), + def, + LifetimeElisionKind::AnonymousReportError, + ); + + // we have to filter out all other predicates *first*, before attempting to lower them + let predicate = |pred: &_, ctx: &mut TyLoweringContext<'_, '_>| match pred { + WherePredicate::ForLifetime { target, bound, .. } + | WherePredicate::TypeBound { target, bound, .. } => { + let invalid_target = { ctx.lower_ty_only_param(*target) != Some(param_id) }; + if invalid_target { + // FIXME(sized-hierarchy): Revisit and adjust this properly once we have implemented + // sized-hierarchy correctly. 
+ // If this is filtered out without lowering, `?Sized` or `PointeeSized` is not gathered into + // `ctx.unsized_types` + let lower = || -> bool { + match bound { + TypeBound::Path(_, TraitBoundModifier::Maybe) => true, + TypeBound::Path(path, _) | TypeBound::ForLifetime(_, path) => { + let TypeRef::Path(path) = &ctx.store[path.type_ref()] else { + return false; + }; + let Some(pointee_sized) = + LangItem::PointeeSized.resolve_trait(ctx.db, ctx.resolver.krate()) + else { + return false; + }; + // Lower the path directly with `Resolver` instead of PathLoweringContext` + // to prevent diagnostics duplications. + ctx.resolver.resolve_path_in_type_ns_fully(ctx.db, path).is_some_and( + |it| matches!(it, TypeNs::TraitId(tr) if tr == pointee_sized), + ) + } + _ => false, + } + }(); + if lower { + ctx.lower_where_predicate(pred, true, &generics, PredicateFilter::All) + .for_each(drop); + } + return false; + } + + match bound { + &TypeBound::ForLifetime(_, path) | &TypeBound::Path(path, _) => { + // Only lower the bound if the trait could possibly define the associated + // type we're looking for. + let path = &ctx.store[path]; + + let Some(assoc_name) = &assoc_name else { return true }; + let Some(TypeNs::TraitId(tr)) = + resolver.resolve_path_in_type_ns_fully(db, path) + else { + return false; + }; + + rustc_type_ir::elaborate::supertrait_def_ids(interner, tr.into()).any(|tr| { + tr.0.trait_items(db).items.iter().any(|(name, item)| { + matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name + }) + }) + } + TypeBound::Use(_) | TypeBound::Lifetime(_) | TypeBound::Error => false, + } + } + WherePredicate::Lifetime { .. } => false, + }; + let mut predicates = Vec::new(); + for maybe_parent_generics in + std::iter::successors(Some(&generics), |generics| generics.parent_generics()) + { + ctx.store = maybe_parent_generics.store(); + for pred in maybe_parent_generics.where_predicates() { + if predicate(pred, &mut ctx) { + predicates.extend(ctx.lower_where_predicate( + pred, + true, + maybe_parent_generics, + PredicateFilter::All, + )); + } + } + } + + let args = GenericArgs::identity_for_item(interner, def.into()); + if !args.is_empty() { + let explicitly_unsized_tys = ctx.unsized_types; + if let Some(implicitly_sized_predicates) = + implicitly_sized_clauses(db, param_id.parent, &explicitly_unsized_tys, &args, &resolver) + { + predicates.extend(implicitly_sized_predicates); + }; + } + GenericPredicates(predicates.is_empty().not().then(|| predicates.into())) +} + +pub(crate) fn generic_predicates_for_param_cycle_result( + _db: &dyn HirDatabase, + _def: GenericDefId, + _param_id: TypeOrConstParamId, + _assoc_name: Option, +) -> GenericPredicates<'_> { + GenericPredicates(None) +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct GenericPredicates<'db>(Option]>>); + +impl<'db> ops::Deref for GenericPredicates<'db> { + type Target = [Clause<'db>]; + + fn deref(&self) -> &Self::Target { + self.0.as_deref().unwrap_or(&[]) + } +} + +#[derive(Copy, Clone, Debug)] +pub(crate) enum PredicateFilter { + SelfTrait, + All, +} + +/// Resolve the where clause(s) of an item with generics. 
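/// For example, `fn f<T: Clone>()` yields `T: Clone` plus an implicit `T: Sized`
/// clause; the implicit `Sized` bound is skipped for params bound by `?Sized`
/// and for the `Self` param of a trait.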
+#[tracing::instrument(skip(db))]
+pub(crate) fn generic_predicates_query<'db>(
+    db: &'db dyn HirDatabase,
+    def: GenericDefId,
+) -> GenericPredicates<'db> {
+    generic_predicates_filtered_by(db, def, PredicateFilter::All, |_| true).0
+}
+
+pub(crate) fn generic_predicates_without_parent_query<'db>(
+    db: &'db dyn HirDatabase,
+    def: GenericDefId,
+) -> GenericPredicates<'db> {
+    generic_predicates_filtered_by(db, def, PredicateFilter::All, |d| d == def).0
+}
+
+/// Resolve the where clause(s) of an item with generics,
+/// except the ones inherited from the parent
+pub(crate) fn generic_predicates_without_parent_with_diagnostics_query<'db>(
+    db: &'db dyn HirDatabase,
+    def: GenericDefId,
+) -> (GenericPredicates<'db>, Diagnostics) {
+    generic_predicates_filtered_by(db, def, PredicateFilter::All, |d| d == def)
+}
+
+/// Resolve the where clause(s) of an item with generics,
+/// with a given filter
+#[tracing::instrument(skip(db, filter), ret)]
+pub(crate) fn generic_predicates_filtered_by<'db, F>(
+    db: &'db dyn HirDatabase,
+    def: GenericDefId,
+    predicate_filter: PredicateFilter,
+    filter: F,
+) -> (GenericPredicates<'db>, Diagnostics)
+where
+    F: Fn(GenericDefId) -> bool,
+{
+    let generics = generics(db, def);
+    let resolver = def.resolver(db);
+    let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
+    let mut ctx = TyLoweringContext::new(
+        db,
+        &resolver,
+        generics.store(),
+        def,
+        LifetimeElisionKind::AnonymousReportError,
+    );
+
+    let mut predicates = Vec::new();
+    for maybe_parent_generics in
+        std::iter::successors(Some(&generics), |generics| generics.parent_generics())
+    {
+        ctx.store = maybe_parent_generics.store();
+        for pred in maybe_parent_generics.where_predicates() {
+            tracing::debug!(?pred);
+            if filter(maybe_parent_generics.def()) {
+                // We deliberately use `generics` and not `maybe_parent_generics` here. This is not a mistake!
+                // If we use the parent generics
+                predicates.extend(ctx.lower_where_predicate(
+                    pred,
+                    false,
+                    maybe_parent_generics,
+                    predicate_filter,
+                ));
+            }
+        }
+    }
+
+    let explicitly_unsized_tys = ctx.unsized_types;
+
+    let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate());
+    if let Some(sized_trait) = sized_trait {
+        let (mut generics, mut def_id) =
+            (crate::next_solver::generics::generics(db, def.into()), def);
+        loop {
+            if filter(def_id) {
+                let self_idx = trait_self_param_idx(db, def_id);
+                for (idx, p) in generics.own_params.iter().enumerate() {
+                    if let Some(self_idx) = self_idx
+                        && p.index() as usize == self_idx
+                    {
+                        continue;
+                    }
+                    let GenericParamId::TypeParamId(param_id) = p.id else {
+                        continue;
+                    };
+                    let idx = idx as u32 + generics.parent_count as u32;
+                    let param_ty = Ty::new_param(interner, param_id, idx, p.name.clone());
+                    if explicitly_unsized_tys.contains(&param_ty) {
+                        continue;
+                    }
+                    let trait_ref = TraitRef::new_from_args(
+                        interner,
+                        sized_trait.into(),
+                        GenericArgs::new_from_iter(interner, [param_ty.into()]),
+                    );
+                    let clause = Clause(Predicate::new(
+                        interner,
+                        Binder::dummy(rustc_type_ir::PredicateKind::Clause(
+                            rustc_type_ir::ClauseKind::Trait(TraitPredicate {
+                                trait_ref,
+                                polarity: rustc_type_ir::PredicatePolarity::Positive,
+                            }),
+                        )),
+                    ));
+                    predicates.push(clause);
+                }
+            }
+
+            if let Some(g) = generics.parent {
+                generics = crate::next_solver::generics::generics(db, g.into());
+                def_id = g;
+            } else {
+                break;
+            }
+        }
+    }
+
+    // FIXME: rustc gathers more predicates by recursing through resulting trait predicates.
+    // See https://github.com/rust-lang/rust/blob/76c5ed2847cdb26ef2822a3a165d710f6b772217/compiler/rustc_hir_analysis/src/collect/predicates_of.rs#L689-L715
+
+    (
+        GenericPredicates(predicates.is_empty().not().then(|| predicates.into())),
+        create_diagnostics(ctx.diagnostics),
+    )
+}
+
+/// Generate implicit `: Sized` predicates for all generics that have no `?Sized` bound.
+/// Exception is Self of a trait def.
+fn implicitly_sized_clauses<'a, 'subst, 'db>(
+    db: &'db dyn HirDatabase,
+    def: GenericDefId,
+    explicitly_unsized_tys: &'a FxHashSet<Ty<'db>>,
+    args: &'subst GenericArgs<'db>,
+    resolver: &Resolver<'db>,
+) -> Option<impl Iterator<Item = Clause<'db>> + Captures<'a> + Captures<'subst>> {
+    let interner = DbInterner::new_with(db, Some(resolver.krate()), None);
+    let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate())?;
+
+    let trait_self_idx = trait_self_param_idx(db, def);
+
+    Some(
+        args.iter()
+            .enumerate()
+            .filter_map(
+                move |(idx, generic_arg)| {
+                    if Some(idx) == trait_self_idx { None } else { Some(generic_arg) }
+                },
+            )
+            .filter_map(|generic_arg| generic_arg.as_type())
+            .filter(move |self_ty| !explicitly_unsized_tys.contains(self_ty))
+            .map(move |self_ty| {
+                let trait_ref = TraitRef::new_from_args(
+                    interner,
+                    sized_trait.into(),
+                    GenericArgs::new_from_iter(interner, [self_ty.into()]),
+                );
+                Clause(Predicate::new(
+                    interner,
+                    Binder::dummy(rustc_type_ir::PredicateKind::Clause(
+                        rustc_type_ir::ClauseKind::Trait(TraitPredicate {
+                            trait_ref,
+                            polarity: rustc_type_ir::PredicatePolarity::Positive,
+                        }),
+                    )),
+                ))
+            }),
+    )
+}
+
+pub(crate) fn make_binders<'db, T: rustc_type_ir::TypeVisitable<DbInterner<'db>>>(
+    interner: DbInterner<'db>,
+    generics: &Generics,
+    value: T,
+) -> Binder<'db, T> {
+    Binder::bind_with_vars(
+        value,
+        BoundVarKinds::new_from_iter(
+            interner,
+            generics.iter_id().map(|x| match x {
+                hir_def::GenericParamId::ConstParamId(_) => BoundVarKind::Const,
+                hir_def::GenericParamId::TypeParamId(_) => BoundVarKind::Ty(BoundTyKind::Anon),
+                hir_def::GenericParamId::LifetimeParamId(_) => {
+                    BoundVarKind::Region(BoundRegionKind::Anon)
+                }
+            }),
+        ),
+    )
+}
+
+/// Checks if the provided generic arg matches its expected kind, then lowers it via the
+/// provided closures. Uses unknown/error values if there was a kind mismatch.
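+///
+/// As a rough illustration in surface syntax (not this crate's API): for a declaration like
+/// `struct S<'a, T, const N: usize>(&'a [T; N])`, the arguments in `S<'static, u32, 3>` each
+/// match the expected kind, while in something like `S<u32, 'static, 3>` the mismatched slots
+/// would be lowered to error/unknown values.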
+/// +pub(crate) fn lower_generic_arg<'a, 'db, T>( + db: &'db dyn HirDatabase, + kind_id: GenericParamId, + arg: &'a GenericArg, + this: &mut T, + store: &ExpressionStore, + for_type: impl FnOnce(&mut T, TypeRefId) -> Ty<'db> + 'a, + for_const: impl FnOnce(&mut T, &ConstRef, Ty<'db>) -> Const<'db> + 'a, + for_const_ty_path_fallback: impl FnOnce(&mut T, &Path, Ty<'db>) -> Const<'db> + 'a, + for_lifetime: impl FnOnce(&mut T, &LifetimeRefId) -> Region<'db> + 'a, +) -> crate::next_solver::GenericArg<'db> { + let interner = DbInterner::new_with(db, None, None); + let kind = match kind_id { + GenericParamId::TypeParamId(_) => ParamKind::Type, + GenericParamId::ConstParamId(id) => { + let ty = db.const_param_ty(id); + ParamKind::Const(ty) + } + GenericParamId::LifetimeParamId(_) => ParamKind::Lifetime, + }; + match (arg, kind) { + (GenericArg::Type(type_ref), ParamKind::Type) => for_type(this, *type_ref).into(), + (GenericArg::Const(c), ParamKind::Const(c_ty)) => { + for_const(this, c, c_ty.to_nextsolver(interner)).into() + } + (GenericArg::Lifetime(lifetime_ref), ParamKind::Lifetime) => { + for_lifetime(this, lifetime_ref).into() + } + (GenericArg::Const(_), ParamKind::Type) => Ty::new_error(interner, ErrorGuaranteed).into(), + (GenericArg::Lifetime(_), ParamKind::Type) => { + Ty::new_error(interner, ErrorGuaranteed).into() + } + (GenericArg::Type(t), ParamKind::Const(c_ty)) => match &store[*t] { + TypeRef::Path(p) => { + for_const_ty_path_fallback(this, p, c_ty.to_nextsolver(interner)).into() + } + _ => unknown_const_as_generic(c_ty.to_nextsolver(interner)), + }, + (GenericArg::Lifetime(_), ParamKind::Const(c_ty)) => { + unknown_const(c_ty.to_nextsolver(interner)).into() + } + (GenericArg::Type(_), ParamKind::Lifetime) => Region::error(interner).into(), + (GenericArg::Const(_), ParamKind::Lifetime) => Region::error(interner).into(), + } +} + +/// Build the signature of a callable item (function, struct or enum variant). 
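+///
+/// As a rough illustration in plain Rust (not this crate's API): the constructor of a tuple
+/// struct or enum variant is itself callable, with the field types as inputs and the ADT as
+/// output, which is the shape of signature built here.
+///
+/// ```
+/// struct Wrapper(u32, bool);
+/// let ctor: fn(u32, bool) -> Wrapper = Wrapper;
+/// let _value: Wrapper = ctor(1, true);
+/// ```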
+pub(crate) fn callable_item_signature_query<'db>( + db: &'db dyn HirDatabase, + def: CallableDefId, +) -> EarlyBinder<'db, PolyFnSig<'db>> { + match def { + CallableDefId::FunctionId(f) => fn_sig_for_fn(db, f), + CallableDefId::StructId(s) => fn_sig_for_struct_constructor(db, s), + CallableDefId::EnumVariantId(e) => fn_sig_for_enum_variant_constructor(db, e), + } +} + +fn fn_sig_for_fn<'db>( + db: &'db dyn HirDatabase, + def: FunctionId, +) -> EarlyBinder<'db, PolyFnSig<'db>> { + let data = db.function_signature(def); + let resolver = def.resolver(db); + let interner = DbInterner::new_with(db, Some(resolver.krate()), None); + let mut ctx_params = TyLoweringContext::new( + db, + &resolver, + &data.store, + def.into(), + LifetimeElisionKind::for_fn_params(&data), + ); + let params = data.params.iter().map(|&tr| ctx_params.lower_ty(tr)); + + let ret = match data.ret_type { + Some(ret_type) => { + let mut ctx_ret = TyLoweringContext::new( + db, + &resolver, + &data.store, + def.into(), + LifetimeElisionKind::for_fn_ret(interner), + ) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); + ctx_ret.lower_ty(ret_type) + } + None => Ty::new_tup(interner, &[]), + }; + + let inputs_and_output = Tys::new_from_iter(interner, params.chain(Some(ret))); + // If/when we track late bound vars, we need to switch this to not be `dummy` + EarlyBinder::bind(rustc_type_ir::Binder::dummy(FnSig { + abi: data.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol), + c_variadic: data.is_varargs(), + safety: if data.is_unsafe() { Safety::Unsafe } else { Safety::Safe }, + inputs_and_output, + })) +} + +fn type_for_adt<'db>(db: &'db dyn HirDatabase, adt: AdtId) -> EarlyBinder<'db, Ty<'db>> { + let interner = DbInterner::new_with(db, None, None); + let args = GenericArgs::identity_for_item(interner, adt.into()); + let ty = Ty::new_adt(interner, AdtDef::new(adt, interner), args); + EarlyBinder::bind(ty) +} + +fn fn_sig_for_struct_constructor<'db>( + db: &'db dyn HirDatabase, + def: StructId, +) -> EarlyBinder<'db, PolyFnSig<'db>> { + let field_tys = db.field_types_ns(def.into()); + let params = field_tys.iter().map(|(_, ty)| ty.skip_binder()); + let ret = type_for_adt(db, def.into()).skip_binder(); + + let inputs_and_output = + Tys::new_from_iter(DbInterner::new_with(db, None, None), params.chain(Some(ret))); + EarlyBinder::bind(Binder::dummy(FnSig { + abi: FnAbi::RustCall, + c_variadic: false, + safety: Safety::Safe, + inputs_and_output, + })) +} + +fn fn_sig_for_enum_variant_constructor<'db>( + db: &'db dyn HirDatabase, + def: EnumVariantId, +) -> EarlyBinder<'db, PolyFnSig<'db>> { + let field_tys = db.field_types_ns(def.into()); + let params = field_tys.iter().map(|(_, ty)| ty.skip_binder()); + let parent = def.lookup(db).parent; + let ret = type_for_adt(db, parent.into()).skip_binder(); + + let inputs_and_output = + Tys::new_from_iter(DbInterner::new_with(db, None, None), params.chain(Some(ret))); + EarlyBinder::bind(Binder::dummy(FnSig { + abi: FnAbi::RustCall, + c_variadic: false, + safety: Safety::Safe, + inputs_and_output, + })) +} + +// FIXME(next-solver): should merge this with `explicit_item_bounds` in some way +pub(crate) fn associated_ty_item_bounds<'db>( + db: &'db dyn HirDatabase, + type_alias: TypeAliasId, +) -> EarlyBinder<'db, BoundExistentialPredicates<'db>> { + let trait_ = match type_alias.lookup(db).container { + ItemContainerId::TraitId(t) => t, + _ => panic!("associated type not in trait"), + }; + + let type_alias_data = db.type_alias_signature(type_alias); + let resolver = 
hir_def::resolver::HasResolver::resolver(type_alias, db); + let interner = DbInterner::new_with(db, Some(resolver.krate()), None); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &type_alias_data.store, + type_alias.into(), + LifetimeElisionKind::AnonymousReportError, + ); + // FIXME: we should never create non-existential predicates in the first place + // For now, use an error type so we don't run into dummy binder issues + let self_ty = Ty::new_error(interner, ErrorGuaranteed); + + let mut bounds = Vec::new(); + for bound in &type_alias_data.bounds { + ctx.lower_type_bound(bound, self_ty, false).for_each(|pred| { + if let Some(bound) = pred + .kind() + .map_bound(|c| match c { + rustc_type_ir::ClauseKind::Trait(t) => { + let id = t.def_id(); + let is_auto = db.trait_signature(id.0).flags.contains(TraitFlags::AUTO); + if is_auto { + Some(ExistentialPredicate::AutoTrait(t.def_id())) + } else { + Some(ExistentialPredicate::Trait(ExistentialTraitRef::new_from_args( + interner, + t.def_id(), + GenericArgs::new_from_iter( + interner, + t.trait_ref.args.iter().skip(1), + ), + ))) + } + } + rustc_type_ir::ClauseKind::Projection(p) => Some( + ExistentialPredicate::Projection(ExistentialProjection::new_from_args( + interner, + p.def_id(), + GenericArgs::new_from_iter( + interner, + p.projection_term.args.iter().skip(1), + ), + p.term, + )), + ), + rustc_type_ir::ClauseKind::TypeOutlives(outlives_predicate) => None, + rustc_type_ir::ClauseKind::RegionOutlives(_) + | rustc_type_ir::ClauseKind::ConstArgHasType(_, _) + | rustc_type_ir::ClauseKind::WellFormed(_) + | rustc_type_ir::ClauseKind::ConstEvaluatable(_) + | rustc_type_ir::ClauseKind::HostEffect(_) + | rustc_type_ir::ClauseKind::UnstableFeature(_) => unreachable!(), + }) + .transpose() + { + bounds.push(bound); + } + }); + } + + if !ctx.unsized_types.contains(&self_ty) { + let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate()); + let sized_clause = Binder::dummy(ExistentialPredicate::Trait(ExistentialTraitRef::new( + interner, + trait_.into(), + [] as [crate::next_solver::GenericArg<'_>; 0], + ))); + bounds.push(sized_clause); + bounds.shrink_to_fit(); + } + + EarlyBinder::bind(BoundExistentialPredicates::new_from_iter(interner, bounds)) +} + +pub(crate) fn associated_type_by_name_including_super_traits<'db>( + db: &'db dyn HirDatabase, + trait_ref: TraitRef<'db>, + name: &Name, +) -> Option<(TraitRef<'db>, TypeAliasId)> { + let interner = DbInterner::new_with(db, None, None); + rustc_type_ir::elaborate::supertraits(interner, Binder::dummy(trait_ref)).find_map(|t| { + let trait_id = t.as_ref().skip_binder().def_id.0; + let assoc_type = trait_id.trait_items(db).associated_type_by_name(name)?; + Some((t.skip_binder(), assoc_type)) + }) +} + +pub fn associated_type_shorthand_candidates( + db: &dyn HirDatabase, + def: GenericDefId, + res: TypeNs, + mut cb: impl FnMut(&Name, TypeAliasId) -> bool, +) -> Option { + let interner = DbInterner::new_with(db, None, None); + named_associated_type_shorthand_candidates(interner, def, res, None, |name, _, id| { + cb(name, id).then_some(id) + }) +} + +#[tracing::instrument(skip(interner, check_alias))] +fn named_associated_type_shorthand_candidates<'db, R>( + interner: DbInterner<'db>, + // If the type parameter is defined in an impl and we're in a method, there + // might be additional where clauses to consider + def: GenericDefId, + res: TypeNs, + assoc_name: Option, + mut check_alias: impl FnMut(&Name, TraitRef<'db>, TypeAliasId) -> Option, +) -> Option { + let db = interner.db; 
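+    // What follows is, roughly, the machinery for associated types referenced by shorthand,
+    // i.e. written without an explicit trait: `T::Item` rather than `<T as IntoIterator>::Item`.
+    // Illustrative plain-Rust example (not this crate's API):
+    //
+    //     fn first<T: IntoIterator>(iter: T) -> Option<T::Item> {
+    //         iter.into_iter().next()
+    //     }
+    //
+    // Resolving `T::Item` means scanning the bounds of `T` (and their supertraits) for a trait
+    // that defines an associated type with the right name; the `search` closure below does that
+    // walk.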
+ let mut search = |t: TraitRef<'db>| -> Option { + let trait_id = t.def_id.0; + let mut checked_traits = FxHashSet::default(); + let mut check_trait = |trait_id: TraitId| { + let name = &db.trait_signature(trait_id).name; + tracing::debug!(?trait_id, ?name); + if !checked_traits.insert(trait_id) { + return None; + } + let data = trait_id.trait_items(db); + + tracing::debug!(?data.items); + for (name, assoc_id) in &data.items { + if let &AssocItemId::TypeAliasId(alias) = assoc_id + && let Some(ty) = check_alias(name, t, alias) + { + return Some(ty); + } + } + None + }; + let mut stack: SmallVec<[_; 4]> = smallvec![trait_id]; + while let Some(trait_def_id) = stack.pop() { + if let Some(alias) = check_trait(trait_def_id) { + return Some(alias); + } + for pred in generic_predicates_filtered_by( + db, + GenericDefId::TraitId(trait_def_id), + PredicateFilter::SelfTrait, + // We are likely in the midst of lowering generic predicates of `def`. + // So, if we allow `pred == def` we might fall into an infinite recursion. + // Actually, we have already checked for the case `pred == def` above as we started + // with a stack including `trait_id` + |pred| pred != def && pred == GenericDefId::TraitId(trait_def_id), + ) + .0 + .deref() + { + tracing::debug!(?pred); + let trait_id = match pred.kind().skip_binder() { + rustc_type_ir::ClauseKind::Trait(pred) => pred.def_id(), + _ => continue, + }; + stack.push(trait_id.0); + } + tracing::debug!(?stack); + } + + None + }; + + match res { + TypeNs::SelfType(impl_id) => { + let trait_ref = db.impl_trait_ns(impl_id)?; + + // FIXME(next-solver): same method in `lower` checks for impl or not + // Is that needed here? + + // we're _in_ the impl -- the binders get added back later. Correct, + // but it would be nice to make this more explicit + search(trait_ref.skip_binder()) + } + TypeNs::GenericParam(param_id) => { + // Handle `Self::Type` referring to own associated type in trait definitions + // This *must* be done first to avoid cycles with + // `generic_predicates_for_param`, but not sure that it's sufficient, + if let GenericDefId::TraitId(trait_id) = param_id.parent() { + let trait_name = &db.trait_signature(trait_id).name; + tracing::debug!(?trait_name); + let trait_generics = generics(db, trait_id.into()); + tracing::debug!(?trait_generics); + if trait_generics[param_id.local_id()].is_trait_self() { + let args = crate::next_solver::GenericArgs::identity_for_item( + interner, + trait_id.into(), + ); + let trait_ref = TraitRef::new_from_args(interner, trait_id.into(), args); + tracing::debug!(?args, ?trait_ref); + return search(trait_ref); + } + } + + let predicates = + db.generic_predicates_for_param_ns(def, param_id.into(), assoc_name.clone()); + predicates + .iter() + .find_map(|pred| match (*pred).kind().skip_binder() { + rustc_type_ir::ClauseKind::Trait(trait_predicate) => Some(trait_predicate), + _ => None, + }) + .and_then(|trait_predicate| { + let trait_ref = trait_predicate.trait_ref; + assert!( + !trait_ref.has_escaping_bound_vars(), + "FIXME unexpected higher-ranked trait bound" + ); + search(trait_ref) + }) + } + _ => None, + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver/path.rs new file mode 100644 index 0000000000000..7d6734303c48b --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver/path.rs @@ -0,0 +1,1360 @@ +//! A wrapper around [`TyLoweringContext`] specifically for lowering paths. 
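+//!
+//! "Paths" here are the source-level paths that name types and values, e.g. (illustrative plain
+//! Rust) `Vec<u8>`, `Option::<u8>::None`, `T::Item` or `<T as IntoIterator>::IntoIter`; the
+//! context resolves such a path segment by segment and lowers its generic arguments and
+//! associated-type bindings.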
+ +use std::ops::Deref; + +use either::Either; +use hir_def::{ + AssocItemId, GenericDefId, GenericParamId, Lookup, TraitId, TypeAliasId, + builtin_type::BuiltinType, + expr_store::{ + ExpressionStore, HygieneId, + path::{GenericArg, GenericArgs, GenericArgsParentheses, Path, PathSegment, PathSegments}, + }, + hir::generics::{ + GenericParamDataRef, TypeOrConstParamData, TypeParamData, TypeParamProvenance, + }, + resolver::{ResolveValueResult, TypeNs, ValueNs}, + signatures::TraitFlags, + type_ref::{TypeRef, TypeRefId}, +}; +use hir_expand::name::Name; +use intern::sym; +use rustc_hash::FxHashSet; +use rustc_type_ir::{ + AliasTerm, AliasTy, AliasTyKind, TypeVisitableExt, + inherent::{GenericArgs as _, IntoKind, Region as _, SliceLike, Ty as _}, +}; +use smallvec::{SmallVec, smallvec}; +use stdx::never; + +use crate::{ + GenericArgsProhibitedReason, IncorrectGenericsLenKind, PathGenericsSource, + PathLoweringDiagnostic, TyDefId, ValueTyDefId, + consteval_nextsolver::{unknown_const, unknown_const_as_generic}, + db::HirDatabase, + generics::{Generics, generics}, + lower::PathDiagnosticCallbackData, + lower_nextsolver::{ + LifetimeElisionKind, PredicateFilter, generic_predicates_filtered_by, + named_associated_type_shorthand_candidates, + }, + next_solver::{ + AdtDef, Binder, Clause, Const, DbInterner, ErrorGuaranteed, Predicate, ProjectionPredicate, + Region, SolverDefId, TraitRef, Ty, + mapping::{ChalkToNextSolver, convert_binder_to_early_binder}, + }, + primitive, +}; + +use super::{ + ImplTraitLoweringMode, TyLoweringContext, associated_type_by_name_including_super_traits, + const_param_ty_query, ty_query, +}; + +type CallbackData<'a> = + Either>; + +// We cannot use `&mut dyn FnMut()` because of lifetime issues, and we don't want to use `Box` +// because of the allocation, so we create a lifetime-less callback, tailored for our needs. +pub(crate) struct PathDiagnosticCallback<'a, 'db> { + pub(crate) data: CallbackData<'a>, + pub(crate) callback: + fn(&CallbackData<'_>, &mut TyLoweringContext<'db, '_>, PathLoweringDiagnostic), +} + +pub(crate) struct PathLoweringContext<'a, 'b, 'db> { + ctx: &'a mut TyLoweringContext<'db, 'b>, + on_diagnostic: PathDiagnosticCallback<'a, 'db>, + path: &'a Path, + segments: PathSegments<'a>, + current_segment_idx: usize, + /// Contains the previous segment if `current_segment_idx == segments.len()` + current_or_prev_segment: PathSegment<'a>, +} + +impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { + #[inline] + pub(crate) fn new( + ctx: &'a mut TyLoweringContext<'db, 'b>, + on_diagnostic: PathDiagnosticCallback<'a, 'db>, + path: &'a Path, + ) -> Self { + let segments = path.segments(); + let first_segment = segments.first().unwrap_or(PathSegment::MISSING); + Self { + ctx, + on_diagnostic, + path, + segments, + current_segment_idx: 0, + current_or_prev_segment: first_segment, + } + } + + #[inline] + #[cold] + fn on_diagnostic(&mut self, diag: PathLoweringDiagnostic) { + (self.on_diagnostic.callback)(&self.on_diagnostic.data, self.ctx, diag); + } + + #[inline] + pub(crate) fn ty_ctx(&mut self) -> &mut TyLoweringContext<'db, 'b> { + self.ctx + } + + #[inline] + fn current_segment_u32(&self) -> u32 { + self.current_segment_idx as u32 + } + + #[inline] + fn skip_resolved_segment(&mut self) { + if !matches!(self.path, Path::LangItem(..)) { + // In lang items, the resolved "segment" is not one of the segments. Perhaps we should've put it + // point at -1, but I don't feel this is clearer. 
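+            // E.g. (illustrative): for the type path `IntoIterator::Item`, type-namespace
+            // resolution resolves the `IntoIterator` segment; advancing here leaves `Item` to be
+            // lowered as an associated type.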
+ self.current_segment_idx += 1; + } + self.update_current_segment(); + } + + #[inline] + fn update_current_segment(&mut self) { + self.current_or_prev_segment = + self.segments.get(self.current_segment_idx).unwrap_or(self.current_or_prev_segment); + } + + #[inline] + pub(crate) fn ignore_last_segment(&mut self) { + self.segments = self.segments.strip_last(); + } + + #[inline] + pub(crate) fn set_current_segment(&mut self, segment: usize) { + self.current_segment_idx = segment; + self.current_or_prev_segment = self + .segments + .get(segment) + .expect("invalid segment passed to PathLoweringContext::set_current_segment()"); + } + + #[inline] + fn with_lifetime_elision( + &mut self, + lifetime_elision: LifetimeElisionKind<'db>, + f: impl FnOnce(&mut PathLoweringContext<'_, '_, 'db>) -> T, + ) -> T { + let old_lifetime_elision = + std::mem::replace(&mut self.ctx.lifetime_elision, lifetime_elision); + let result = f(self); + self.ctx.lifetime_elision = old_lifetime_elision; + result + } + + pub(crate) fn lower_ty_relative_path( + &mut self, + ty: Ty<'db>, + // We need the original resolution to lower `Self::AssocTy` correctly + res: Option, + ) -> (Ty<'db>, Option) { + let remaining_segments = self.segments.len() - self.current_segment_idx; + match remaining_segments { + 0 => (ty, res), + 1 => { + // resolve unselected assoc types + (self.select_associated_type(res), None) + } + _ => { + // FIXME report error (ambiguous associated type) + (Ty::new_error(self.ctx.interner, ErrorGuaranteed), None) + } + } + } + + fn prohibit_parenthesized_generic_args(&mut self) -> bool { + if let Some(generic_args) = self.current_or_prev_segment.args_and_bindings { + match generic_args.parenthesized { + GenericArgsParentheses::No => {} + GenericArgsParentheses::ReturnTypeNotation | GenericArgsParentheses::ParenSugar => { + let segment = self.current_segment_u32(); + self.on_diagnostic( + PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment }, + ); + return true; + } + } + } + false + } + + // When calling this, the current segment is the resolved segment (we don't advance it yet). + pub(crate) fn lower_partly_resolved_path( + &mut self, + resolution: TypeNs, + infer_args: bool, + ) -> (Ty<'db>, Option) { + let remaining_segments = self.segments.skip(self.current_segment_idx + 1); + tracing::debug!(?remaining_segments); + let rem_seg_len = remaining_segments.len(); + tracing::debug!(?rem_seg_len); + + let ty = match resolution { + TypeNs::TraitId(trait_) => { + let ty = match remaining_segments.len() { + 1 => { + let trait_ref = self.lower_trait_ref_from_resolved_path( + trait_, + Ty::new_error(self.ctx.interner, ErrorGuaranteed), + ); + tracing::debug!(?trait_ref); + self.skip_resolved_segment(); + let segment = self.current_or_prev_segment; + let trait_id = trait_ref.def_id.0; + let found = + trait_id.trait_items(self.ctx.db).associated_type_by_name(segment.name); + + tracing::debug!(?found); + match found { + Some(associated_ty) => { + // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent + // generic params. It's inefficient to splice the `Substitution`s, so we may want + // that method to optionally take parent `Substitution` as we already know them at + // this point (`trait_ref.substitution`). 
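+                                // What is built below is, roughly, the projection
+                                // `<Self as Trait<..>>::Assoc`, with `Self` left as an error type
+                                // because the path named the trait directly: the trait ref's
+                                // arguments are reused, and only the associated type's own
+                                // arguments come from this segment.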
+ let substitution = self.substs_from_path_segment( + associated_ty.into(), + false, + None, + true, + ); + let args = crate::next_solver::GenericArgs::new_from_iter( + self.ctx.interner, + trait_ref + .args + .iter() + .chain(substitution.iter().skip(trait_ref.args.len())), + ); + Ty::new_alias( + self.ctx.interner, + AliasTyKind::Projection, + AliasTy::new_from_args( + self.ctx.interner, + associated_ty.into(), + args, + ), + ) + } + None => { + // FIXME: report error (associated type not found) + Ty::new_error(self.ctx.interner, ErrorGuaranteed) + } + } + } + 0 => { + // Trait object type without dyn; this should be handled in upstream. See + // `lower_path()`. + stdx::never!("unexpected fully resolved trait path"); + Ty::new_error(self.ctx.interner, ErrorGuaranteed) + } + _ => { + // FIXME report error (ambiguous associated type) + Ty::new_error(self.ctx.interner, ErrorGuaranteed) + } + }; + return (ty, None); + } + TypeNs::GenericParam(param_id) => { + let generics = self.ctx.generics(); + let idx = generics.type_or_const_param_idx(param_id.into()); + match idx { + None => { + never!("no matching generics"); + Ty::new_error(self.ctx.interner, ErrorGuaranteed) + } + Some(idx) => { + let (pidx, param) = generics.iter().nth(idx).unwrap(); + assert_eq!(pidx, param_id.into()); + let p = match param { + GenericParamDataRef::TypeParamData(p) => p, + _ => unreachable!(), + }; + Ty::new_param( + self.ctx.interner, + param_id, + idx as u32, + p.name + .as_ref() + .map_or_else(|| sym::MISSING_NAME.clone(), |p| p.symbol().clone()), + ) + } + } + } + TypeNs::SelfType(impl_id) => self.ctx.db.impl_self_ty_ns(impl_id).skip_binder(), + TypeNs::AdtSelfType(adt) => { + let args = crate::next_solver::GenericArgs::identity_for_item( + self.ctx.interner, + adt.into(), + ); + Ty::new_adt(self.ctx.interner, AdtDef::new(adt, self.ctx.interner), args) + } + + TypeNs::AdtId(it) => self.lower_path_inner(it.into(), infer_args), + TypeNs::BuiltinType(it) => self.lower_path_inner(it.into(), infer_args), + TypeNs::TypeAliasId(it) => self.lower_path_inner(it.into(), infer_args), + // FIXME: report error + TypeNs::EnumVariantId(_) | TypeNs::ModuleId(_) => { + return (Ty::new_error(self.ctx.interner, ErrorGuaranteed), None); + } + }; + + tracing::debug!(?ty); + + self.skip_resolved_segment(); + self.lower_ty_relative_path(ty, Some(resolution)) + } + + fn handle_type_ns_resolution(&mut self, resolution: &TypeNs) { + let mut prohibit_generics_on_resolved = |reason| { + if self.current_or_prev_segment.args_and_bindings.is_some() { + let segment = self.current_segment_u32(); + self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { + segment, + reason, + }); + } + }; + + match resolution { + TypeNs::SelfType(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy) + } + TypeNs::GenericParam(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::TyParam) + } + TypeNs::AdtSelfType(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy) + } + TypeNs::BuiltinType(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::PrimitiveTy) + } + TypeNs::ModuleId(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::Module) + } + TypeNs::AdtId(_) + | TypeNs::EnumVariantId(_) + | TypeNs::TypeAliasId(_) + | TypeNs::TraitId(_) => {} + } + } + + pub(crate) fn resolve_path_in_type_ns_fully(&mut self) -> Option { + let (res, unresolved) = self.resolve_path_in_type_ns()?; + if unresolved.is_some() { + return None; + } + Some(res) + } + + 
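+    /// Resolves `self.path` in the type namespace, and reports generic arguments written on
+    /// segments that cannot accept them (for example on intermediate module segments, or when
+    /// both the enum and the variant segment of an enum-variant path carry arguments).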
#[tracing::instrument(skip(self), ret)] + pub(crate) fn resolve_path_in_type_ns(&mut self) -> Option<(TypeNs, Option)> { + let (resolution, remaining_index, _, prefix_info) = + self.ctx.resolver.resolve_path_in_type_ns_with_prefix_info(self.ctx.db, self.path)?; + + let segments = self.segments; + if segments.is_empty() || matches!(self.path, Path::LangItem(..)) { + // `segments.is_empty()` can occur with `self`. + return Some((resolution, remaining_index)); + } + + let (module_segments, resolved_segment_idx, enum_segment) = match remaining_index { + None if prefix_info.enum_variant => { + (segments.strip_last_two(), segments.len() - 1, Some(segments.len() - 2)) + } + None => (segments.strip_last(), segments.len() - 1, None), + Some(i) => (segments.take(i - 1), i - 1, None), + }; + + self.current_segment_idx = resolved_segment_idx; + self.current_or_prev_segment = + segments.get(resolved_segment_idx).expect("should have resolved segment"); + + if matches!(self.path, Path::BarePath(..)) { + // Bare paths cannot have generics, so skip them as an optimization. + return Some((resolution, remaining_index)); + } + + for (i, mod_segment) in module_segments.iter().enumerate() { + if mod_segment.args_and_bindings.is_some() { + self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { + segment: i as u32, + reason: GenericArgsProhibitedReason::Module, + }); + } + } + + if let Some(enum_segment) = enum_segment + && segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some()) + && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some()) + { + self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { + segment: (enum_segment + 1) as u32, + reason: GenericArgsProhibitedReason::EnumVariant, + }); + } + + self.handle_type_ns_resolution(&resolution); + + Some((resolution, remaining_index)) + } + + pub(crate) fn resolve_path_in_value_ns( + &mut self, + hygiene_id: HygieneId, + ) -> Option { + let (res, prefix_info) = self.ctx.resolver.resolve_path_in_value_ns_with_prefix_info( + self.ctx.db, + self.path, + hygiene_id, + )?; + + let segments = self.segments; + if segments.is_empty() || matches!(self.path, Path::LangItem(..)) { + // `segments.is_empty()` can occur with `self`. + return Some(res); + } + + let (mod_segments, enum_segment, resolved_segment_idx) = match res { + ResolveValueResult::Partial(_, unresolved_segment, _) => { + (segments.take(unresolved_segment - 1), None, unresolved_segment - 1) + } + ResolveValueResult::ValueNs(ValueNs::EnumVariantId(_), _) + if prefix_info.enum_variant => + { + (segments.strip_last_two(), segments.len().checked_sub(2), segments.len() - 1) + } + ResolveValueResult::ValueNs(..) 
=> (segments.strip_last(), None, segments.len() - 1), + }; + + self.current_segment_idx = resolved_segment_idx; + self.current_or_prev_segment = + segments.get(resolved_segment_idx).expect("should have resolved segment"); + + for (i, mod_segment) in mod_segments.iter().enumerate() { + if mod_segment.args_and_bindings.is_some() { + self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { + segment: i as u32, + reason: GenericArgsProhibitedReason::Module, + }); + } + } + + if let Some(enum_segment) = enum_segment + && segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some()) + && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some()) + { + self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { + segment: (enum_segment + 1) as u32, + reason: GenericArgsProhibitedReason::EnumVariant, + }); + } + + match &res { + ResolveValueResult::ValueNs(resolution, _) => { + let resolved_segment_idx = self.current_segment_u32(); + let resolved_segment = self.current_or_prev_segment; + + let mut prohibit_generics_on_resolved = |reason| { + if resolved_segment.args_and_bindings.is_some() { + self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { + segment: resolved_segment_idx, + reason, + }); + } + }; + + match resolution { + ValueNs::ImplSelf(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy) + } + // FIXME: rustc generates E0107 (incorrect number of generic arguments) and not + // E0109 (generic arguments provided for a type that doesn't accept them) for + // consts and statics, presumably as a defense against future in which consts + // and statics can be generic, or just because it was easier for rustc implementors. + // That means we'll show the wrong error code. Because of us it's easier to do it + // this way :) + ValueNs::GenericParam(_) | ValueNs::ConstId(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::Const) + } + ValueNs::StaticId(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::Static) + } + ValueNs::FunctionId(_) | ValueNs::StructId(_) | ValueNs::EnumVariantId(_) => {} + ValueNs::LocalBinding(_) => {} + } + } + ResolveValueResult::Partial(resolution, _, _) => { + self.handle_type_ns_resolution(resolution); + } + }; + Some(res) + } + + #[tracing::instrument(skip(self), ret)] + fn select_associated_type(&mut self, res: Option) -> Ty<'db> { + let interner = self.ctx.interner; + let Some(res) = res else { + return Ty::new_error(self.ctx.interner, ErrorGuaranteed); + }; + let db = self.ctx.db; + let def = self.ctx.def; + let segment = self.current_or_prev_segment; + let assoc_name = segment.name; + let mut check_alias = |name: &Name, t: TraitRef<'db>, associated_ty: TypeAliasId| { + if name != assoc_name { + return None; + } + + // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent + // generic params. It's inefficient to splice the `Substitution`s, so we may want + // that method to optionally take parent `Substitution` as we already know them at + // this point (`t.substitution`). 
+            let substs = self.substs_from_path_segment(associated_ty.into(), false, None, true);
+
+            let substs = crate::next_solver::GenericArgs::new_from_iter(
+                interner,
+                t.args.iter().chain(substs.iter().skip(t.args.len())),
+            );
+
+            Some(Ty::new_alias(
+                interner,
+                AliasTyKind::Projection,
+                AliasTy::new(interner, associated_ty.into(), substs),
+            ))
+        };
+        named_associated_type_shorthand_candidates(
+            interner,
+            def,
+            res,
+            Some(assoc_name.clone()),
+            check_alias,
+        )
+        .unwrap_or_else(|| Ty::new_error(interner, ErrorGuaranteed))
+    }
+
+    fn lower_path_inner(&mut self, typeable: TyDefId, infer_args: bool) -> Ty<'db> {
+        let generic_def = match typeable {
+            TyDefId::BuiltinType(builtinty) => return builtin(self.ctx.interner, builtinty),
+            TyDefId::AdtId(it) => it.into(),
+            TyDefId::TypeAliasId(it) => it.into(),
+        };
+        let args = self.substs_from_path_segment(generic_def, infer_args, None, false);
+        let ty = ty_query(self.ctx.db, typeable);
+        ty.instantiate(self.ctx.interner, args)
+    }
+
+    /// Collect generic arguments from a path into a `Substs`. See also
+    /// `create_substs_for_ast_path` and `def_to_ty` in rustc.
+    pub(crate) fn substs_from_path(
+        &mut self,
+        // Note that we don't call `db.value_type(resolved)` here,
+        // `ValueTyDefId` is just a convenient way to pass generics and
+        // special-case enum variants
+        resolved: ValueTyDefId,
+        infer_args: bool,
+        lowering_assoc_type_generics: bool,
+    ) -> crate::next_solver::GenericArgs<'db> {
+        let interner = self.ctx.interner;
+        let prev_current_segment_idx = self.current_segment_idx;
+        let prev_current_segment = self.current_or_prev_segment;
+
+        let generic_def = match resolved {
+            ValueTyDefId::FunctionId(it) => it.into(),
+            ValueTyDefId::StructId(it) => it.into(),
+            ValueTyDefId::UnionId(it) => it.into(),
+            ValueTyDefId::ConstId(it) => it.into(),
+            ValueTyDefId::StaticId(_) => {
+                return crate::next_solver::GenericArgs::new_from_iter(interner, []);
+            }
+            ValueTyDefId::EnumVariantId(var) => {
+                // the generic args for an enum variant may be either specified
+                // on the segment referring to the enum, or on the segment
+                // referring to the variant. So `Option::<T>::None` and
+                // `Option::None::<T>` are both allowed (though the former is
+                // preferred). See also `def_ids_for_path_segments` in rustc.
+                //
+                // FIXME: This isn't strictly correct, enum variants may be used not through the enum
+                // (via `use Enum::Variant`). The resolver returns whether they were, but we don't have its result
+                // available here. The worst that can happen is that we will show some confusing diagnostics to the user,
+                // if generics exist on the module and they don't match with the variant.
+                //
+                // `wrapping_sub(1)` will return a number which `get` will return None for when
+                // `current_segment_idx` is 0. This simplifies the code a bit.
+ let penultimate_idx = self.current_segment_idx.wrapping_sub(1); + let penultimate = self.segments.get(penultimate_idx); + if let Some(penultimate) = penultimate + && self.current_or_prev_segment.args_and_bindings.is_none() + && penultimate.args_and_bindings.is_some() + { + self.current_segment_idx = penultimate_idx; + self.current_or_prev_segment = penultimate; + } + var.lookup(self.ctx.db).parent.into() + } + }; + let result = self.substs_from_path_segment( + generic_def, + infer_args, + None, + lowering_assoc_type_generics, + ); + self.current_segment_idx = prev_current_segment_idx; + self.current_or_prev_segment = prev_current_segment; + result + } + + pub(crate) fn substs_from_path_segment( + &mut self, + def: GenericDefId, + infer_args: bool, + explicit_self_ty: Option>, + lowering_assoc_type_generics: bool, + ) -> crate::next_solver::GenericArgs<'db> { + let mut lifetime_elision = self.ctx.lifetime_elision.clone(); + + if let Some(args) = self.current_or_prev_segment.args_and_bindings + && args.parenthesized != GenericArgsParentheses::No + { + let prohibit_parens = match def { + GenericDefId::TraitId(trait_) => { + // RTN is prohibited anyways if we got here. + let is_rtn = args.parenthesized == GenericArgsParentheses::ReturnTypeNotation; + let is_fn_trait = self + .ctx + .db + .trait_signature(trait_) + .flags + .contains(TraitFlags::RUSTC_PAREN_SUGAR); + is_rtn || !is_fn_trait + } + _ => true, + }; + + if prohibit_parens { + let segment = self.current_segment_u32(); + self.on_diagnostic( + PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment }, + ); + + return unknown_subst(self.ctx.interner, def); + } + + // `Fn()`-style generics are treated like functions for the purpose of lifetime elision. + lifetime_elision = + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false }; + } + + self.substs_from_args_and_bindings( + self.current_or_prev_segment.args_and_bindings, + def, + infer_args, + explicit_self_ty, + PathGenericsSource::Segment(self.current_segment_u32()), + lowering_assoc_type_generics, + lifetime_elision, + ) + } + + pub(super) fn substs_from_args_and_bindings( + &mut self, + args_and_bindings: Option<&GenericArgs>, + def: GenericDefId, + infer_args: bool, + explicit_self_ty: Option>, + generics_source: PathGenericsSource, + lowering_assoc_type_generics: bool, + lifetime_elision: LifetimeElisionKind<'db>, + ) -> crate::next_solver::GenericArgs<'db> { + struct LowererCtx<'a, 'b, 'c, 'db> { + ctx: &'a mut PathLoweringContext<'b, 'c, 'db>, + generics_source: PathGenericsSource, + } + + impl<'db> GenericArgsLowerer<'db> for LowererCtx<'_, '_, '_, 'db> { + fn report_len_mismatch( + &mut self, + def: GenericDefId, + provided_count: u32, + expected_count: u32, + kind: IncorrectGenericsLenKind, + ) { + self.ctx.on_diagnostic(PathLoweringDiagnostic::IncorrectGenericsLen { + generics_source: self.generics_source, + provided_count, + expected_count, + kind, + def, + }); + } + + fn report_arg_mismatch( + &mut self, + param_id: GenericParamId, + arg_idx: u32, + has_self_arg: bool, + ) { + self.ctx.on_diagnostic(PathLoweringDiagnostic::IncorrectGenericsOrder { + generics_source: self.generics_source, + param_id, + arg_idx, + has_self_arg, + }); + } + + fn provided_kind( + &mut self, + param_id: GenericParamId, + param: GenericParamDataRef<'_>, + arg: &GenericArg, + ) -> crate::next_solver::GenericArg<'db> { + match (param, arg) { + (GenericParamDataRef::LifetimeParamData(_), GenericArg::Lifetime(lifetime)) => { + 
self.ctx.ctx.lower_lifetime(*lifetime).into() + } + (GenericParamDataRef::TypeParamData(_), GenericArg::Type(type_ref)) => { + self.ctx.ctx.lower_ty(*type_ref).into() + } + (GenericParamDataRef::ConstParamData(_), GenericArg::Const(konst)) => { + let GenericParamId::ConstParamId(const_id) = param_id else { + unreachable!("non-const param ID for const param"); + }; + self.ctx + .ctx + .lower_const(konst, const_param_ty_query(self.ctx.ctx.db, const_id)) + .into() + } + _ => unreachable!("unmatching param kinds were passed to `provided_kind()`"), + } + } + + fn provided_type_like_const( + &mut self, + const_ty: Ty<'db>, + arg: TypeLikeConst<'_>, + ) -> crate::next_solver::Const<'db> { + match arg { + TypeLikeConst::Path(path) => self.ctx.ctx.lower_path_as_const(path, const_ty), + TypeLikeConst::Infer => unknown_const(const_ty), + } + } + + fn inferred_kind( + &mut self, + def: GenericDefId, + param_id: GenericParamId, + param: GenericParamDataRef<'_>, + infer_args: bool, + preceding_args: &[crate::next_solver::GenericArg<'db>], + ) -> crate::next_solver::GenericArg<'db> { + let default = || { + self.ctx.ctx.db.generic_defaults(def).get(preceding_args.len()).map(|default| { + convert_binder_to_early_binder( + self.ctx.ctx.interner, + def, + default.to_nextsolver(self.ctx.ctx.interner), + ) + .instantiate(self.ctx.ctx.interner, preceding_args) + }) + }; + match param { + GenericParamDataRef::LifetimeParamData(_) => { + Region::new(self.ctx.ctx.interner, rustc_type_ir::ReError(ErrorGuaranteed)) + .into() + } + GenericParamDataRef::TypeParamData(param) => { + if !infer_args + && param.default.is_some() + && let Some(default) = default() + { + return default; + } + Ty::new_error(self.ctx.ctx.interner, ErrorGuaranteed).into() + } + GenericParamDataRef::ConstParamData(param) => { + if !infer_args + && param.default.is_some() + && let Some(default) = default() + { + return default; + } + let GenericParamId::ConstParamId(const_id) = param_id else { + unreachable!("non-const param ID for const param"); + }; + unknown_const_as_generic(const_param_ty_query(self.ctx.ctx.db, const_id)) + } + } + } + + fn parent_arg( + &mut self, + param_id: GenericParamId, + ) -> crate::next_solver::GenericArg<'db> { + match param_id { + GenericParamId::TypeParamId(_) => { + Ty::new_error(self.ctx.ctx.interner, ErrorGuaranteed).into() + } + GenericParamId::ConstParamId(const_id) => { + unknown_const_as_generic(const_param_ty_query(self.ctx.ctx.db, const_id)) + } + GenericParamId::LifetimeParamId(_) => { + Region::new(self.ctx.ctx.interner, rustc_type_ir::ReError(ErrorGuaranteed)) + .into() + } + } + } + + fn report_elided_lifetimes_in_path( + &mut self, + def: GenericDefId, + expected_count: u32, + hard_error: bool, + ) { + self.ctx.on_diagnostic(PathLoweringDiagnostic::ElidedLifetimesInPath { + generics_source: self.generics_source, + def, + expected_count, + hard_error, + }); + } + + fn report_elision_failure(&mut self, def: GenericDefId, expected_count: u32) { + self.ctx.on_diagnostic(PathLoweringDiagnostic::ElisionFailure { + generics_source: self.generics_source, + def, + expected_count, + }); + } + + fn report_missing_lifetime(&mut self, def: GenericDefId, expected_count: u32) { + self.ctx.on_diagnostic(PathLoweringDiagnostic::MissingLifetime { + generics_source: self.generics_source, + def, + expected_count, + }); + } + } + + substs_from_args_and_bindings( + self.ctx.db, + self.ctx.store, + args_and_bindings, + def, + infer_args, + lifetime_elision, + lowering_assoc_type_generics, + explicit_self_ty, + &mut 
LowererCtx { ctx: self, generics_source }, + ) + } + + pub(crate) fn lower_trait_ref_from_resolved_path( + &mut self, + resolved: TraitId, + explicit_self_ty: Ty<'db>, + ) -> TraitRef<'db> { + let args = self.trait_ref_substs_from_path(resolved, explicit_self_ty); + TraitRef::new_from_args(self.ctx.interner, resolved.into(), args) + } + + fn trait_ref_substs_from_path( + &mut self, + resolved: TraitId, + explicit_self_ty: Ty<'db>, + ) -> crate::next_solver::GenericArgs<'db> { + self.substs_from_path_segment(resolved.into(), false, Some(explicit_self_ty), false) + } + + pub(super) fn assoc_type_bindings_from_type_bound<'c>( + mut self, + trait_ref: TraitRef<'db>, + ) -> Option> + use<'a, 'b, 'c, 'db>> { + let interner = self.ctx.interner; + self.current_or_prev_segment.args_and_bindings.map(|args_and_bindings| { + args_and_bindings.bindings.iter().enumerate().flat_map(move |(binding_idx, binding)| { + let found = associated_type_by_name_including_super_traits( + self.ctx.db, + trait_ref, + &binding.name, + ); + let (super_trait_ref, associated_ty) = match found { + None => return SmallVec::new(), + Some(t) => t, + }; + let args = + self.with_lifetime_elision(LifetimeElisionKind::AnonymousReportError, |this| { + // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent + // generic params. It's inefficient to splice the `Substitution`s, so we may want + // that method to optionally take parent `Substitution` as we already know them at + // this point (`super_trait_ref.substitution`). + this.substs_from_args_and_bindings( + binding.args.as_ref(), + associated_ty.into(), + false, // this is not relevant + Some(super_trait_ref.self_ty()), + PathGenericsSource::AssocType { + segment: this.current_segment_u32(), + assoc_type: binding_idx as u32, + }, + false, + this.ctx.lifetime_elision.clone(), + ) + }); + let args = crate::next_solver::GenericArgs::new_from_iter( + interner, + super_trait_ref.args.iter().chain(args.iter().skip(super_trait_ref.args.len())), + ); + let projection_term = + AliasTerm::new_from_args(interner, associated_ty.into(), args); + let mut predicates: SmallVec<[_; 1]> = SmallVec::with_capacity( + binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(), + ); + if let Some(type_ref) = binding.type_ref { + match (&self.ctx.store[type_ref], self.ctx.impl_trait_mode.mode) { + (TypeRef::ImplTrait(_), ImplTraitLoweringMode::Disallowed) => (), + (_, ImplTraitLoweringMode::Disallowed | ImplTraitLoweringMode::Opaque) => { + let ty = self.ctx.lower_ty(type_ref); + let pred = Clause(Predicate::new( + interner, + Binder::dummy(rustc_type_ir::PredicateKind::Clause( + rustc_type_ir::ClauseKind::Projection(ProjectionPredicate { + projection_term, + term: ty.into(), + }), + )), + )); + predicates.push(pred); + } + } + } + for bound in binding.bounds.iter() { + predicates.extend(self.ctx.lower_type_bound( + bound, + Ty::new_alias( + self.ctx.interner, + AliasTyKind::Projection, + AliasTy::new_from_args(self.ctx.interner, associated_ty.into(), args), + ), + false, + )); + } + predicates + }) + }) + } +} + +/// A const that were parsed like a type. 
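+///
+/// E.g. (illustrative plain Rust): in `Foo<BAR>` or `Foo<_>` the argument is parsed as a type
+/// even when the parameter turns out to be a const; the former case becomes a path const and
+/// the latter an inferred const.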
+pub(crate) enum TypeLikeConst<'a> { + Infer, + Path(&'a Path), +} + +pub(crate) trait GenericArgsLowerer<'db> { + fn report_elided_lifetimes_in_path( + &mut self, + def: GenericDefId, + expected_count: u32, + hard_error: bool, + ); + + fn report_elision_failure(&mut self, def: GenericDefId, expected_count: u32); + + fn report_missing_lifetime(&mut self, def: GenericDefId, expected_count: u32); + + fn report_len_mismatch( + &mut self, + def: GenericDefId, + provided_count: u32, + expected_count: u32, + kind: IncorrectGenericsLenKind, + ); + + fn report_arg_mismatch(&mut self, param_id: GenericParamId, arg_idx: u32, has_self_arg: bool); + + fn provided_kind( + &mut self, + param_id: GenericParamId, + param: GenericParamDataRef<'_>, + arg: &GenericArg, + ) -> crate::next_solver::GenericArg<'db>; + + fn provided_type_like_const(&mut self, const_ty: Ty<'db>, arg: TypeLikeConst<'_>) + -> Const<'db>; + + fn inferred_kind( + &mut self, + def: GenericDefId, + param_id: GenericParamId, + param: GenericParamDataRef<'_>, + infer_args: bool, + preceding_args: &[crate::next_solver::GenericArg<'db>], + ) -> crate::next_solver::GenericArg<'db>; + + fn parent_arg(&mut self, param_id: GenericParamId) -> crate::next_solver::GenericArg<'db>; +} + +/// Returns true if there was an error. +fn check_generic_args_len<'db>( + args_and_bindings: Option<&GenericArgs>, + def: GenericDefId, + def_generics: &Generics, + infer_args: bool, + lifetime_elision: &LifetimeElisionKind<'db>, + lowering_assoc_type_generics: bool, + ctx: &mut impl GenericArgsLowerer<'db>, +) -> bool { + let mut had_error = false; + + let (mut provided_lifetimes_count, mut provided_types_and_consts_count) = (0usize, 0usize); + if let Some(args_and_bindings) = args_and_bindings { + let args_no_self = &args_and_bindings.args[usize::from(args_and_bindings.has_self_type)..]; + for arg in args_no_self { + match arg { + GenericArg::Lifetime(_) => provided_lifetimes_count += 1, + GenericArg::Type(_) | GenericArg::Const(_) => provided_types_and_consts_count += 1, + } + } + } + + let lifetime_args_len = def_generics.len_lifetimes_self(); + if provided_lifetimes_count == 0 && lifetime_args_len > 0 && !lowering_assoc_type_generics { + // In generic associated types, we never allow inferring the lifetimes. + match lifetime_elision { + &LifetimeElisionKind::AnonymousCreateParameter { report_in_path } => { + ctx.report_elided_lifetimes_in_path(def, lifetime_args_len as u32, report_in_path); + had_error |= report_in_path; + } + LifetimeElisionKind::AnonymousReportError => { + ctx.report_missing_lifetime(def, lifetime_args_len as u32); + had_error = true + } + LifetimeElisionKind::ElisionFailure => { + ctx.report_elision_failure(def, lifetime_args_len as u32); + had_error = true; + } + LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: _ } => { + // FIXME: Check there are other lifetimes in scope, and error/lint. + } + LifetimeElisionKind::Elided(_) => { + ctx.report_elided_lifetimes_in_path(def, lifetime_args_len as u32, false); + } + LifetimeElisionKind::Infer => { + // Allow eliding lifetimes. 
+ } + } + } else if lifetime_args_len != provided_lifetimes_count { + ctx.report_len_mismatch( + def, + provided_lifetimes_count as u32, + lifetime_args_len as u32, + IncorrectGenericsLenKind::Lifetimes, + ); + had_error = true; + } + + let defaults_count = + def_generics.iter_self_type_or_consts().filter(|(_, param)| param.has_default()).count(); + let named_type_and_const_params_count = def_generics + .iter_self_type_or_consts() + .filter(|(_, param)| match param { + TypeOrConstParamData::TypeParamData(param) => { + param.provenance == TypeParamProvenance::TypeParamList + } + TypeOrConstParamData::ConstParamData(_) => true, + }) + .count(); + let expected_max = named_type_and_const_params_count; + let expected_min = + if infer_args { 0 } else { named_type_and_const_params_count - defaults_count }; + if provided_types_and_consts_count < expected_min + || expected_max < provided_types_and_consts_count + { + ctx.report_len_mismatch( + def, + provided_types_and_consts_count as u32, + named_type_and_const_params_count as u32, + IncorrectGenericsLenKind::TypesAndConsts, + ); + had_error = true; + } + + had_error +} + +pub(crate) fn substs_from_args_and_bindings<'db>( + db: &'db dyn HirDatabase, + store: &ExpressionStore, + args_and_bindings: Option<&GenericArgs>, + def: GenericDefId, + mut infer_args: bool, + lifetime_elision: LifetimeElisionKind<'db>, + lowering_assoc_type_generics: bool, + explicit_self_ty: Option>, + ctx: &mut impl GenericArgsLowerer<'db>, +) -> crate::next_solver::GenericArgs<'db> { + let interner = DbInterner::new_with(db, None, None); + + tracing::debug!(?args_and_bindings); + + // Order is + // - Parent parameters + // - Optional Self parameter + // - Lifetime parameters + // - Type or Const parameters + let def_generics = generics(db, def); + let args_slice = args_and_bindings.map(|it| &*it.args).unwrap_or_default(); + + // We do not allow inference if there are specified args, i.e. we do not allow partial inference. + let has_non_lifetime_args = + args_slice.iter().any(|arg| !matches!(arg, GenericArg::Lifetime(_))); + infer_args &= !has_non_lifetime_args; + + let had_count_error = check_generic_args_len( + args_and_bindings, + def, + &def_generics, + infer_args, + &lifetime_elision, + lowering_assoc_type_generics, + ctx, + ); + + let mut substs = Vec::with_capacity(def_generics.len()); + + substs.extend(def_generics.iter_parent_id().map(|id| ctx.parent_arg(id))); + + let mut args = args_slice.iter().enumerate().peekable(); + let mut params = def_generics.iter_self().peekable(); + + // If we encounter a type or const when we expect a lifetime, we infer the lifetimes. + // If we later encounter a lifetime, we know that the arguments were provided in the + // wrong order. `force_infer_lt` records the type or const that forced lifetimes to be + // inferred, so we can use it for diagnostics later. + let mut force_infer_lt = None; + + let has_self_arg = args_and_bindings.is_some_and(|it| it.has_self_type); + // First, handle `Self` parameter. Consume it from the args if provided, otherwise from `explicit_self_ty`, + // and lastly infer it. + if let Some(&( + self_param_id, + self_param @ GenericParamDataRef::TypeParamData(TypeParamData { + provenance: TypeParamProvenance::TraitSelf, + .. 
+ }), + )) = params.peek() + { + let self_ty = if has_self_arg { + let (_, self_ty) = args.next().expect("has_self_type=true, should have Self type"); + ctx.provided_kind(self_param_id, self_param, self_ty) + } else { + explicit_self_ty.map(|it| it.into()).unwrap_or_else(|| { + ctx.inferred_kind(def, self_param_id, self_param, infer_args, &substs) + }) + }; + params.next(); + substs.push(self_ty); + } + + loop { + // We're going to iterate through the generic arguments that the user + // provided, matching them with the generic parameters we expect. + // Mismatches can occur as a result of elided lifetimes, or for malformed + // input. We try to handle both sensibly. + match (args.peek(), params.peek()) { + (Some(&(arg_idx, arg)), Some(&(param_id, param))) => match (arg, param) { + (GenericArg::Type(_), GenericParamDataRef::TypeParamData(type_param)) + if type_param.provenance == TypeParamProvenance::ArgumentImplTrait => + { + // Do not allow specifying `impl Trait` explicitly. We already err at that, but if we won't handle it here + // we will handle it as if it was specified, instead of inferring it. + substs.push(ctx.inferred_kind(def, param_id, param, infer_args, &substs)); + params.next(); + } + (GenericArg::Lifetime(_), GenericParamDataRef::LifetimeParamData(_)) + | (GenericArg::Type(_), GenericParamDataRef::TypeParamData(_)) + | (GenericArg::Const(_), GenericParamDataRef::ConstParamData(_)) => { + substs.push(ctx.provided_kind(param_id, param, arg)); + args.next(); + params.next(); + } + ( + GenericArg::Type(_) | GenericArg::Const(_), + GenericParamDataRef::LifetimeParamData(_), + ) => { + // We expected a lifetime argument, but got a type or const + // argument. That means we're inferring the lifetime. + substs.push(ctx.inferred_kind(def, param_id, param, infer_args, &substs)); + params.next(); + force_infer_lt = Some((arg_idx as u32, param_id)); + } + (GenericArg::Type(type_ref), GenericParamDataRef::ConstParamData(_)) => { + if let Some(konst) = type_looks_like_const(store, *type_ref) { + let GenericParamId::ConstParamId(param_id) = param_id else { + panic!("unmatching param kinds"); + }; + let const_ty = const_param_ty_query(db, param_id); + substs.push(ctx.provided_type_like_const(const_ty, konst).into()); + args.next(); + params.next(); + } else { + // See the `_ => { ... }` branch. + if !had_count_error { + ctx.report_arg_mismatch(param_id, arg_idx as u32, has_self_arg); + } + while args.next().is_some() {} + } + } + _ => { + // We expected one kind of parameter, but the user provided + // another. This is an error. However, if we already know that + // the arguments don't match up with the parameters, we won't issue + // an additional error, as the user already knows what's wrong. + if !had_count_error { + ctx.report_arg_mismatch(param_id, arg_idx as u32, has_self_arg); + } + + // We've reported the error, but we want to make sure that this + // problem doesn't bubble down and create additional, irrelevant + // errors. In this case, we're simply going to ignore the argument + // and any following arguments. The rest of the parameters will be + // inferred. + while args.next().is_some() {} + } + }, + + (Some(&(_, arg)), None) => { + // We should never be able to reach this point with well-formed input. + // There are two situations in which we can encounter this issue. + // + // 1. The number of arguments is incorrect. In this case, an error + // will already have been emitted, and we can ignore it. + // 2. We've inferred some lifetimes, which have been provided later (i.e. 
+ // after a type or const). We want to throw an error in this case. + if !had_count_error { + assert!( + matches!(arg, GenericArg::Lifetime(_)), + "the only possible situation here is incorrect lifetime order" + ); + let (provided_arg_idx, param_id) = + force_infer_lt.expect("lifetimes ought to have been inferred"); + ctx.report_arg_mismatch(param_id, provided_arg_idx, has_self_arg); + } + + break; + } + + (None, Some(&(param_id, param))) => { + // If there are fewer arguments than parameters, it means we're inferring the remaining arguments. + let param = if let GenericParamId::LifetimeParamId(_) = param_id { + match &lifetime_elision { + LifetimeElisionKind::ElisionFailure + | LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true } + | LifetimeElisionKind::AnonymousReportError => { + assert!(had_count_error); + ctx.inferred_kind(def, param_id, param, infer_args, &substs) + } + LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: _ } => { + Region::new_static(interner).into() + } + LifetimeElisionKind::Elided(lifetime) => (*lifetime).into(), + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false } + | LifetimeElisionKind::Infer => { + // FIXME: With `AnonymousCreateParameter`, we need to create a new lifetime parameter here + // (but this will probably be done in hir-def lowering instead). + ctx.inferred_kind(def, param_id, param, infer_args, &substs) + } + } + } else { + ctx.inferred_kind(def, param_id, param, infer_args, &substs) + }; + substs.push(param); + params.next(); + } + + (None, None) => break, + } + } + + crate::next_solver::GenericArgs::new_from_iter(interner, substs) +} + +fn type_looks_like_const( + store: &ExpressionStore, + type_ref: TypeRefId, +) -> Option> { + // A path/`_` const will be parsed as a type, instead of a const, because when parsing/lowering + // in hir-def we don't yet know the expected argument kind. rustc does this a bit differently, + // when lowering to HIR it resolves the path, and if it doesn't resolve to the type namespace + // it is lowered as a const. Our behavior could deviate from rustc when the value is resolvable + // in both the type and value namespaces, but I believe we only allow more code. 
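// A toy model of the "missing lifetime argument" branch above. `Elision` and `Region`
// are stand-ins, not rust-analyzer's `LifetimeElisionKind`/`Region`, and the
// error-reporting variants of the real enum are collapsed into `Infer` here: when fewer
// lifetimes are written than the definition declares, what fills the gap depends on the
// elision rules at that position.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Region { Static, Var(u32) }

#[derive(Clone, Copy)]
enum Elision {
    /// e.g. trait-object lifetime defaults in statics and consts.
    Static,
    /// e.g. a single input lifetime that flows into the output type.
    Elided(Region),
    /// everything else: make a fresh variable and let inference decide.
    Infer,
}

fn fill_missing_lifetime(elision: Elision, fresh_var: &mut u32) -> Region {
    match elision {
        Elision::Static => Region::Static,
        Elision::Elided(region) => region,
        Elision::Infer => {
            let region = Region::Var(*fresh_var);
            *fresh_var += 1;
            region
        }
    }
}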
+ let type_ref = &store[type_ref]; + match type_ref { + TypeRef::Path(path) => Some(TypeLikeConst::Path(path)), + TypeRef::Placeholder => Some(TypeLikeConst::Infer), + _ => None, + } +} + +fn unknown_subst<'db>( + interner: DbInterner<'db>, + def: impl Into, +) -> crate::next_solver::GenericArgs<'db> { + let params = generics(interner.db(), def.into()); + crate::next_solver::GenericArgs::new_from_iter( + interner, + params.iter_id().map(|id| match id { + GenericParamId::TypeParamId(_) => Ty::new_error(interner, ErrorGuaranteed).into(), + GenericParamId::ConstParamId(id) => { + unknown_const_as_generic(const_param_ty_query(interner.db(), id)) + } + GenericParamId::LifetimeParamId(_) => { + crate::next_solver::Region::error(interner).into() + } + }), + ) +} + +pub(crate) fn builtin<'db>(interner: DbInterner<'db>, builtin: BuiltinType) -> Ty<'db> { + match builtin { + BuiltinType::Char => Ty::new(interner, rustc_type_ir::TyKind::Char), + BuiltinType::Bool => Ty::new_bool(interner), + BuiltinType::Str => Ty::new(interner, rustc_type_ir::TyKind::Str), + BuiltinType::Int(t) => { + let int_ty = match primitive::int_ty_from_builtin(t) { + chalk_ir::IntTy::Isize => rustc_type_ir::IntTy::Isize, + chalk_ir::IntTy::I8 => rustc_type_ir::IntTy::I8, + chalk_ir::IntTy::I16 => rustc_type_ir::IntTy::I16, + chalk_ir::IntTy::I32 => rustc_type_ir::IntTy::I32, + chalk_ir::IntTy::I64 => rustc_type_ir::IntTy::I64, + chalk_ir::IntTy::I128 => rustc_type_ir::IntTy::I128, + }; + Ty::new_int(interner, int_ty) + } + BuiltinType::Uint(t) => { + let uint_ty = match primitive::uint_ty_from_builtin(t) { + chalk_ir::UintTy::Usize => rustc_type_ir::UintTy::Usize, + chalk_ir::UintTy::U8 => rustc_type_ir::UintTy::U8, + chalk_ir::UintTy::U16 => rustc_type_ir::UintTy::U16, + chalk_ir::UintTy::U32 => rustc_type_ir::UintTy::U32, + chalk_ir::UintTy::U64 => rustc_type_ir::UintTy::U64, + chalk_ir::UintTy::U128 => rustc_type_ir::UintTy::U128, + }; + Ty::new_uint(interner, uint_ty) + } + BuiltinType::Float(t) => { + let float_ty = match primitive::float_ty_from_builtin(t) { + chalk_ir::FloatTy::F16 => rustc_type_ir::FloatTy::F16, + chalk_ir::FloatTy::F32 => rustc_type_ir::FloatTy::F32, + chalk_ir::FloatTy::F64 => rustc_type_ir::FloatTy::F64, + chalk_ir::FloatTy::F128 => rustc_type_ir::FloatTy::F128, + }; + Ty::new_float(interner, float_ty) + } + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs index 9d3d2044c43e4..5125a38825cb8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs @@ -3,8 +3,6 @@ //! Chalk (in both directions); plus some helper functions for more specialized //! conversions. 
-use chalk_solve::rust_ir; - use hir_def::{LifetimeParamId, TraitId, TypeAliasId, TypeOrConstParamId}; use salsa::{ Id, @@ -54,23 +52,6 @@ impl ToChalk for CallableDefId { } } -pub(crate) struct TypeAliasAsValue(pub(crate) TypeAliasId); - -impl ToChalk for TypeAliasAsValue { - type Chalk = chalk_db::AssociatedTyValueId; - - fn to_chalk(self, _db: &dyn HirDatabase) -> chalk_db::AssociatedTyValueId { - rust_ir::AssociatedTyValueId(self.0.as_id()) - } - - fn from_chalk( - _db: &dyn HirDatabase, - assoc_ty_value_id: chalk_db::AssociatedTyValueId, - ) -> TypeAliasAsValue { - TypeAliasAsValue(TypeAliasId::from_id(assoc_ty_value_id.0)) - } -} - impl From for crate::db::InternedOpaqueTyId { fn from(id: OpaqueTyId) -> Self { FromId::from_id(id.0) @@ -123,7 +104,10 @@ pub fn from_assoc_type_id(id: AssocTypeId) -> TypeAliasId { FromId::from_id(id.0) } -pub fn from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> TypeOrConstParamId { +pub fn from_placeholder_idx( + db: &dyn HirDatabase, + idx: PlaceholderIndex, +) -> (TypeOrConstParamId, u32) { assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT); // SAFETY: We cannot really encapsulate this unfortunately, so just hope this is sound. let interned_id = @@ -131,15 +115,32 @@ pub fn from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> Type interned_id.loc(db) } -pub fn to_placeholder_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> PlaceholderIndex { - let interned_id = InternedTypeOrConstParamId::new(db, id); +pub fn to_placeholder_idx( + db: &dyn HirDatabase, + id: TypeOrConstParamId, + idx: u32, +) -> PlaceholderIndex { + let interned_id = InternedTypeOrConstParamId::new(db, (id, idx)); PlaceholderIndex { ui: chalk_ir::UniverseIndex::ROOT, idx: interned_id.as_id().index() as usize, } } -pub fn lt_from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> LifetimeParamId { +pub fn to_placeholder_idx_no_index( + db: &dyn HirDatabase, + id: TypeOrConstParamId, +) -> PlaceholderIndex { + let index = crate::generics::generics(db, id.parent) + .type_or_const_param_idx(id) + .expect("param not found"); + to_placeholder_idx(db, id, index as u32) +} + +pub fn lt_from_placeholder_idx( + db: &dyn HirDatabase, + idx: PlaceholderIndex, +) -> (LifetimeParamId, u32) { assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT); // SAFETY: We cannot really encapsulate this unfortunately, so just hope this is sound. 
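// A self-contained sketch of the new placeholder mapping: the interned key is now a
// `(parameter id, index within the generics)` pair instead of the id alone, so both
// pieces can be recovered from a placeholder index, and a `*_no_index` helper can
// compute the index when only the id is at hand. `ParamId` and the `Vec`/`HashMap`
// interner below are illustrative stand-ins for salsa interning.
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct ParamId(u32);

#[derive(Default)]
struct PlaceholderInterner {
    to_idx: HashMap<(ParamId, u32), usize>,
    from_idx: Vec<(ParamId, u32)>,
}

impl PlaceholderInterner {
    fn to_placeholder_idx(&mut self, id: ParamId, index_in_generics: u32) -> usize {
        if let Some(&idx) = self.to_idx.get(&(id, index_in_generics)) {
            return idx;
        }
        let idx = self.from_idx.len();
        self.from_idx.push((id, index_in_generics));
        self.to_idx.insert((id, index_in_generics), idx);
        idx
    }

    fn from_placeholder_idx(&self, idx: usize) -> (ParamId, u32) {
        self.from_idx[idx]
    }
}

#[test]
fn placeholder_roundtrip_keeps_the_index() {
    let mut interner = PlaceholderInterner::default();
    let idx = interner.to_placeholder_idx(ParamId(7), 2);
    assert_eq!(interner.from_placeholder_idx(idx), (ParamId(7), 2));
}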
let interned_id = @@ -147,8 +148,12 @@ pub fn lt_from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> L interned_id.loc(db) } -pub fn lt_to_placeholder_idx(db: &dyn HirDatabase, id: LifetimeParamId) -> PlaceholderIndex { - let interned_id = InternedLifetimeParamId::new(db, id); +pub fn lt_to_placeholder_idx( + db: &dyn HirDatabase, + id: LifetimeParamId, + idx: u32, +) -> PlaceholderIndex { + let interned_id = InternedLifetimeParamId::new(db, (id, idx)); PlaceholderIndex { ui: chalk_ir::UniverseIndex::ROOT, idx: interned_id.as_id().index() as usize, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs index b22781e947013..7fa3d31fe5fdc 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs @@ -16,22 +16,29 @@ use hir_def::{ use hir_expand::name::Name; use intern::sym; use rustc_hash::{FxHashMap, FxHashSet}; +use rustc_type_ir::inherent::{IntoKind, SliceLike, Ty as _}; use smallvec::{SmallVec, smallvec}; use stdx::never; use triomphe::Arc; use crate::{ AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, DynTyExt, ForeignDefId, GenericArgData, - Goal, Guidance, InEnvironment, Interner, Mutability, Scalar, Solution, Substitution, - TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, TyKind, TyVariableKind, - VariableKind, WhereClause, + Goal, InEnvironment, Interner, Mutability, Scalar, Substitution, TraitEnvironment, TraitRef, + TraitRefExt, Ty, TyBuilder, TyExt, TyKind, VariableKind, WhereClause, autoderef::{self, AutoderefKind}, db::HirDatabase, - error_lifetime, from_chalk_trait_id, from_foreign_def_id, + from_chalk_trait_id, from_foreign_def_id, infer::{Adjust, Adjustment, OverloadedDeref, PointerCast, unify::InferenceTable}, lang_items::is_box, + next_solver::{ + self, SolverDefId, + fulfill::FulfillmentCtxt, + infer::DefineOpaqueTypes, + mapping::{ChalkToNextSolver, NextSolverToChalk}, + }, primitive::{FloatTy, IntTy, UintTy}, to_chalk_trait_id, + traits::next_trait_solve_canonical_in_ctxt, utils::all_super_traits, }; @@ -43,6 +50,7 @@ pub enum TyFingerprint { Slice, Array, Never, + Ref(Mutability), RawPtr(Mutability), Scalar(Scalar), // These can have user-defined impls: @@ -88,7 +96,7 @@ impl TyFingerprint { TyKind::Raw(mutability, ..) => TyFingerprint::RawPtr(*mutability), TyKind::Foreign(alias_id, ..) => TyFingerprint::ForeignType(*alias_id), TyKind::Dyn(_) => ty.dyn_trait().map(TyFingerprint::Dyn)?, - TyKind::Ref(_, _, ty) => return TyFingerprint::for_trait_impl(ty), + TyKind::Ref(mutability, _, _) => TyFingerprint::Ref(*mutability), TyKind::Tuple(_, subst) => { let first_ty = subst.interned().first().map(|arg| arg.assert_ty_ref(Interner)); match first_ty { @@ -97,6 +105,11 @@ impl TyFingerprint { } } TyKind::AssociatedType(_, _) + // FIXME(next-solver): Putting `Alias` here is *probably* incorrect, AFAIK it should return `None`. But this breaks + // flyimport, which uses an incorrect but fast method resolution algorithm. Therefore we put it here, + // because this function is only called by flyimport, and anyway we should get rid of `TyFingerprint` + // and switch to `rustc_type_ir`'s `SimplifiedType`. 
+ | TyKind::Alias(_) | TyKind::OpaqueType(_, _) | TyKind::FnDef(_, _) | TyKind::Closure(_, _) @@ -105,14 +118,94 @@ impl TyFingerprint { TyKind::Function(fn_ptr) => { TyFingerprint::Function(fn_ptr.substitution.0.len(Interner) as u32) } - TyKind::Alias(_) - | TyKind::Placeholder(_) + TyKind::Placeholder(_) | TyKind::BoundVar(_) | TyKind::InferenceVar(_, _) | TyKind::Error => return None, }; Some(fp) } + + /// Creates a TyFingerprint for looking up a trait impl. + pub fn for_trait_impl_ns<'db>(ty: &next_solver::Ty<'db>) -> Option { + use rustc_type_ir::TyKind; + let fp = match (*ty).kind() { + TyKind::Str => TyFingerprint::Str, + TyKind::Never => TyFingerprint::Never, + TyKind::Slice(..) => TyFingerprint::Slice, + TyKind::Array(..) => TyFingerprint::Array, + TyKind::Int(int) => TyFingerprint::Scalar(Scalar::Int(match int { + rustc_type_ir::IntTy::Isize => IntTy::Isize, + rustc_type_ir::IntTy::I8 => IntTy::I8, + rustc_type_ir::IntTy::I16 => IntTy::I16, + rustc_type_ir::IntTy::I32 => IntTy::I32, + rustc_type_ir::IntTy::I64 => IntTy::I64, + rustc_type_ir::IntTy::I128 => IntTy::I128, + })), + TyKind::Uint(uint) => TyFingerprint::Scalar(Scalar::Uint(match uint { + rustc_type_ir::UintTy::Usize => UintTy::Usize, + rustc_type_ir::UintTy::U8 => UintTy::U8, + rustc_type_ir::UintTy::U16 => UintTy::U16, + rustc_type_ir::UintTy::U32 => UintTy::U32, + rustc_type_ir::UintTy::U64 => UintTy::U64, + rustc_type_ir::UintTy::U128 => UintTy::U128, + })), + TyKind::Float(float) => TyFingerprint::Scalar(Scalar::Float(match float { + rustc_type_ir::FloatTy::F16 => FloatTy::F16, + rustc_type_ir::FloatTy::F32 => FloatTy::F32, + rustc_type_ir::FloatTy::F64 => FloatTy::F64, + rustc_type_ir::FloatTy::F128 => FloatTy::F128, + })), + TyKind::Bool => TyFingerprint::Scalar(Scalar::Bool), + TyKind::Char => TyFingerprint::Scalar(Scalar::Char), + TyKind::Adt(def, _) => TyFingerprint::Adt(def.inner().id), + TyKind::RawPtr(.., mutability) => match mutability { + rustc_ast_ir::Mutability::Mut => TyFingerprint::RawPtr(Mutability::Mut), + rustc_ast_ir::Mutability::Not => TyFingerprint::RawPtr(Mutability::Not), + }, + TyKind::Foreign(def) => TyFingerprint::ForeignType(crate::to_foreign_def_id(def.0)), + TyKind::Dynamic(bounds, _) => { + let trait_ref = bounds + .as_slice() + .iter() + .map(|b| (*b).skip_binder()) + .filter_map(|b| match b { + rustc_type_ir::ExistentialPredicate::Trait(t) => Some(t.def_id), + _ => None, + }) + .next()?; + TyFingerprint::Dyn(trait_ref.0) + } + TyKind::Ref(_, _, mutability) => match mutability { + rustc_ast_ir::Mutability::Mut => TyFingerprint::Ref(Mutability::Mut), + rustc_ast_ir::Mutability::Not => TyFingerprint::Ref(Mutability::Not), + }, + TyKind::Tuple(tys) => { + let first_ty = tys.as_slice().iter().next(); + match first_ty { + Some(ty) => return TyFingerprint::for_trait_impl_ns(ty), + None => TyFingerprint::Unit, + } + } + TyKind::FnDef(_, _) + | TyKind::Closure(_, _) + | TyKind::Coroutine(..) + | TyKind::CoroutineWitness(..) + | TyKind::Pat(..) + | TyKind::CoroutineClosure(..) => TyFingerprint::Unnameable, + TyKind::FnPtr(sig, _) => { + TyFingerprint::Function(sig.inputs().skip_binder().len() as u32) + } + TyKind::Alias(..) + | TyKind::Placeholder(_) + | TyKind::Bound(..) + | TyKind::Infer(_) + | TyKind::Error(_) + | TyKind::Param(..) + | TyKind::UnsafeBinder(..) 
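// A simplified model of what a fingerprint buys: impls are bucketed by a coarse key of
// their self type so that lookup only scans impls whose key matches (plus impls with no
// key at all, e.g. blanket impls). The enums here are toy stand-ins for
// `TyKind`/`TyFingerprint`; note that `&T` now gets its own `Ref(mutability)` bucket
// instead of being folded into the pointee's bucket.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Mutability { Not, Mut }

#[derive(Clone, Debug)]
enum ToyTy {
    Adt(u32),
    Ref(Mutability, Box<ToyTy>),
    Tuple(Vec<ToyTy>),
    Slice(Box<ToyTy>),
    Infer,
}

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Fingerprint {
    Adt(u32),
    Ref(Mutability),
    Unit,
    Slice,
}

fn fingerprint(ty: &ToyTy) -> Option<Fingerprint> {
    Some(match ty {
        ToyTy::Adt(id) => Fingerprint::Adt(*id),
        ToyTy::Ref(m, _) => Fingerprint::Ref(*m),
        ToyTy::Slice(_) => Fingerprint::Slice,
        // A tuple is keyed by its first element, the empty tuple by `Unit`.
        ToyTy::Tuple(tys) => return tys.first().map_or(Some(Fingerprint::Unit), fingerprint),
        // A type that is not yet known can match anything, so it gets no key.
        ToyTy::Infer => return None,
    })
}

#[test]
fn shared_and_unique_references_land_in_different_buckets() {
    let shared = ToyTy::Ref(Mutability::Not, Box::new(ToyTy::Adt(0)));
    let unique = ToyTy::Ref(Mutability::Mut, Box::new(ToyTy::Adt(0)));
    assert_ne!(fingerprint(&shared), fingerprint(&unique));
}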
=> return None, + }; + Some(fp) + } } pub(crate) const ALL_INT_FPS: [TyFingerprint; 12] = [ @@ -446,9 +539,9 @@ pub fn def_crates(db: &dyn HirDatabase, ty: &Ty, cur_crate: Crate) -> Option, +pub(crate) fn lookup_method<'db>( + db: &'db dyn HirDatabase, + ty: &next_solver::Canonical<'db, crate::next_solver::Ty<'db>>, env: Arc, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, @@ -529,18 +622,23 @@ pub struct ReceiverAdjustments { } impl ReceiverAdjustments { - pub(crate) fn apply(&self, table: &mut InferenceTable<'_>, ty: Ty) -> (Ty, Vec) { - let mut ty = table.resolve_ty_shallow(&ty); + pub(crate) fn apply( + &self, + table: &mut InferenceTable<'_>, + mut ty: Ty, + ) -> (Ty, Vec) { let mut adjust = Vec::new(); + let mut autoderef = table.autoderef(ty.to_nextsolver(table.interner)); + autoderef.next(); for _ in 0..self.autoderefs { - match autoderef::autoderef_step(table, ty.clone(), true, false) { + match autoderef.next() { None => { never!("autoderef not possible for {:?}", ty); ty = TyKind::Error.intern(Interner); break; } - Some((kind, new_ty)) => { - ty = new_ty.clone(); + Some((new_ty, _)) => { + ty = new_ty.to_chalk(autoderef.table.interner); let mutbl = match self.autoref { Some(AutorefOrPtrAdjustment::Autoref(m)) => Some(m), Some(AutorefOrPtrAdjustment::ToConstPtr) => Some(Mutability::Not), @@ -548,11 +646,11 @@ impl ReceiverAdjustments { None => None, }; adjust.push(Adjustment { - kind: Adjust::Deref(match kind { + kind: Adjust::Deref(match autoderef.steps().last().unwrap().1 { AutoderefKind::Overloaded => Some(OverloadedDeref(mutbl)), AutoderefKind::Builtin => None, }), - target: new_ty, + target: ty.clone(), }); } } @@ -610,9 +708,9 @@ impl ReceiverAdjustments { // This would be nicer if it just returned an iterator, but that runs into // lifetime problems, because we need to borrow temp `CrateImplDefs`. // FIXME add a context type here? 
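// A rough standalone model of replaying recorded receiver adjustments: each autoderef
// step rewrites the receiver type and records a `Deref` adjustment, and an optional
// autoref is applied afterwards. `ToyTy`/`Adjust` are illustrative stand-ins; the real
// code drives the `Autoderef` iterator and the trait solver for overloaded derefs.
#[derive(Clone, Debug, PartialEq)]
enum ToyTy { Ref(Box<ToyTy>), Named(&'static str) }

#[derive(Debug, PartialEq)]
enum Adjust { Deref, Borrow }

fn apply_adjustments(mut ty: ToyTy, autoderefs: usize, autoref: bool) -> (ToyTy, Vec<Adjust>) {
    let mut adjust = Vec::new();
    for _ in 0..autoderefs {
        match ty {
            ToyTy::Ref(inner) => {
                adjust.push(Adjust::Deref);
                ty = *inner;
            }
            // The recorded steps no longer apply; the real code reports this with
            // `never!` and bails out with an error type.
            other => return (other, adjust),
        }
    }
    if autoref {
        adjust.push(Adjust::Borrow);
        ty = ToyTy::Ref(Box::new(ty));
    }
    (ty, adjust)
}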
-pub(crate) fn iterate_method_candidates( - ty: &Canonical, - db: &dyn HirDatabase, +pub(crate) fn iterate_method_candidates<'db, T>( + ty: &next_solver::Canonical<'db, crate::next_solver::Ty<'db>>, + db: &'db dyn HirDatabase, env: Arc, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, @@ -807,11 +905,17 @@ fn find_matching_impl( let wcs = crate::chalk_db::convert_where_clauses(db, impl_.into(), &impl_substs) .into_iter() - .map(|b| b.cast(Interner)); - let goal = crate::Goal::all(Interner, wcs); - table.try_obligation(goal.clone())?; - table.register_obligation(goal); - Some((impl_.impl_items(db), table.resolve_completely(impl_substs))) + .map(|b| -> Goal { b.cast(Interner) }); + for goal in wcs { + if table.try_obligation(goal.clone()).no_solution() { + return None; + } + table.register_obligation(goal.to_nextsolver(table.interner)); + } + Some(( + impl_.impl_items(db), + table.resolve_completely::<_, crate::next_solver::GenericArgs<'_>>(impl_substs), + )) }) }) } @@ -956,9 +1060,9 @@ pub fn check_orphan_rules(db: &dyn HirDatabase, impl_: ImplId) -> bool { is_not_orphan } -pub fn iterate_path_candidates( - ty: &Canonical, - db: &dyn HirDatabase, +pub fn iterate_path_candidates<'db>( + ty: &next_solver::Canonical<'db, crate::next_solver::Ty<'db>>, + db: &'db dyn HirDatabase, env: Arc, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, @@ -978,9 +1082,9 @@ pub fn iterate_path_candidates( ) } -pub fn iterate_method_candidates_dyn( - ty: &Canonical, - db: &dyn HirDatabase, +pub fn iterate_method_candidates_dyn<'db>( + ty: &next_solver::Canonical<'db, crate::next_solver::Ty<'db>>, + db: &'db dyn HirDatabase, env: Arc, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, @@ -1018,7 +1122,7 @@ pub fn iterate_method_candidates_dyn( // types*. 
let mut table = InferenceTable::new(db, env); - let ty = table.instantiate_canonical(ty.clone()); + let ty = table.instantiate_canonical_ns(*ty); let deref_chain = autoderef_method_receiver(&mut table, ty); deref_chain.into_iter().try_for_each(|(receiver_ty, adj)| { @@ -1049,19 +1153,16 @@ pub fn iterate_method_candidates_dyn( } #[tracing::instrument(skip_all, fields(name = ?name))] -fn iterate_method_candidates_with_autoref( - table: &mut InferenceTable<'_>, - receiver_ty: Canonical, +fn iterate_method_candidates_with_autoref<'db>( + table: &mut InferenceTable<'db>, + receiver_ty: next_solver::Canonical<'db, crate::next_solver::Ty<'db>>, first_adjustment: ReceiverAdjustments, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: Option<&Name>, callback: &mut dyn MethodCandidateCallback, ) -> ControlFlow<()> { - if receiver_ty.value.is_general_var(Interner, &receiver_ty.binders) { - // don't try to resolve methods on unknown types - return ControlFlow::Continue(()); - } + let interner = table.interner; let mut iterate_method_candidates_by_receiver = move |receiver_ty, first_adjustment| { iterate_method_candidates_by_receiver( @@ -1076,18 +1177,27 @@ fn iterate_method_candidates_with_autoref( }; let mut maybe_reborrowed = first_adjustment.clone(); - if let Some((_, _, m)) = receiver_ty.value.as_reference() { + if let rustc_type_ir::TyKind::Ref(_, _, m) = receiver_ty.value.kind() { + let m = match m { + rustc_ast_ir::Mutability::Mut => chalk_ir::Mutability::Mut, + rustc_ast_ir::Mutability::Not => chalk_ir::Mutability::Not, + }; // Prefer reborrow of references to move maybe_reborrowed.autoref = Some(AutorefOrPtrAdjustment::Autoref(m)); maybe_reborrowed.autoderefs += 1; } - iterate_method_candidates_by_receiver(receiver_ty.clone(), maybe_reborrowed)?; + iterate_method_candidates_by_receiver(receiver_ty, maybe_reborrowed)?; - let refed = Canonical { - value: TyKind::Ref(Mutability::Not, error_lifetime(), receiver_ty.value.clone()) - .intern(Interner), - binders: receiver_ty.binders.clone(), + let refed = next_solver::Canonical { + max_universe: receiver_ty.max_universe, + variables: receiver_ty.variables, + value: next_solver::Ty::new_ref( + interner, + next_solver::Region::error(interner), + receiver_ty.value, + rustc_ast_ir::Mutability::Not, + ), }; iterate_method_candidates_by_receiver( @@ -1095,10 +1205,15 @@ fn iterate_method_candidates_with_autoref( first_adjustment.with_autoref(AutorefOrPtrAdjustment::Autoref(Mutability::Not)), )?; - let ref_muted = Canonical { - value: TyKind::Ref(Mutability::Mut, error_lifetime(), receiver_ty.value.clone()) - .intern(Interner), - binders: receiver_ty.binders.clone(), + let ref_muted = next_solver::Canonical { + max_universe: receiver_ty.max_universe, + variables: receiver_ty.variables, + value: next_solver::Ty::new_ref( + interner, + next_solver::Region::error(interner), + receiver_ty.value, + rustc_ast_ir::Mutability::Mut, + ), }; iterate_method_candidates_by_receiver( @@ -1106,10 +1221,13 @@ fn iterate_method_candidates_with_autoref( first_adjustment.with_autoref(AutorefOrPtrAdjustment::Autoref(Mutability::Mut)), )?; - if let Some((ty, Mutability::Mut)) = receiver_ty.value.as_raw_ptr() { - let const_ptr_ty = Canonical { - value: TyKind::Raw(Mutability::Not, ty.clone()).intern(Interner), - binders: receiver_ty.binders, + if let rustc_type_ir::TyKind::RawPtr(ty, rustc_ast_ir::Mutability::Mut) = + receiver_ty.value.kind() + { + let const_ptr_ty = rustc_type_ir::Canonical { + max_universe: rustc_type_ir::UniverseIndex::ZERO, + 
value: next_solver::Ty::new_ptr(interner, ty, rustc_ast_ir::Mutability::Not), + variables: receiver_ty.variables, }; iterate_method_candidates_by_receiver( const_ptr_ty, @@ -1160,30 +1278,35 @@ where } #[tracing::instrument(skip_all, fields(name = ?name))] -fn iterate_method_candidates_by_receiver( - table: &mut InferenceTable<'_>, - receiver_ty: Canonical, +fn iterate_method_candidates_by_receiver<'db>( + table: &mut InferenceTable<'db>, + receiver_ty: next_solver::Canonical<'db, crate::next_solver::Ty<'db>>, receiver_adjustments: ReceiverAdjustments, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: Option<&Name>, callback: &mut dyn MethodCandidateCallback, ) -> ControlFlow<()> { - let receiver_ty = table.instantiate_canonical(receiver_ty); + let interner = table.interner; + let receiver_ty = table.instantiate_canonical_ns(receiver_ty); + let receiver_ty: crate::Ty = receiver_ty.to_chalk(interner); // We're looking for methods with *receiver* type receiver_ty. These could // be found in any of the derefs of receiver_ty, so we have to go through // that, including raw derefs. table.run_in_snapshot(|table| { let mut autoderef = - autoderef::Autoderef::new_no_tracking(table, receiver_ty.clone(), true, true); + autoderef::Autoderef::new_no_tracking(table, receiver_ty.to_nextsolver(interner)) + .include_raw_pointers() + .use_receiver_trait(); while let Some((self_ty, _)) = autoderef.next() { iterate_inherent_methods( - &self_ty, + &self_ty.to_chalk(interner), autoderef.table, name, Some(&receiver_ty), Some(receiver_adjustments.clone()), visible_from_module, + LookupMode::MethodCall, &mut |adjustments, item, is_visible| { callback.on_inherent_method(adjustments, item, is_visible) }, @@ -1193,20 +1316,24 @@ fn iterate_method_candidates_by_receiver( })?; table.run_in_snapshot(|table| { let mut autoderef = - autoderef::Autoderef::new_no_tracking(table, receiver_ty.clone(), true, true); + autoderef::Autoderef::new_no_tracking(table, receiver_ty.to_nextsolver(interner)) + .include_raw_pointers() + .use_receiver_trait(); while let Some((self_ty, _)) = autoderef.next() { - if matches!(self_ty.kind(Interner), TyKind::InferenceVar(_, TyVariableKind::General)) { + if matches!(self_ty.kind(), crate::next_solver::TyKind::Infer(rustc_type_ir::TyVar(_))) + { // don't try to resolve methods on unknown types return ControlFlow::Continue(()); } iterate_trait_method_candidates( - &self_ty, + &self_ty.to_chalk(interner), autoderef.table, traits_in_scope, name, Some(&receiver_ty), Some(receiver_adjustments.clone()), + LookupMode::MethodCall, &mut |adjustments, item, is_visible| { callback.on_trait_method(adjustments, item, is_visible) }, @@ -1217,9 +1344,9 @@ fn iterate_method_candidates_by_receiver( } #[tracing::instrument(skip_all, fields(name = ?name))] -fn iterate_method_candidates_for_self_ty( - self_ty: &Canonical, - db: &dyn HirDatabase, +fn iterate_method_candidates_for_self_ty<'db>( + self_ty: &next_solver::Canonical<'db, crate::next_solver::Ty<'db>>, + db: &'db dyn HirDatabase, env: Arc, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, @@ -1227,7 +1354,7 @@ fn iterate_method_candidates_for_self_ty( callback: &mut dyn MethodCandidateCallback, ) -> ControlFlow<()> { let mut table = InferenceTable::new(db, env); - let self_ty = table.instantiate_canonical(self_ty.clone()); + let self_ty = table.instantiate_canonical_ns(*self_ty).to_chalk(table.interner); iterate_inherent_methods( &self_ty, &mut table, @@ -1235,6 +1362,7 @@ fn 
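// The probing order above, sketched with strings: the plain receiver first (reborrowing
// if it is already a reference), then `&T`, then `&mut T`, and for a `*mut T` receiver
// additionally `*const T`. This is only a mnemonic for the sequence of
// `iterate_method_candidates_by_receiver` calls, not a model of the real types.
fn candidate_receivers(receiver: &str) -> Vec<String> {
    let mut out = vec![receiver.to_string()];
    out.push(format!("&{receiver}"));
    out.push(format!("&mut {receiver}"));
    if let Some(pointee) = receiver.strip_prefix("*mut ") {
        out.push(format!("*const {pointee}"));
    }
    out
}

#[test]
fn probe_order() {
    assert_eq!(candidate_receivers("Foo"), ["Foo", "&Foo", "&mut Foo"]);
    assert_eq!(
        candidate_receivers("*mut Foo"),
        ["*mut Foo", "&*mut Foo", "&mut *mut Foo", "*const Foo"]
    );
}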
iterate_method_candidates_for_self_ty( None, None, visible_from_module, + LookupMode::Path, &mut |adjustments, item, is_visible| { callback.on_inherent_method(adjustments, item, is_visible) }, @@ -1246,6 +1374,7 @@ fn iterate_method_candidates_for_self_ty( name, None, None, + LookupMode::Path, &mut |adjustments, item, is_visible| { callback.on_trait_method(adjustments, item, is_visible) }, @@ -1260,12 +1389,13 @@ fn iterate_trait_method_candidates( name: Option<&Name>, receiver_ty: Option<&Ty>, receiver_adjustments: Option, + mode: LookupMode, callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, ) -> ControlFlow<()> { let db = table.db; - let canonical_self_ty = table.canonicalize(self_ty.clone()); - let TraitEnvironment { krate, block, .. } = *table.trait_env; + let canonical_self_ty = table.canonicalize(self_ty.clone().to_nextsolver(table.interner)); + let TraitEnvironment { krate, .. } = *table.trait_env; 'traits: for &t in traits_in_scope { let data = db.trait_signature(t); @@ -1305,15 +1435,22 @@ fn iterate_trait_method_candidates( for &(_, item) in t.trait_items(db).items.iter() { // Don't pass a `visible_from_module` down to `is_valid_candidate`, // since only inherent methods should be included into visibility checking. - let visible = - match is_valid_trait_method_candidate(table, t, name, receiver_ty, item, self_ty) { - IsValidCandidate::Yes => true, - IsValidCandidate::NotVisible => false, - IsValidCandidate::No => continue, - }; + let visible = match is_valid_trait_method_candidate( + table, + t, + name, + receiver_ty, + item, + self_ty, + mode, + ) { + IsValidCandidate::Yes => true, + IsValidCandidate::NotVisible => false, + IsValidCandidate::No => continue, + }; if !known_implemented { - let goal = generic_implements_goal(db, &table.trait_env, t, &canonical_self_ty); - if db.trait_solve(krate, block, goal.cast(Interner)).is_none() { + let goal = generic_implements_goal_ns(table, t, canonical_self_ty); + if next_trait_solve_canonical_in_ctxt(&table.infer_ctxt, goal).no_solution() { continue 'traits; } } @@ -1332,6 +1469,7 @@ fn iterate_inherent_methods( receiver_ty: Option<&Ty>, receiver_adjustments: Option, visible_from_module: VisibleFromModule, + mode: LookupMode, callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, ) -> ControlFlow<()> { let db = table.db; @@ -1355,6 +1493,7 @@ fn iterate_inherent_methods( receiver_adjustments.clone(), callback, traits, + mode, )?; } TyKind::Dyn(_) => { @@ -1368,6 +1507,7 @@ fn iterate_inherent_methods( receiver_adjustments.clone(), callback, traits.into_iter(), + mode, )?; } } @@ -1426,6 +1566,7 @@ fn iterate_inherent_methods( receiver_adjustments: Option, callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, traits: impl Iterator, + mode: LookupMode, ) -> ControlFlow<()> { let db = table.db; for t in traits { @@ -1439,6 +1580,7 @@ fn iterate_inherent_methods( receiver_ty, item, self_ty, + mode, ) { IsValidCandidate::Yes => true, IsValidCandidate::NotVisible => false, @@ -1485,21 +1627,17 @@ fn iterate_inherent_methods( } /// Returns the receiver type for the index trait call. 
-pub(crate) fn resolve_indexing_op( - db: &dyn HirDatabase, - env: Arc, - ty: Canonical, +pub(crate) fn resolve_indexing_op<'db>( + table: &mut InferenceTable<'db>, + ty: next_solver::Canonical<'db, next_solver::Ty<'db>>, index_trait: TraitId, ) -> Option { - let mut table = InferenceTable::new(db, env); - let ty = table.instantiate_canonical(ty); - let deref_chain = autoderef_method_receiver(&mut table, ty); + let ty = table.instantiate_canonical_ns(ty); + let deref_chain = autoderef_method_receiver(table, ty); for (ty, adj) in deref_chain { - let goal = generic_implements_goal(db, &table.trait_env, index_trait, &ty); - if db - .trait_solve(table.trait_env.krate, table.trait_env.block, goal.cast(Interner)) - .is_some() - { + //let goal = generic_implements_goal_ns(db, &table.trait_env, index_trait, &ty); + let goal = generic_implements_goal_ns(table, index_trait, ty); + if !next_trait_solve_canonical_in_ctxt(&table.infer_ctxt, goal).no_solution() { return Some(adj); } } @@ -1579,6 +1717,7 @@ fn is_valid_trait_method_candidate( receiver_ty: Option<&Ty>, item: AssocItemId, self_ty: &Ty, + mode: LookupMode, ) -> IsValidCandidate { let db = table.db; match item { @@ -1606,6 +1745,36 @@ fn is_valid_trait_method_candidate( let expected_receiver = sig.map(|s| s.params()[0].clone()).substitute(Interner, &fn_subst); + // FIXME: Clean up this mess with some context struct like rustc's `ProbeContext` + let variance = match mode { + LookupMode::MethodCall => rustc_type_ir::Variance::Covariant, + LookupMode::Path => rustc_type_ir::Variance::Invariant, + }; + let res = table + .infer_ctxt + .at( + &next_solver::infer::traits::ObligationCause::dummy(), + table.trait_env.env.to_nextsolver(table.interner), + ) + .relate( + DefineOpaqueTypes::No, + expected_receiver.to_nextsolver(table.interner), + variance, + receiver_ty.to_nextsolver(table.interner), + ); + let Ok(infer_ok) = res else { + return IsValidCandidate::No; + }; + + if !infer_ok.obligations.is_empty() { + let mut ctxt = FulfillmentCtxt::new(&table.infer_ctxt); + for pred in infer_ok.into_obligations() { + ctxt.register_predicate_obligation(&table.infer_ctxt, pred); + } + // FIXME: Are we doing this correctly? Probably better to follow rustc more closely. 
+ check_that!(ctxt.select_where_possible(&table.infer_ctxt).is_empty()); + } + check_that!(table.unify(receiver_ty, &expected_receiver)); } @@ -1683,34 +1852,16 @@ fn is_valid_impl_fn_candidate( }); for goal in goals.clone() { - let in_env = InEnvironment::new(&table.trait_env.env, goal); - let canonicalized = table.canonicalize_with_free_vars(in_env); - let solution = table.db.trait_solve( - table.trait_env.krate, - table.trait_env.block, - canonicalized.value.clone(), - ); - - match solution { - Some(Solution::Unique(canonical_subst)) => { - canonicalized.apply_solution( - table, - Canonical { - binders: canonical_subst.binders, - value: canonical_subst.value.subst, - }, - ); + match table.solve_obligation(goal) { + Ok(_) => {} + Err(_) => { + return IsValidCandidate::No; } - Some(Solution::Ambig(Guidance::Definite(substs))) => { - canonicalized.apply_solution(table, substs); - } - Some(_) => (), - None => return IsValidCandidate::No, } } for goal in goals { - if table.try_obligation(goal).is_none() { + if table.try_obligation(goal).no_solution() { return IsValidCandidate::No; } } @@ -1726,9 +1877,7 @@ pub fn implements_trait( trait_: TraitId, ) -> bool { let goal = generic_implements_goal(db, env, trait_, ty); - let solution = db.trait_solve(env.krate, env.block, goal.cast(Interner)); - - solution.is_some() + !db.trait_solve(env.krate, env.block, goal.cast(Interner)).no_solution() } pub fn implements_trait_unique( @@ -1738,9 +1887,7 @@ pub fn implements_trait_unique( trait_: TraitId, ) -> bool { let goal = generic_implements_goal(db, env, trait_, ty); - let solution = db.trait_solve(env.krate, env.block, goal.cast(Interner)); - - matches!(solution, Some(crate::Solution::Unique(_))) + db.trait_solve(env.krate, env.block, goal.cast(Interner)).certain() } /// This creates Substs for a trait with the given Self type and type variables @@ -1774,12 +1921,35 @@ fn generic_implements_goal( Canonical { binders, value } } -fn autoderef_method_receiver( - table: &mut InferenceTable<'_>, - ty: Ty, -) -> Vec<(Canonical, ReceiverAdjustments)> { - let mut deref_chain: Vec<_> = Vec::new(); - let mut autoderef = autoderef::Autoderef::new_no_tracking(table, ty, false, true); +/// This creates Substs for a trait with the given Self type and type variables +/// for all other parameters, to query the trait solver with it. 
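// The result distinctions relied on above, modelled as a tiny enum (a stand-in for the
// actual solver response type): `implements_trait` only needs "not provably false",
// while `implements_trait_unique` requires an unambiguous "yes".
#[derive(Clone, Copy, Debug, PartialEq)]
enum SolveResult { Certain, Ambiguous, NoSolution }

impl SolveResult {
    fn no_solution(self) -> bool { matches!(self, SolveResult::NoSolution) }
    fn certain(self) -> bool { matches!(self, SolveResult::Certain) }
}

fn implements_trait(solved: SolveResult) -> bool { !solved.no_solution() }
fn implements_trait_unique(solved: SolveResult) -> bool { solved.certain() }

#[test]
fn ambiguity_counts_as_implementing_but_not_uniquely() {
    assert!(implements_trait(SolveResult::Ambiguous));
    assert!(!implements_trait_unique(SolveResult::Ambiguous));
}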
+#[tracing::instrument(skip_all)] +fn generic_implements_goal_ns<'db>( + table: &mut InferenceTable<'db>, + trait_: TraitId, + self_ty: next_solver::Canonical<'db, crate::next_solver::Ty<'db>>, +) -> next_solver::Canonical<'db, next_solver::Goal<'db, crate::next_solver::Predicate<'db>>> { + let args = table.infer_ctxt.fresh_args_for_item(SolverDefId::TraitId(trait_)); + let self_ty = table.instantiate_canonical_ns(self_ty); + let trait_ref = + rustc_type_ir::TraitRef::new_from_args(table.infer_ctxt.interner, trait_.into(), args) + .with_replaced_self_ty(table.infer_ctxt.interner, self_ty); + let goal = next_solver::Goal::new( + table.infer_ctxt.interner, + table.trait_env.env.to_nextsolver(table.infer_ctxt.interner), + trait_ref, + ); + + table.canonicalize(goal) +} + +fn autoderef_method_receiver<'db>( + table: &mut InferenceTable<'db>, + ty: next_solver::Ty<'db>, +) -> Vec<(next_solver::Canonical<'db, crate::next_solver::Ty<'db>>, ReceiverAdjustments)> { + let interner = table.interner; + let mut deref_chain = Vec::new(); + let mut autoderef = autoderef::Autoderef::new_no_tracking(table, ty).use_receiver_trait(); while let Some((ty, derefs)) = autoderef.next() { deref_chain.push(( autoderef.table.canonicalize(ty), @@ -1787,12 +1957,12 @@ fn autoderef_method_receiver( )); } // As a last step, we can do array unsizing (that's the only unsizing that rustc does for method receivers!) - if let Some((TyKind::Array(parameters, _), binders, adj)) = - deref_chain.last().map(|(ty, adj)| (ty.value.kind(Interner), ty.binders.clone(), adj)) + if let Some((rustc_type_ir::Array(parameters, _), variables, max_universe, adj)) = + deref_chain.last().map(|d| (d.0.value.kind(), d.0.variables, d.0.max_universe, d.1.clone())) { - let unsized_ty = TyKind::Slice(parameters.clone()).intern(Interner); + let unsized_ty = next_solver::Ty::new_slice(interner, parameters); deref_chain.push(( - Canonical { value: unsized_ty, binders }, + next_solver::Canonical { max_universe, value: unsized_ty, variables }, ReceiverAdjustments { unsize_array: true, ..adj.clone() }, )); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs index 482b420279c90..6465099dffd7f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs @@ -34,8 +34,7 @@ pub use eval::{ }; pub use lower::{MirLowerError, lower_to_mir, mir_body_for_closure_query, mir_body_query}; pub use monomorphization::{ - monomorphize_mir_body_bad, monomorphized_mir_body_for_closure_query, - monomorphized_mir_body_query, + monomorphized_mir_body_for_closure_query, monomorphized_mir_body_query, }; use rustc_hash::FxHashMap; use smallvec::{SmallVec, smallvec}; @@ -107,7 +106,7 @@ pub enum OperandKind { } impl Operand { - fn from_concrete_const(data: Box<[u8]>, memory_map: MemoryMap, ty: Ty) -> Self { + fn from_concrete_const(data: Box<[u8]>, memory_map: MemoryMap<'static>, ty: Ty) -> Self { Operand { kind: OperandKind::Constant(intern_const_scalar( ConstScalar::Bytes(data, memory_map), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs index 52df851c30d13..2c09fb9a89e78 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs @@ -113,8 +113,13 @@ fn make_fetch_closure_field( let InternedClosure(def, _) = db.lookup_intern_closure(c.into()); let infer = db.infer(def); let (captures, _) = 
infer.closure_info(&c); - let parent_subst = ClosureSubst(subst).parent_subst(); - captures.get(f).expect("broken closure field").ty.clone().substitute(Interner, parent_subst) + let parent_subst = ClosureSubst(subst).parent_subst(db); + captures + .get(f) + .expect("broken closure field") + .ty + .clone() + .substitute(Interner, &parent_subst) } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs index dfb8ae704b996..3e658cb93ed8a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs @@ -3,6 +3,7 @@ use std::{borrow::Cow, cell::RefCell, fmt::Write, iter, mem, ops::Range}; use base_db::Crate; +use base_db::target::TargetLoadError; use chalk_ir::{Mutability, cast::Cast}; use either::Either; use hir_def::{ @@ -25,21 +26,26 @@ use rustc_apfloat::{ ieee::{Half as f16, Quad as f128}, }; use rustc_hash::{FxHashMap, FxHashSet}; +use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike}; use span::FileId; use stdx::never; use syntax::{SyntaxNodePtr, TextRange}; use triomphe::Arc; use crate::{ - AliasTy, CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstData, ConstScalar, FnDefId, - Interner, MemoryMap, Substitution, ToChalk, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, + AliasTy, CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstData, ConstScalar, Interner, + MemoryMap, Substitution, ToChalk, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, consteval::{ConstEvalError, intern_const_scalar, try_const_usize}, + consteval_nextsolver, db::{HirDatabase, InternedClosure}, display::{ClosureStyle, DisplayTarget, HirDisplay}, infer::PointerCast, layout::{Layout, LayoutError, RustcEnumVariantIdx}, - mapping::from_chalk, method_resolution::{is_dyn_method, lookup_impl_const}, + next_solver::{ + DbInterner, + mapping::{ChalkToNextSolver, convert_args_for_result, convert_ty_for_result}, + }, static_lifetime, traits::FnTrait, utils::{ClosureSubst, detect_variant_from_bytes}, @@ -78,31 +84,31 @@ macro_rules! not_supported { } #[derive(Debug, Default, Clone, PartialEq, Eq)] -pub struct VTableMap { - ty_to_id: FxHashMap, - id_to_ty: Vec, +pub struct VTableMap<'db> { + ty_to_id: FxHashMap, usize>, + id_to_ty: Vec>, } -impl VTableMap { +impl<'db> VTableMap<'db> { const OFFSET: usize = 1000; // We should add some offset to ids to make 0 (null) an invalid id. - fn id(&mut self, ty: Ty) -> usize { + fn id(&mut self, ty: crate::next_solver::Ty<'db>) -> usize { if let Some(it) = self.ty_to_id.get(&ty) { return *it; } let id = self.id_to_ty.len() + VTableMap::OFFSET; - self.id_to_ty.push(ty.clone()); + self.id_to_ty.push(ty); self.ty_to_id.insert(ty, id); id } - pub(crate) fn ty(&self, id: usize) -> Result<&Ty> { + pub(crate) fn ty(&self, id: usize) -> Result> { id.checked_sub(VTableMap::OFFSET) - .and_then(|id| self.id_to_ty.get(id)) + .and_then(|id| self.id_to_ty.get(id).copied()) .ok_or(MirEvalError::InvalidVTableId(id)) } - fn ty_of_bytes(&self, bytes: &[u8]) -> Result<&Ty> { + fn ty_of_bytes(&self, bytes: &[u8]) -> Result> { let id = from_bytes!(usize, bytes); self.ty(id) } @@ -170,12 +176,12 @@ pub struct Evaluator<'a> { /// We don't really have function pointers, i.e. pointers to some assembly instructions that we can run. Instead, we /// store the type as an interned id in place of function and vtable pointers, and we recover back the type at the /// time of use. 
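// A standalone sketch of the vtable/function-pointer encoding described above: types
// are interned into a table, and the index plus a fixed offset is written where a
// pointer would go, so that 0 stays an invalid (null) id. `ToyTy` replaces the real
// interned `Ty<'db>`.
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct ToyTy(u32);

#[derive(Default)]
struct VTableMap {
    ty_to_id: HashMap<ToyTy, usize>,
    id_to_ty: Vec<ToyTy>,
}

impl VTableMap {
    const OFFSET: usize = 1000; // keep 0 (null) out of the valid id range

    fn id(&mut self, ty: ToyTy) -> usize {
        if let Some(&id) = self.ty_to_id.get(&ty) {
            return id;
        }
        let id = self.id_to_ty.len() + Self::OFFSET;
        self.id_to_ty.push(ty);
        self.ty_to_id.insert(ty, id);
        id
    }

    fn ty(&self, id: usize) -> Option<ToyTy> {
        id.checked_sub(Self::OFFSET).and_then(|i| self.id_to_ty.get(i).copied())
    }
}

#[test]
fn ids_start_at_the_offset_and_round_trip() {
    let mut map = VTableMap::default();
    let id = map.id(ToyTy(42));
    assert_eq!(id, 1000);
    assert_eq!(map.ty(id), Some(ToyTy(42)));
    assert_eq!(map.ty(0), None); // a null "pointer" never resolves
}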
- vtable_map: VTableMap, + vtable_map: VTableMap<'a>, thread_local_storage: TlsData, random_state: oorandom::Rand64, stdout: Vec, stderr: Vec, - layout_cache: RefCell>>, + layout_cache: RefCell, Arc>>, projected_ty_cache: RefCell>, not_special_fn_cache: RefCell>, mir_or_dyn_index_cache: RefCell>, @@ -224,7 +230,7 @@ impl Interval { Self { addr, size } } - fn get<'a>(&self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> { + fn get<'a, 'db>(&self, memory: &'a Evaluator<'db>) -> Result<&'a [u8]> { memory.read_memory(self.addr, self.size) } @@ -242,7 +248,7 @@ impl Interval { } impl IntervalAndTy { - fn get<'a>(&self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> { + fn get<'a, 'db>(&self, memory: &'a Evaluator<'db>) -> Result<&'a [u8]> { memory.read_memory(self.interval.addr, self.interval.size) } @@ -269,7 +275,7 @@ impl From for IntervalOrOwned { } impl IntervalOrOwned { - fn get<'a>(&'a self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> { + fn get<'a, 'db>(&'a self, memory: &'a Evaluator<'db>) -> Result<&'a [u8]> { Ok(match self { IntervalOrOwned::Owned(o) => o, IntervalOrOwned::Borrowed(b) => b.get(memory)?, @@ -332,7 +338,7 @@ impl Address { pub enum MirEvalError { ConstEvalError(String, Box), LayoutError(LayoutError, Ty), - TargetDataLayoutNotAvailable(Arc), + TargetDataLayoutNotAvailable(TargetLoadError), /// Means that code had undefined behavior. We don't try to actively detect UB, but if it was detected /// then use this type of error. UndefinedBehavior(String), @@ -608,7 +614,13 @@ pub fn interpret_mir( memory_map.vtable.shrink_to_fit(); MemoryMap::Complex(Box::new(memory_map)) }; - Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty)) + // SAFETY: will never use this without a db + Ok(intern_const_scalar( + ConstScalar::Bytes(bytes, unsafe { + std::mem::transmute::, MemoryMap<'static>>(memory_map) + }), + ty, + )) })(); Ok((it, MirOutput { stdout: evaluator.stdout, stderr: evaluator.stderr })) } @@ -618,7 +630,7 @@ const EXECUTION_LIMIT: usize = 100_000; #[cfg(not(test))] const EXECUTION_LIMIT: usize = 10_000_000; -impl Evaluator<'_> { +impl<'db> Evaluator<'db> { pub fn new( db: &dyn HirDatabase, owner: DefWithBodyId, @@ -700,13 +712,13 @@ impl Evaluator<'_> { let InternedClosure(def, _) = self.db.lookup_intern_closure(c.into()); let infer = self.db.infer(def); let (captures, _) = infer.closure_info(&c); - let parent_subst = ClosureSubst(subst).parent_subst(); + let parent_subst = ClosureSubst(subst).parent_subst(self.db); captures .get(f) .expect("broken closure field") .ty .clone() - .substitute(Interner, parent_subst) + .substitute(Interner, &parent_subst) }, self.crate_id, ); @@ -719,6 +731,7 @@ impl Evaluator<'_> { p: &Place, locals: &'a Locals, ) -> Result<(Address, Ty, Option)> { + let interner = DbInterner::new_with(self.db, None, None); let mut addr = locals.ptr[p.local].addr; let mut ty: Ty = locals.body.locals[p.local].ty.clone(); let mut metadata: Option = None; // locals are always sized @@ -791,19 +804,19 @@ impl Evaluator<'_> { addr = addr.offset(ty_size * (from as usize)); } &ProjectionElem::ClosureField(f) => { - let layout = self.layout(&prev_ty)?; + let layout = self.layout(prev_ty.to_nextsolver(interner))?; let offset = layout.fields.offset(f).bytes_usize(); addr = addr.offset(offset); metadata = None; } ProjectionElem::Field(Either::Right(f)) => { - let layout = self.layout(&prev_ty)?; + let layout = self.layout(prev_ty.to_nextsolver(interner))?; let offset = layout.fields.offset(f.index as usize).bytes_usize(); addr = addr.offset(offset); 
metadata = None; // tuple field is always sized FIXME: This is wrong, the tail can be unsized } ProjectionElem::Field(Either::Left(f)) => { - let layout = self.layout(&prev_ty)?; + let layout = self.layout(prev_ty.to_nextsolver(interner))?; let variant_layout = match &layout.variants { Variants::Single { .. } | Variants::Empty => &layout, Variants::Multiple { variants, .. } => { @@ -835,20 +848,28 @@ impl Evaluator<'_> { Ok((addr, ty, metadata)) } - fn layout(&self, ty: &Ty) -> Result> { - if let Some(x) = self.layout_cache.borrow().get(ty) { + fn layout(&self, ty: crate::next_solver::Ty<'db>) -> Result> { + if let Some(x) = self.layout_cache.borrow().get(&ty) { return Ok(x.clone()); } + let interner = DbInterner::new_with(self.db, None, None); let r = self .db - .layout_of_ty(ty.clone(), self.trait_env.clone()) - .map_err(|e| MirEvalError::LayoutError(e, ty.clone()))?; - self.layout_cache.borrow_mut().insert(ty.clone(), r.clone()); + .layout_of_ty(ty, self.trait_env.clone()) + .map_err(|e| MirEvalError::LayoutError(e, convert_ty_for_result(interner, ty)))?; + self.layout_cache.borrow_mut().insert(ty, r.clone()); Ok(r) } fn layout_adt(&self, adt: AdtId, subst: Substitution) -> Result> { - self.layout(&TyKind::Adt(chalk_ir::AdtId(adt), subst).intern(Interner)) + let interner = DbInterner::new_with(self.db, None, None); + self.layout(crate::next_solver::Ty::new( + interner, + rustc_type_ir::TyKind::Adt( + crate::next_solver::AdtDef::new(adt, interner), + subst.to_nextsolver(interner), + ), + )) } fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals) -> Result { @@ -952,7 +973,7 @@ impl Evaluator<'_> { )? } TyKind::FnDef(def, generic_args) => self.exec_fn_def( - *def, + CallableDefId::from_chalk(self.db, *def), generic_args, destination_interval, &args, @@ -1113,6 +1134,7 @@ impl Evaluator<'_> { } fn eval_rvalue(&mut self, r: &Rvalue, locals: &mut Locals) -> Result { + let interner = DbInterner::new_with(self.db, None, None); use IntervalOrOwned::*; Ok(match r { Rvalue::Use(it) => Borrowed(self.eval_operand(it, locals)?), @@ -1436,7 +1458,7 @@ impl Evaluator<'_> { Owned(r) } AggregateKind::Tuple(ty) => { - let layout = self.layout(ty)?; + let layout = self.layout(ty.to_nextsolver(interner))?; Owned(self.construct_with_layout( layout.size.bytes_usize(), &layout, @@ -1467,7 +1489,7 @@ impl Evaluator<'_> { )?) 
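// The memoization pattern used by `Evaluator::layout` above, reduced to a standalone
// helper: a `RefCell`-guarded map lets a `&self` method cache the result of an
// expensive query keyed by a cheap, hashable handle. The `u64` key and `usize`
// "layout" are placeholders for the interned type and the real layout value.
use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;

#[derive(Default)]
struct LayoutCache {
    cache: RefCell<HashMap<u64, Rc<usize>>>,
}

impl LayoutCache {
    fn layout_of(&self, ty: u64, compute: impl FnOnce(u64) -> usize) -> Rc<usize> {
        if let Some(hit) = self.cache.borrow().get(&ty) {
            return hit.clone();
        }
        let computed = Rc::new(compute(ty));
        // The shared borrow taken for the lookup above has already ended, so taking a
        // mutable borrow here to fill the cache is fine.
        self.cache.borrow_mut().insert(ty, computed.clone());
        computed
    }
}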
} AggregateKind::Closure(ty) => { - let layout = self.layout(ty)?; + let layout = self.layout(ty.to_nextsolver(interner))?; Owned(self.construct_with_layout( layout.size.bytes_usize(), &layout, @@ -1484,6 +1506,8 @@ impl Evaluator<'_> { if let TyKind::FnDef(_, _) | TyKind::Closure(_, _) = ¤t_ty.kind(Interner) { + let interner = DbInterner::new_with(self.db, None, None); + let current_ty = current_ty.to_nextsolver(interner); let id = self.vtable_map.id(current_ty); let ptr_size = self.ptr_size(); Owned(id.to_le_bytes()[0..ptr_size].to_vec()) @@ -1623,7 +1647,8 @@ impl Evaluator<'_> { } fn compute_discriminant(&self, ty: Ty, bytes: &[u8]) -> Result { - let layout = self.layout(&ty)?; + let interner = DbInterner::new_with(self.db, None, None); + let layout = self.layout(ty.to_nextsolver(interner))?; let &TyKind::Adt(chalk_ir::AdtId(AdtId::EnumId(e)), _) = ty.kind(Interner) else { return Ok(0); }; @@ -1732,6 +1757,8 @@ impl Evaluator<'_> { } }, TyKind::Dyn(_) => { + let interner = DbInterner::new_with(self.db, None, None); + let current_ty = current_ty.to_nextsolver(interner); let vtable = self.vtable_map.id(current_ty); let mut r = Vec::with_capacity(16); let addr = addr.get(self)?; @@ -1777,6 +1804,7 @@ impl Evaluator<'_> { subst: Substitution, locals: &Locals, ) -> Result<(usize, Arc, Option<(usize, usize, i128)>)> { + let interner = DbInterner::new_with(self.db, None, None); let adt = it.adt_id(self.db); if let DefWithBodyId::VariantId(f) = locals.body.owner && let VariantId::EnumVariantId(it) = it @@ -1786,7 +1814,11 @@ impl Evaluator<'_> { // Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and // infinite sized type errors) we use a dummy layout let i = self.const_eval_discriminant(it)?; - return Ok((16, self.layout(&TyBuilder::unit())?, Some((0, 16, i)))); + return Ok(( + 16, + self.layout(crate::next_solver::Ty::new_empty_tuple(interner))?, + Some((0, 16, i)), + )); } let layout = self.layout_adt(adt, subst)?; Ok(match &layout.variants { @@ -1885,6 +1917,7 @@ impl Evaluator<'_> { #[allow(clippy::double_parens)] fn allocate_const_in_heap(&mut self, locals: &Locals, konst: &Const) -> Result { + let interner = DbInterner::new_with(self.db, None, None); let ConstData { ty, value: chalk_ir::ConstValue::Concrete(c) } = &konst.data(Interner) else { not_supported!("evaluating non concrete constant"); @@ -1945,7 +1978,7 @@ impl Evaluator<'_> { MemoryMap::Complex(cm) => cm.vtable.ty_of_bytes(bytes), }, addr, - ty, + ty.to_nextsolver(interner), locals, )?; Ok(Interval::new(addr, size)) @@ -2048,7 +2081,8 @@ impl Evaluator<'_> { } fn size_align_of(&self, ty: &Ty, locals: &Locals) -> Result> { - if let Some(layout) = self.layout_cache.borrow().get(ty) { + let interner = DbInterner::new_with(self.db, None, None); + if let Some(layout) = self.layout_cache.borrow().get(&ty.to_nextsolver(interner)) { return Ok(layout .is_sized() .then(|| (layout.size.bytes_usize(), layout.align.abi.bytes() as usize))); @@ -2061,7 +2095,7 @@ impl Evaluator<'_> { // infinite sized type errors) we use a dummy size return Ok(Some((16, 16))); } - let layout = self.layout(ty); + let layout = self.layout(ty.to_nextsolver(interner)); if self.assert_placeholder_ty_is_unused && matches!(layout, Err(MirEvalError::LayoutError(LayoutError::HasPlaceholder, _))) { @@ -2129,15 +2163,16 @@ impl Evaluator<'_> { bytes: &[u8], ty: &Ty, locals: &Locals, - ) -> Result { - fn rec( - this: &Evaluator<'_>, + ) -> Result> { + fn rec<'db>( + this: &Evaluator<'db>, bytes: &[u8], ty: &Ty, 
locals: &Locals, - mm: &mut ComplexMemoryMap, + mm: &mut ComplexMemoryMap<'db>, stack_depth_limit: usize, ) -> Result<()> { + let interner = DbInterner::new_with(this.db, None, None); if stack_depth_limit.checked_sub(1).is_none() { return Err(MirEvalError::StackOverflow); } @@ -2158,13 +2193,14 @@ impl Evaluator<'_> { let element_size = match t.kind(Interner) { TyKind::Str => 1, TyKind::Slice(t) => { - check_inner = Some(t); + check_inner = Some(t.clone()); this.size_of_sized(t, locals, "slice inner type")? } TyKind::Dyn(_) => { let t = this.vtable_map.ty_of_bytes(meta)?; - check_inner = Some(t); - this.size_of_sized(t, locals, "dyn concrete type")? + let t = convert_ty_for_result(interner, t); + check_inner = Some(t.clone()); + this.size_of_sized(&t, locals, "dyn concrete type")? } _ => return Ok(()), }; @@ -2176,7 +2212,7 @@ impl Evaluator<'_> { let addr = Address::from_bytes(addr)?; let b = this.read_memory(addr, size)?; mm.insert(addr.to_usize(), b.into()); - if let Some(ty) = check_inner { + if let Some(ty) = &check_inner { for i in 0..count { let offset = element_size * i; rec( @@ -2211,11 +2247,11 @@ impl Evaluator<'_> { } } TyKind::Tuple(_, subst) => { - let layout = this.layout(ty)?; + let layout = this.layout(ty.to_nextsolver(interner))?; for (id, ty) in subst.iter(Interner).enumerate() { let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument let offset = layout.fields.offset(id).bytes_usize(); - let size = this.layout(ty)?.size.bytes_usize(); + let size = this.layout(ty.to_nextsolver(interner))?.size.bytes_usize(); rec( this, &bytes[offset..offset + size], @@ -2229,7 +2265,7 @@ impl Evaluator<'_> { TyKind::Adt(adt, subst) => match adt.0 { AdtId::StructId(s) => { let data = s.fields(this.db); - let layout = this.layout(ty)?; + let layout = this.layout(ty.to_nextsolver(interner))?; let field_types = this.db.field_types(s.into()); for (f, _) in data.fields().iter() { let offset = layout @@ -2237,7 +2273,7 @@ impl Evaluator<'_> { .offset(u32::from(f.into_raw()) as usize) .bytes_usize(); let ty = &field_types[f].clone().substitute(Interner, subst); - let size = this.layout(ty)?.size.bytes_usize(); + let size = this.layout(ty.to_nextsolver(interner))?.size.bytes_usize(); rec( this, &bytes[offset..offset + size], @@ -2249,7 +2285,7 @@ impl Evaluator<'_> { } } AdtId::EnumId(e) => { - let layout = this.layout(ty)?; + let layout = this.layout(ty.to_nextsolver(interner))?; if let Some((v, l)) = detect_variant_from_bytes( &layout, this.db, @@ -2263,7 +2299,8 @@ impl Evaluator<'_> { let offset = l.fields.offset(u32::from(f.into_raw()) as usize).bytes_usize(); let ty = &field_types[f].clone().substitute(Interner, subst); - let size = this.layout(ty)?.size.bytes_usize(); + let size = + this.layout(ty.to_nextsolver(interner))?.size.bytes_usize(); rec( this, &bytes[offset..offset + size], @@ -2290,20 +2327,26 @@ impl Evaluator<'_> { Ok(mm) } - fn patch_addresses<'vtable>( + fn patch_addresses( &mut self, patch_map: &FxHashMap, - ty_of_bytes: impl Fn(&[u8]) -> Result<&'vtable Ty> + Copy, + ty_of_bytes: impl Fn(&[u8]) -> Result> + Copy, addr: Address, - ty: &Ty, + ty: crate::next_solver::Ty<'db>, locals: &Locals, ) -> Result<()> { + let interner = DbInterner::new_with(self.db, None, None); // FIXME: support indirect references let layout = self.layout(ty)?; - let my_size = self.size_of_sized(ty, locals, "value to patch address")?; - match ty.kind(Interner) { - TyKind::Ref(_, _, t) => { - let size = self.size_align_of(t, locals)?; + let my_size = self.size_of_sized( + 
&convert_ty_for_result(interner, ty), + locals, + "value to patch address", + )?; + use rustc_type_ir::TyKind; + match ty.kind() { + TyKind::Ref(_, t, _) => { + let size = self.size_align_of(&convert_ty_for_result(interner, t), locals)?; match size { Some(_) => { let current = from_bytes!(usize, self.read_memory(addr, my_size)?); @@ -2319,21 +2362,21 @@ impl Evaluator<'_> { } } } - TyKind::Function(_) => { - let ty = ty_of_bytes(self.read_memory(addr, my_size)?)?.clone(); + TyKind::FnPtr(_, _) => { + let ty = ty_of_bytes(self.read_memory(addr, my_size)?)?; let new_id = self.vtable_map.id(ty); self.write_memory(addr, &new_id.to_le_bytes())?; } - TyKind::Adt(id, subst) => match id.0 { + TyKind::Adt(id, args) => match id.def_id().0 { AdtId::StructId(s) => { - for (i, (_, ty)) in self.db.field_types(s.into()).iter().enumerate() { + for (i, (_, ty)) in self.db.field_types_ns(s.into()).iter().enumerate() { let offset = layout.fields.offset(i).bytes_usize(); - let ty = ty.clone().substitute(Interner, subst); + let ty = ty.instantiate(interner, args); self.patch_addresses( patch_map, ty_of_bytes, addr.offset(offset), - &ty, + ty, locals, )?; } @@ -2347,33 +2390,36 @@ impl Evaluator<'_> { self.read_memory(addr, layout.size.bytes_usize())?, e, ) { - for (i, (_, ty)) in self.db.field_types(ev.into()).iter().enumerate() { + for (i, (_, ty)) in self.db.field_types_ns(ev.into()).iter().enumerate() { let offset = layout.fields.offset(i).bytes_usize(); - let ty = ty.clone().substitute(Interner, subst); + let ty = ty.instantiate(interner, args); self.patch_addresses( patch_map, ty_of_bytes, addr.offset(offset), - &ty, + ty, locals, )?; } } } }, - TyKind::Tuple(_, subst) => { - for (id, ty) in subst.iter(Interner).enumerate() { - let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument + TyKind::Tuple(tys) => { + for (id, ty) in tys.iter().enumerate() { let offset = layout.fields.offset(id).bytes_usize(); self.patch_addresses(patch_map, ty_of_bytes, addr.offset(offset), ty, locals)?; } } TyKind::Array(inner, len) => { - let len = match try_const_usize(self.db, len) { + let len = match consteval_nextsolver::try_const_usize(self.db, len) { Some(it) => it as usize, None => not_supported!("non evaluatable array len in patching addresses"), }; - let size = self.size_of_sized(inner, locals, "inner of array")?; + let size = self.size_of_sized( + &convert_ty_for_result(interner, inner), + locals, + "inner of array", + )?; for i in 0..len { self.patch_addresses( patch_map, @@ -2384,11 +2430,13 @@ impl Evaluator<'_> { )?; } } - TyKind::AssociatedType(_, _) - | TyKind::Scalar(_) + TyKind::Bool + | TyKind::Char + | TyKind::Int(_) + | TyKind::Uint(_) + | TyKind::Float(_) | TyKind::Slice(_) - | TyKind::Raw(_, _) - | TyKind::OpaqueType(_, _) + | TyKind::RawPtr(_, _) | TyKind::FnDef(_, _) | TyKind::Str | TyKind::Never @@ -2396,12 +2444,16 @@ impl Evaluator<'_> { | TyKind::Coroutine(_, _) | TyKind::CoroutineWitness(_, _) | TyKind::Foreign(_) - | TyKind::Error + | TyKind::Error(_) | TyKind::Placeholder(_) - | TyKind::Dyn(_) - | TyKind::Alias(_) - | TyKind::BoundVar(_) - | TyKind::InferenceVar(_, _) => (), + | TyKind::Dynamic(_, _) + | TyKind::Alias(_, _) + | TyKind::Bound(_, _) + | TyKind::Infer(_) + | TyKind::Pat(_, _) + | TyKind::Param(_) + | TyKind::UnsafeBinder(_) + | TyKind::CoroutineClosure(_, _) => (), } Ok(()) } @@ -2416,14 +2468,28 @@ impl Evaluator<'_> { span: MirSpan, ) -> Result> { let id = from_bytes!(usize, bytes.get(self)?); - let next_ty = self.vtable_map.ty(id)?.clone(); - match 
next_ty.kind(Interner) { - TyKind::FnDef(def, generic_args) => { - self.exec_fn_def(*def, generic_args, destination, args, locals, target_bb, span) - } - TyKind::Closure(id, subst) => { - self.exec_closure(*id, bytes.slice(0..0), subst, destination, args, locals, span) - } + let next_ty = self.vtable_map.ty(id)?; + let interner = DbInterner::new_with(self.db, None, None); + use rustc_type_ir::TyKind; + match next_ty.kind() { + TyKind::FnDef(def, generic_args) => self.exec_fn_def( + def.0, + &convert_args_for_result(interner, generic_args.as_slice()), + destination, + args, + locals, + target_bb, + span, + ), + TyKind::Closure(id, generic_args) => self.exec_closure( + id.0.into(), + bytes.slice(0..0), + &convert_args_for_result(interner, generic_args.as_slice()), + destination, + args, + locals, + span, + ), _ => Err(MirEvalError::InternalError("function pointer to non function".into())), } } @@ -2469,7 +2535,7 @@ impl Evaluator<'_> { fn exec_fn_def( &mut self, - def: FnDefId, + def: CallableDefId, generic_args: &Substitution, destination: Interval, args: &[IntervalAndTy], @@ -2477,7 +2543,6 @@ impl Evaluator<'_> { target_bb: Option, span: MirSpan, ) -> Result> { - let def: CallableDefId = from_chalk(self.db, def); let generic_args = generic_args.clone(); match def { CallableDefId::FunctionId(def) => { @@ -2574,6 +2639,7 @@ impl Evaluator<'_> { target_bb: Option, span: MirSpan, ) -> Result> { + let interner = DbInterner::new_with(self.db, None, None); if self.detect_and_exec_special_function( def, args, @@ -2600,6 +2666,7 @@ impl Evaluator<'_> { .vtable_map .ty_of_bytes(&first_arg[self.ptr_size()..self.ptr_size() * 2])?; let mut args_for_target = args.to_vec(); + let ty = convert_ty_for_result(interner, ty); args_for_target[0] = IntervalAndTy { interval: args_for_target[0].interval.slice(0..self.ptr_size()), ty: ty.clone(), @@ -2672,6 +2739,7 @@ impl Evaluator<'_> { target_bb: Option, span: MirSpan, ) -> Result> { + let interner = DbInterner::new_with(self.db, None, None); let func = args .first() .ok_or_else(|| MirEvalError::InternalError("fn trait with no arg".into()))?; @@ -2683,22 +2751,28 @@ impl Evaluator<'_> { let id = from_bytes!(usize, &func_data.get(self)?[self.ptr_size()..self.ptr_size() * 2]); func_data = func_data.slice(0..self.ptr_size()); - func_ty = self.vtable_map.ty(id)?.clone(); + func_ty = convert_ty_for_result(interner, self.vtable_map.ty(id)?); } let size = self.size_of_sized(&func_ty, locals, "self type of fn trait")?; func_data = Interval { addr: Address::from_bytes(func_data.get(self)?)?, size }; } match &func_ty.kind(Interner) { - TyKind::FnDef(def, subst) => { - self.exec_fn_def(*def, subst, destination, &args[1..], locals, target_bb, span) - } + TyKind::FnDef(def, subst) => self.exec_fn_def( + CallableDefId::from_chalk(self.db, *def), + subst, + destination, + &args[1..], + locals, + target_bb, + span, + ), TyKind::Function(_) => { self.exec_fn_pointer(func_data, destination, &args[1..], locals, target_bb, span) } TyKind::Closure(closure, subst) => self.exec_closure( *closure, func_data, - &Substitution::from_iter(Interner, ClosureSubst(subst).parent_subst()), + &ClosureSubst(subst).parent_subst(self.db), destination, &args[1..], locals, @@ -2714,7 +2788,7 @@ impl Evaluator<'_> { Substitution::from_iter(Interner, args.iter().map(|it| it.ty.clone())), ) .intern(Interner); - let layout = self.layout(&ty)?; + let layout = self.layout(ty.to_nextsolver(interner))?; let result = self.construct_with_layout( layout.size.bytes_usize(), &layout, @@ -2901,6 +2975,7 @@ pub 
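// A toy model of the dispatch above: the "function pointer" bytes hold an id into the
// vtable map, the id is turned back into a type, and that type decides whether a plain
// function or a closure gets executed. `ToyCallable` and `lookup_ty` are hypothetical
// stand-ins for the interned callable types and `VTableMap::ty`.
enum ToyCallable { FnDef, Closure }

fn dispatch_fn_pointer(
    bytes: &[u8],
    lookup_ty: impl Fn(usize) -> Option<ToyCallable>,
) -> Result<&'static str, String> {
    let width = std::mem::size_of::<usize>();
    let Some(id_bytes) = bytes.get(..width) else {
        return Err("pointer-sized read out of bounds".to_string());
    };
    let id = usize::from_le_bytes(id_bytes.try_into().unwrap());
    match lookup_ty(id) {
        Some(ToyCallable::FnDef) => Ok("run the function body"),
        Some(ToyCallable::Closure) => Ok("run the closure with its captured environment"),
        None => Err(format!("function pointer to non-function (invalid vtable id {id})")),
    }
}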
fn render_const_using_debug_impl( owner: DefWithBodyId, c: &Const, ) -> Result { + let interner = DbInterner::new_with(db, None, None); let mut evaluator = Evaluator::new(db, owner, false, None)?; let locals = &Locals { ptr: ArenaMap::new(), @@ -2933,7 +3008,8 @@ pub fn render_const_using_debug_impl( CallableDefId::FunctionId(debug_fmt_fn).to_chalk(db), Substitution::from1(Interner, c.data(Interner).ty.clone()), ) - .intern(Interner)); + .intern(Interner) + .to_nextsolver(interner)); evaluator.write_memory(a2.offset(evaluator.ptr_size()), &debug_fmt_fn_ptr.to_le_bytes())?; // a3 = ::core::fmt::Arguments::new_v1(a1, a2) // FIXME: similarly, we should call function here, not directly working with memory. diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs index bb4c963a8ae15..f67778b0f12f3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs @@ -4,6 +4,7 @@ use std::cmp::{self, Ordering}; use chalk_ir::TyKind; +use hir_def::signatures::FunctionSignature; use hir_def::{ CrateRootModuleId, builtin_type::{BuiltinInt, BuiltinUint}, @@ -23,6 +24,10 @@ use crate::{ LangItem, Layout, Locals, Lookup, MirEvalError, MirSpan, Mutability, Result, Substitution, Ty, TyBuilder, TyExt, pad16, }, + next_solver::{ + DbInterner, + mapping::{ChalkToNextSolver, convert_ty_for_result}, + }, }; mod simd; @@ -59,17 +64,7 @@ impl Evaluator<'_> { let function_data = self.db.function_signature(def); let attrs = self.db.attrs(def.into()); - let is_intrinsic = attrs.by_key(sym::rustc_intrinsic).exists() - // Keep this around for a bit until extern "rustc-intrinsic" abis are no longer used - || (match &function_data.abi { - Some(abi) => *abi == sym::rust_dash_intrinsic, - None => match def.lookup(self.db).container { - hir_def::ItemContainerId::ExternBlockId(block) => { - block.abi(self.db) == Some(sym::rust_dash_intrinsic) - } - _ => false, - }, - }); + let is_intrinsic = FunctionSignature::is_intrinsic(self.db, def); if is_intrinsic { return self.exec_intrinsic( @@ -171,6 +166,7 @@ impl Evaluator<'_> { destination: Interval, span: MirSpan, ) -> Result<()> { + let interner = DbInterner::new_with(self.db, None, None); match self_ty.kind(Interner) { TyKind::Function(_) => { let [arg] = args else { @@ -188,8 +184,8 @@ impl Evaluator<'_> { let InternedClosure(closure_owner, _) = self.db.lookup_intern_closure((*id).into()); let infer = self.db.infer(closure_owner); let (captures, _) = infer.closure_info(id); - let layout = self.layout(&self_ty)?; - let ty_iter = captures.iter().map(|c| c.ty(subst)); + let layout = self.layout(self_ty.to_nextsolver(interner))?; + let ty_iter = captures.iter().map(|c| c.ty(self.db, subst)); self.exec_clone_for_fields(ty_iter, layout, addr, def, locals, destination, span)?; } TyKind::Tuple(_, subst) => { @@ -197,7 +193,7 @@ impl Evaluator<'_> { not_supported!("wrong arg count for clone"); }; let addr = Address::from_bytes(arg.get(self)?)?; - let layout = self.layout(&self_ty)?; + let layout = self.layout(self_ty.to_nextsolver(interner))?; let ty_iter = subst.iter(Interner).map(|ga| ga.assert_ty_ref(Interner).clone()); self.exec_clone_for_fields(ty_iter, layout, addr, def, locals, destination, span)?; } @@ -226,8 +222,9 @@ impl Evaluator<'_> { destination: Interval, span: MirSpan, ) -> Result<()> { + let interner = DbInterner::new_with(self.db, None, None); for (i, ty) in ty_iter.enumerate() { - let size = 
self.layout(&ty)?.size.bytes_usize(); + let size = self.layout(ty.to_nextsolver(interner))?.size.bytes_usize(); let tmp = self.heap_allocate(self.ptr_size(), self.ptr_size())?; let arg = IntervalAndTy { interval: Interval { addr: tmp, size: self.ptr_size() }, @@ -592,6 +589,7 @@ impl Evaluator<'_> { span: MirSpan, needs_override: bool, ) -> Result { + let interner = DbInterner::new_with(self.db, None, None); if let Some(name) = name.strip_prefix("atomic_") { return self .exec_atomic_intrinsic(name, args, generic_args, destination, locals, span) @@ -769,7 +767,7 @@ impl Evaluator<'_> { "align_of generic arg is not provided".into(), )); }; - let align = self.layout(ty)?.align.abi.bytes(); + let align = self.layout(ty.to_nextsolver(interner))?.align.abi.bytes(); destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size]) } "size_of_val" => { @@ -1025,7 +1023,7 @@ impl Evaluator<'_> { let is_overflow = u128overflow || ans.to_le_bytes()[op_size..].iter().any(|&it| it != 0 && it != 255); let is_overflow = vec![u8::from(is_overflow)]; - let layout = self.layout(&result_ty)?; + let layout = self.layout(result_ty.to_nextsolver(interner))?; let result = self.construct_with_layout( layout.size.bytes_usize(), &layout, @@ -1249,7 +1247,7 @@ impl Evaluator<'_> { "const_eval_select arg[0] is not a tuple".into(), )); }; - let layout = self.layout(&tuple.ty)?; + let layout = self.layout(tuple.ty.to_nextsolver(interner))?; for (i, field) in fields.iter(Interner).enumerate() { let field = field.assert_ty_ref(Interner).clone(); let offset = layout.fields.offset(i).bytes_usize(); @@ -1408,6 +1406,7 @@ impl Evaluator<'_> { metadata: Interval, locals: &Locals, ) -> Result<(usize, usize)> { + let interner = DbInterner::new_with(self.db, None, None); Ok(match ty.kind(Interner) { TyKind::Str => (from_bytes!(usize, metadata.get(self)?), 1), TyKind::Slice(inner) => { @@ -1416,7 +1415,7 @@ impl Evaluator<'_> { (size * len, align) } TyKind::Dyn(_) => self.size_align_of_sized( - self.vtable_map.ty_of_bytes(metadata.get(self)?)?, + &convert_ty_for_result(interner, self.vtable_map.ty_of_bytes(metadata.get(self)?)?), locals, "dyn concrete type", )?, @@ -1463,6 +1462,7 @@ impl Evaluator<'_> { locals: &Locals, _span: MirSpan, ) -> Result<()> { + let interner = DbInterner::new_with(self.db, None, None); // We are a single threaded runtime with no UB checking and no optimization, so // we can implement atomic intrinsics as normal functions. 
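The comment above spells out why this lowering is sound: the evaluator is single threaded and performs no reordering, so an "atomic" read-modify-write can be emulated with a plain load, modify, and store. The following standalone sketch illustrates that idea; the byte-buffer `memory` and the function name are hypothetical stand-ins, not the evaluator's real memory API.

// Single-threaded emulation of an atomic RMW: a plain load/add/store is
// observationally equivalent because no other thread can see the
// intermediate state. `memory` stands in for the interpreter's heap.
fn emulate_atomic_fetch_add(memory: &mut [u8], addr: usize, operand: u64) -> u64 {
    let old = u64::from_le_bytes(memory[addr..addr + 8].try_into().unwrap());
    let new = old.wrapping_add(operand);
    memory[addr..addr + 8].copy_from_slice(&new.to_le_bytes());
    old // fetch_add returns the previous value, like the real intrinsic
}

fn main() {
    let mut memory = vec![0u8; 8];
    memory.copy_from_slice(&5u64.to_le_bytes());
    assert_eq!(emulate_atomic_fetch_add(&mut memory, 0, 3), 5);
    assert_eq!(u64::from_le_bytes(memory[..].try_into().unwrap()), 8);
}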
@@ -1560,7 +1560,7 @@ impl Evaluator<'_> { Substitution::from_iter(Interner, [ty.clone(), TyBuilder::bool()]), ) .intern(Interner); - let layout = self.layout(&result_ty)?; + let layout = self.layout(result_ty.to_nextsolver(interner))?; let result = self.construct_with_layout( layout.size.bytes_usize(), &layout, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs index c1f86960e154c..2a6e3a147a7d7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs @@ -5,39 +5,44 @@ use syntax::{TextRange, TextSize}; use test_fixture::WithFixture; use crate::display::DisplayTarget; -use crate::{Interner, Substitution, db::HirDatabase, mir::MirLowerError, test_db::TestDB}; +use crate::{ + Interner, Substitution, db::HirDatabase, mir::MirLowerError, setup_tracing, test_db::TestDB, +}; use super::{MirEvalError, interpret_mir}; fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError> { - let module_id = db.module_for_file(file_id.file_id(db)); - let def_map = module_id.def_map(db); - let scope = &def_map[module_id.local_id].scope; - let func_id = scope - .declarations() - .find_map(|x| match x { - hir_def::ModuleDefId::FunctionId(x) => { - if db.function_signature(x).name.display(db, Edition::CURRENT).to_string() == "main" - { - Some(x) - } else { - None + salsa::attach(db, || { + let module_id = db.module_for_file(file_id.file_id(db)); + let def_map = module_id.def_map(db); + let scope = &def_map[module_id.local_id].scope; + let func_id = scope + .declarations() + .find_map(|x| match x { + hir_def::ModuleDefId::FunctionId(x) => { + if db.function_signature(x).name.display(db, Edition::CURRENT).to_string() + == "main" + { + Some(x) + } else { + None + } } - } - _ => None, - }) - .expect("no main function found"); - let body = db - .monomorphized_mir_body( - func_id.into(), - Substitution::empty(Interner), - db.trait_environment(func_id.into()), - ) - .map_err(|e| MirEvalError::MirLowerError(func_id, e))?; - - let (result, output) = interpret_mir(db, body, false, None)?; - result?; - Ok((output.stdout().into_owned(), output.stderr().into_owned())) + _ => None, + }) + .expect("no main function found"); + let body = db + .monomorphized_mir_body( + func_id.into(), + Substitution::empty(Interner), + db.trait_environment(func_id.into()), + ) + .map_err(|e| MirEvalError::MirLowerError(func_id, e))?; + + let (result, output) = interpret_mir(db, body, false, None)?; + result?; + Ok((output.stdout().into_owned(), output.stderr().into_owned())) + }) } fn check_pass(#[rust_analyzer::rust_fixture] ra_fixture: &str) { @@ -49,44 +54,62 @@ fn check_pass_and_stdio( expected_stdout: &str, expected_stderr: &str, ) { + let _tracing = setup_tracing(); let (db, file_ids) = TestDB::with_many_files(ra_fixture); - let file_id = *file_ids.last().unwrap(); - let x = eval_main(&db, file_id); - match x { - Err(e) => { - let mut err = String::new(); - let line_index = |size: TextSize| { - let mut size = u32::from(size) as usize; - let lines = ra_fixture.lines().enumerate(); - for (i, l) in lines { - if let Some(x) = size.checked_sub(l.len()) { - size = x; - } else { - return (i, size); + salsa::attach(&db, || { + let file_id = *file_ids.last().unwrap(); + let x = eval_main(&db, file_id); + match x { + Err(e) => { + let mut err = String::new(); + let line_index = |size: TextSize| { + let mut size = u32::from(size) as usize; + let lines = 
ra_fixture.lines().enumerate(); + for (i, l) in lines { + if let Some(x) = size.checked_sub(l.len()) { + size = x; + } else { + return (i, size); + } } - } - (usize::MAX, size) - }; - let span_formatter = |file, range: TextRange| { - format!("{:?} {:?}..{:?}", file, line_index(range.start()), line_index(range.end())) - }; - let krate = db.module_for_file(file_id.file_id(&db)).krate(); - e.pretty_print(&mut err, &db, span_formatter, DisplayTarget::from_crate(&db, krate)) + (usize::MAX, size) + }; + let span_formatter = |file, range: TextRange| { + format!( + "{:?} {:?}..{:?}", + file, + line_index(range.start()), + line_index(range.end()) + ) + }; + let krate = db.module_for_file(file_id.file_id(&db)).krate(); + e.pretty_print( + &mut err, + &db, + span_formatter, + DisplayTarget::from_crate(&db, krate), + ) .unwrap(); - panic!("Error in interpreting: {err}"); - } - Ok((stdout, stderr)) => { - assert_eq!(stdout, expected_stdout); - assert_eq!(stderr, expected_stderr); + panic!("Error in interpreting: {err}"); + } + Ok((stdout, stderr)) => { + assert_eq!(stdout, expected_stdout); + assert_eq!(stderr, expected_stderr); + } } - } + }) } fn check_panic(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected_panic: &str) { let (db, file_ids) = TestDB::with_many_files(ra_fixture); - let file_id = *file_ids.last().unwrap(); - let e = eval_main(&db, file_id).unwrap_err(); - assert_eq!(e.is_panic().unwrap_or_else(|| panic!("unexpected error: {e:?}")), expected_panic); + salsa::attach(&db, || { + let file_id = *file_ids.last().unwrap(); + let e = eval_main(&db, file_id).unwrap_err(); + assert_eq!( + e.is_panic().unwrap_or_else(|| panic!("unexpected error: {e:?}")), + expected_panic + ); + }) } fn check_error_with( @@ -94,9 +117,11 @@ fn check_error_with( expect_err: impl FnOnce(MirEvalError) -> bool, ) { let (db, file_ids) = TestDB::with_many_files(ra_fixture); - let file_id = *file_ids.last().unwrap(); - let e = eval_main(&db, file_id).unwrap_err(); - assert!(expect_err(e)); + salsa::attach(&db, || { + let file_id = *file_ids.last().unwrap(); + let e = eval_main(&db, file_id).unwrap_err(); + assert!(expect_err(e)); + }) } #[test] @@ -489,7 +514,7 @@ fn main() { fn from_fn() { check_pass( r#" -//- minicore: fn, iterator +//- minicore: fn, iterator, sized struct FromFn(F); impl Option> Iterator for FromFn { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs index eb80e8706fa0c..50e416a66a64b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs @@ -31,7 +31,7 @@ use crate::{ display::{DisplayTarget, HirDisplay, hir_display_with_store}, error_lifetime, generics::generics, - infer::{CaptureKind, CapturedItem, TypeMismatch, cast::CastTy, unify::InferenceTable}, + infer::{CaptureKind, CapturedItem, TypeMismatch, cast::CastTy}, inhabitedness::is_ty_uninhabited_from, layout::LayoutError, mapping::ToChalk, @@ -43,6 +43,7 @@ use crate::{ Terminator, TerminatorKind, TupleFieldId, Ty, UnOp, VariantId, intern_const_scalar, return_slot, }, + next_solver::{DbInterner, mapping::ChalkToNextSolver}, static_lifetime, traits::FnTrait, utils::ClosureSubst, @@ -52,6 +53,8 @@ use super::OperandKind; mod as_place; mod pattern_matching; +#[cfg(test)] +mod tests; #[derive(Debug, Clone)] struct LoopBlocks { @@ -947,8 +950,7 @@ impl<'ctx> MirLowerCtx<'ctx> { let cast_kind = if source_ty.as_reference().is_some() { CastKind::PointerCoercion(PointerCast::ArrayToPointer) } else { - 
let mut table = InferenceTable::new(self.db, self.env.clone()); - cast_kind(&mut table, &source_ty, &target_ty)? + cast_kind(self.db, &source_ty, &target_ty)? }; Rvalue::Cast(cast_kind, it, target_ty) @@ -1411,8 +1413,12 @@ impl<'ctx> MirLowerCtx<'ctx> { } fn lower_literal_to_operand(&mut self, ty: Ty, l: &Literal) -> Result { - let size = - || self.db.layout_of_ty(ty.clone(), self.env.clone()).map(|it| it.size.bytes_usize()); + let interner = DbInterner::new_with(self.db, None, None); + let size = || { + self.db + .layout_of_ty(ty.to_nextsolver(interner), self.env.clone()) + .map(|it| it.size.bytes_usize()) + }; const USIZE_SIZE: usize = size_of::(); let bytes: Box<[_]> = match l { hir_def::hir::Literal::String(b) => { @@ -2012,9 +2018,9 @@ impl<'ctx> MirLowerCtx<'ctx> { } } -fn cast_kind(table: &mut InferenceTable<'_>, source_ty: &Ty, target_ty: &Ty) -> Result { - let from = CastTy::from_ty(table, source_ty); - let cast = CastTy::from_ty(table, target_ty); +fn cast_kind(db: &dyn HirDatabase, source_ty: &Ty, target_ty: &Ty) -> Result { + let from = CastTy::from_ty(db, source_ty); + let cast = CastTy::from_ty(db, target_ty); Ok(match (from, cast) { (Some(CastTy::Ptr(..) | CastTy::FnPtr), Some(CastTy::Int(_))) => { CastKind::PointerExposeAddress @@ -2059,7 +2065,7 @@ pub fn mir_body_for_closure_query( }, }); ctx.result.param_locals.push(closure_local); - let Some(sig) = ClosureSubst(substs).sig_ty().callable_sig(db) else { + let Some(sig) = ClosureSubst(substs).sig_ty(db).callable_sig(db) else { implementation_error!("closure has not callable sig"); }; let resolver_guard = ctx.resolver.update_to_inner_scope(db, owner, expr); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/tests.rs new file mode 100644 index 0000000000000..1d7a16ed72d07 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/tests.rs @@ -0,0 +1,64 @@ +use hir_def::db::DefDatabase; +use rustc_hash::FxHashMap; +use span::Edition; +use test_fixture::WithFixture; +use triomphe::Arc; + +use crate::{ + db::HirDatabase, + mir::{MirBody, MirLowerError}, + setup_tracing, + test_db::TestDB, +}; + +fn lower_mir( + #[rust_analyzer::rust_fixture] ra_fixture: &str, +) -> FxHashMap, MirLowerError>> { + let _tracing = setup_tracing(); + let (db, file_ids) = TestDB::with_many_files(ra_fixture); + let file_id = *file_ids.last().unwrap(); + let module_id = db.module_for_file(file_id.file_id(&db)); + let def_map = module_id.def_map(&db); + let scope = &def_map[module_id.local_id].scope; + let funcs = scope.declarations().filter_map(|x| match x { + hir_def::ModuleDefId::FunctionId(it) => Some(it), + _ => None, + }); + funcs + .map(|func| { + let name = db.function_signature(func).name.display(&db, Edition::CURRENT).to_string(); + let mir = db.mir_body(func.into()); + (name, mir) + }) + .collect() +} + +#[test] +fn dyn_projection_with_auto_traits_regression_next_solver() { + lower_mir( + r#" +//- minicore: sized, send +pub trait Deserializer {} + +pub trait Strictest { + type Object: ?Sized; +} + +impl Strictest for dyn CustomValue { + type Object = dyn CustomValue + Send; +} + +pub trait CustomValue: Send {} + +impl CustomValue for () {} + +struct Box; + +type DeserializeFn = fn(&mut dyn Deserializer) -> Box; + +fn foo() { + (|deserializer| Box::new(())) as DeserializeFn<::Object>; +} + "#, + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs index 
d8f443145ca06..555b87850924c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs @@ -38,7 +38,6 @@ struct Filler<'a> { trait_env: Arc, subst: &'a Substitution, generics: Option, - owner: DefWithBodyId, } impl FallibleTypeFolder for Filler<'_> { type Error = MirLowerError; @@ -66,7 +65,11 @@ impl FallibleTypeFolder for Filler<'_> { })) .intern(Interner)) } - TyKind::OpaqueType(id, subst) => { + TyKind::Alias(chalk_ir::AliasTy::Opaque(chalk_ir::OpaqueTy { + opaque_ty_id: id, + substitution: subst, + })) + | TyKind::OpaqueType(id, subst) => { let impl_trait_id = self.db.lookup_intern_impl_trait_id((*id).into()); let subst = subst.clone().try_fold_with(self.as_dyn(), outer_binder)?; match impl_trait_id { @@ -74,7 +77,6 @@ impl FallibleTypeFolder for Filler<'_> { let infer = self.db.infer(func.into()); let filler = &mut Filler { db: self.db, - owner: self.owner, trait_env: self.trait_env.clone(), subst: &subst, generics: Some(generics(self.db, func.into())), @@ -99,7 +101,7 @@ impl FallibleTypeFolder for Filler<'_> { idx: chalk_ir::PlaceholderIndex, _outer_binder: DebruijnIndex, ) -> std::result::Result, Self::Error> { - let it = from_placeholder_idx(self.db, idx); + let it = from_placeholder_idx(self.db, idx).0; let Some(idx) = self.generics.as_ref().and_then(|g| g.type_or_const_param_idx(it)) else { not_supported!("missing idx in generics"); }; @@ -117,7 +119,7 @@ impl FallibleTypeFolder for Filler<'_> { idx: chalk_ir::PlaceholderIndex, _outer_binder: DebruijnIndex, ) -> std::result::Result { - let it = from_placeholder_idx(self.db, idx); + let it = from_placeholder_idx(self.db, idx).0; let Some(idx) = self.generics.as_ref().and_then(|g| g.type_or_const_param_idx(it)) else { not_supported!("missing idx in generics"); }; @@ -306,7 +308,7 @@ pub fn monomorphized_mir_body_query( trait_env: Arc, ) -> Result, MirLowerError> { let generics = owner.as_generic_def_id(db).map(|g_def| generics(db, g_def)); - let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner }; + let filler = &mut Filler { db, subst: &subst, trait_env, generics }; let body = db.mir_body(owner)?; let mut body = (*body).clone(); filler.fill_body(&mut body)?; @@ -330,23 +332,9 @@ pub fn monomorphized_mir_body_for_closure_query( ) -> Result, MirLowerError> { let InternedClosure(owner, _) = db.lookup_intern_closure(closure); let generics = owner.as_generic_def_id(db).map(|g_def| generics(db, g_def)); - let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner }; + let filler = &mut Filler { db, subst: &subst, trait_env, generics }; let body = db.mir_body_for_closure(closure)?; let mut body = (*body).clone(); filler.fill_body(&mut body)?; Ok(Arc::new(body)) } - -// FIXME: remove this function. Monomorphization is a time consuming job and should always be a query. -pub fn monomorphize_mir_body_bad( - db: &dyn HirDatabase, - mut body: MirBody, - subst: Substitution, - trait_env: Arc, -) -> Result { - let owner = body.owner; - let generics = owner.as_generic_def_id(db).map(|g_def| generics(db, g_def)); - let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner }; - filler.fill_body(&mut body)?; - Ok(body) -} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver.rs new file mode 100644 index 0000000000000..073a02908deeb --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver.rs @@ -0,0 +1,48 @@ +//! 
Things relevant to the next trait solver. +#![allow(unused, unreachable_pub)] + +pub mod abi; +mod consts; +mod def_id; +pub mod fold; +pub mod fulfill; +mod generic_arg; +pub mod generics; +pub mod infer; +pub(crate) mod inspect; +pub mod interner; +mod ir_print; +pub mod mapping; +mod normalize; +pub mod obligation_ctxt; +mod opaques; +pub mod predicate; +mod region; +mod solver; +mod structural_normalize; +mod ty; +pub mod util; + +pub use consts::*; +pub use def_id::*; +pub use generic_arg::*; +pub use interner::*; +pub use opaques::*; +pub use predicate::*; +pub use region::*; +pub use solver::*; +pub use ty::*; + +pub type Binder<'db, T> = rustc_type_ir::Binder, T>; +pub type EarlyBinder<'db, T> = rustc_type_ir::EarlyBinder, T>; +pub type Canonical<'db, T> = rustc_type_ir::Canonical, T>; +pub type CanonicalVarValues<'db> = rustc_type_ir::CanonicalVarValues>; +pub type CanonicalVarKind<'db> = rustc_type_ir::CanonicalVarKind>; +pub type CanonicalQueryInput<'db, V> = rustc_type_ir::CanonicalQueryInput, V>; +pub type AliasTy<'db> = rustc_type_ir::AliasTy>; +pub type FnSig<'db> = rustc_type_ir::FnSig>; +pub type PolyFnSig<'db> = Binder<'db, rustc_type_ir::FnSig>>; +pub type TypingMode<'db> = rustc_type_ir::TypingMode>; +pub type TypeError<'db> = rustc_type_ir::error::TypeError>; +pub type QueryResult<'db> = rustc_type_ir::solve::QueryResult>; +pub type FxIndexMap = rustc_type_ir::data_structures::IndexMap; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/abi.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/abi.rs new file mode 100644 index 0000000000000..80d1ea4aa4d00 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/abi.rs @@ -0,0 +1,68 @@ +//! ABI-related things in the next-trait-solver. +use rustc_type_ir::{error::TypeError, relate::Relate}; + +use crate::FnAbi; + +use super::interner::DbInterner; + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +pub enum Safety { + Unsafe, + Safe, +} + +impl<'db> Relate> for Safety { + fn relate>>( + _relation: &mut R, + a: Self, + b: Self, + ) -> rustc_type_ir::relate::RelateResult, Self> { + if a != b { + Err(TypeError::SafetyMismatch(rustc_type_ir::error::ExpectedFound::new(a, b))) + } else { + Ok(a) + } + } +} + +impl<'db> rustc_type_ir::inherent::Safety> for Safety { + fn safe() -> Self { + Self::Safe + } + + fn is_safe(self) -> bool { + matches!(self, Safety::Safe) + } + + fn prefix_str(self) -> &'static str { + match self { + Self::Unsafe => "unsafe ", + Self::Safe => "", + } + } +} + +impl<'db> Relate> for FnAbi { + fn relate>>( + _relation: &mut R, + a: Self, + b: Self, + ) -> rustc_type_ir::relate::RelateResult, Self> { + if a == b { + Ok(a) + } else { + Err(TypeError::AbiMismatch(rustc_type_ir::error::ExpectedFound::new(a, b))) + } + } +} + +impl<'db> rustc_type_ir::inherent::Abi> for FnAbi { + fn rust() -> Self { + FnAbi::Rust + } + + fn is_rust(self) -> bool { + // TODO: rustc does not consider `RustCall` to be true here, but Chalk does + matches!(self, FnAbi::Rust | FnAbi::RustCall) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs new file mode 100644 index 0000000000000..7ebefa76ed029 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs @@ -0,0 +1,461 @@ +//! Things related to consts in the next-trait-solver. 
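The `consts` module that begins here interns each constant kind together with flags precomputed by `FlagComputation` (see `Const::new` below), so questions like "does this constant contain an inference variable?" become cheap lookups rather than recursive walks. A rough standalone illustration of that interning-with-cached-info pattern follows; the `Kind`, `Cached`, and `Interner` types are invented for the example and are not the real salsa-backed machinery.

use std::collections::HashMap;

// Hypothetical stand-ins for `ConstKind` and `WithCachedTypeInfo`.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Kind {
    Param(u32),
    Infer(u32),
}

struct Cached {
    kind: Kind,
    has_infer: bool, // computed once, when the constant is interned
}

#[derive(Default)]
struct Interner {
    ids: HashMap<Kind, usize>,
    storage: Vec<Cached>,
}

impl Interner {
    fn intern(&mut self, kind: Kind) -> usize {
        if let Some(&id) = self.ids.get(&kind) {
            return id; // structurally equal constants share one entry
        }
        let id = self.storage.len();
        self.storage.push(Cached { kind, has_infer: matches!(kind, Kind::Infer(_)) });
        self.ids.insert(kind, id);
        id
    }

    fn kind(&self, id: usize) -> Kind {
        self.storage[id].kind
    }

    fn has_infer(&self, id: usize) -> bool {
        self.storage[id].has_infer // a cached flag, no recursive walk
    }
}

fn main() {
    let mut interner = Interner::default();
    let a = interner.intern(Kind::Param(0));
    let b = interner.intern(Kind::Param(0));
    assert_eq!(a, b); // interning deduplicates
    assert_eq!(interner.kind(a), Kind::Param(0));
    let c = interner.intern(Kind::Infer(0));
    assert!(interner.has_infer(c) && !interner.has_infer(a));
}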
+ +use std::hash::Hash; + +use hir_def::{ConstParamId, TypeOrConstParamId}; +use intern::{Interned, Symbol}; +use rustc_ast_ir::{try_visit, visit::VisitorResult}; +use rustc_type_ir::{ + BoundVar, FlagComputation, Flags, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, + TypeVisitable, TypeVisitableExt, WithCachedTypeInfo, + inherent::{IntoKind, ParamEnv as _, PlaceholderLike, SliceLike}, + relate::Relate, +}; + +use crate::{ + ConstScalar, MemoryMap, + interner::InternedWrapperNoDebug, + next_solver::{ClauseKind, ParamEnv}, +}; + +use super::{BoundVarKind, DbInterner, ErrorGuaranteed, GenericArgs, Placeholder, Ty}; + +pub type ConstKind<'db> = rustc_type_ir::ConstKind>; +pub type UnevaluatedConst<'db> = rustc_type_ir::UnevaluatedConst>; + +#[salsa::interned(constructor = new_, debug)] +pub struct Const<'db> { + #[returns(ref)] + kind_: InternedWrapperNoDebug>>, +} + +impl<'db> Const<'db> { + pub fn new(interner: DbInterner<'db>, kind: ConstKind<'db>) -> Self { + let flags = FlagComputation::for_const_kind(&kind); + let cached = WithCachedTypeInfo { + internee: kind, + flags: flags.flags, + outer_exclusive_binder: flags.outer_exclusive_binder, + #[cfg(feature = "in-rust-tree")] + stable_hash: ena::fingerprint::Fingerprint::ZERO, + }; + Const::new_(interner.db(), InternedWrapperNoDebug(cached)) + } + + pub fn inner(&self) -> &WithCachedTypeInfo> { + salsa::with_attached_database(|db| { + let inner = &self.kind_(db).0; + // SAFETY: The caller already has access to a `Const<'db>`, so borrowchecking will + // make sure that our returned value is valid for the lifetime `'db`. + unsafe { std::mem::transmute(inner) } + }) + .unwrap() + } + + pub fn error(interner: DbInterner<'db>) -> Self { + Const::new(interner, rustc_type_ir::ConstKind::Error(ErrorGuaranteed)) + } + + pub fn new_param(interner: DbInterner<'db>, param: ParamConst) -> Self { + Const::new(interner, rustc_type_ir::ConstKind::Param(param)) + } + + pub fn new_placeholder(interner: DbInterner<'db>, placeholder: PlaceholderConst) -> Self { + Const::new(interner, ConstKind::Placeholder(placeholder)) + } + + pub fn is_ct_infer(&self) -> bool { + matches!(&self.inner().internee, ConstKind::Infer(_)) + } + + pub fn is_trivially_wf(self) -> bool { + match self.kind() { + ConstKind::Param(_) | ConstKind::Placeholder(_) | ConstKind::Bound(..) => true, + ConstKind::Infer(_) + | ConstKind::Unevaluated(..) + | ConstKind::Value(_) + | ConstKind::Error(_) + | ConstKind::Expr(_) => false, + } + } +} + +impl<'db> std::fmt::Debug for InternedWrapperNoDebug>> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.internee.fmt(f) + } +} + +pub type PlaceholderConst = Placeholder; + +#[derive(Copy, Clone, Hash, Eq, PartialEq)] +pub struct ParamConst { + // FIXME: See `ParamTy`. + pub id: ConstParamId, + pub index: u32, +} + +impl std::fmt::Debug for ParamConst { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "#{}", self.index) + } +} + +impl ParamConst { + pub fn find_const_ty_from_env<'db>(self, env: ParamEnv<'db>) -> Ty<'db> { + let mut candidates = env.caller_bounds().iter().filter_map(|clause| { + // `ConstArgHasType` are never desugared to be higher ranked. + match clause.kind().skip_binder() { + ClauseKind::ConstArgHasType(param_ct, ty) => { + assert!(!(param_ct, ty).has_escaping_bound_vars()); + + match param_ct.kind() { + ConstKind::Param(param_ct) if param_ct.index == self.index => Some(ty), + _ => None, + } + } + _ => None, + } + }); + + // N.B. 
it may be tempting to fix ICEs by making this function return + // `Option>` instead of `Ty<'db>`; however, this is generally + // considered to be a bandaid solution, since it hides more important + // underlying issues with how we construct generics and predicates of + // items. It's advised to fix the underlying issue rather than trying + // to modify this function. + let ty = candidates.next().unwrap_or_else(|| { + panic!("cannot find `{self:?}` in param-env: {env:#?}"); + }); + assert!( + candidates.next().is_none(), + "did not expect duplicate `ConstParamHasTy` for `{self:?}` in param-env: {env:#?}" + ); + ty + } +} + +/// A type-level constant value. +/// +/// Represents a typed, fully evaluated constant. +#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] +pub struct ValueConst<'db> { + pub(crate) ty: Ty<'db>, + pub(crate) value: Valtree<'db>, +} + +impl<'db> ValueConst<'db> { + pub fn new(ty: Ty<'db>, bytes: ConstBytes<'db>) -> Self { + let value = Valtree::new(bytes); + ValueConst { ty, value } + } +} + +impl<'db> rustc_type_ir::inherent::ValueConst> for ValueConst<'db> { + fn ty(self) -> Ty<'db> { + self.ty + } + + fn valtree(self) -> Valtree<'db> { + self.value + } +} + +impl<'db> rustc_type_ir::TypeVisitable> for ValueConst<'db> { + fn visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + self.ty.visit_with(visitor) + } +} + +impl<'db> rustc_type_ir::TypeFoldable> for ValueConst<'db> { + fn fold_with>>(self, folder: &mut F) -> Self { + ValueConst { ty: self.ty.fold_with(folder), value: self.value } + } + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + Ok(ValueConst { ty: self.ty.try_fold_with(folder)?, value: self.value }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ConstBytes<'db>(pub Box<[u8]>, pub MemoryMap<'db>); + +impl Hash for ConstBytes<'_> { + fn hash(&self, state: &mut H) { + self.0.hash(state) + } +} + +#[salsa::interned(constructor = new_, debug)] +pub struct Valtree<'db> { + #[returns(ref)] + bytes_: ConstBytes<'db>, +} + +impl<'db> Valtree<'db> { + pub fn new(bytes: ConstBytes<'db>) -> Self { + salsa::with_attached_database(|db| unsafe { + // SAFETY: ¯\_(ツ)_/¯ + std::mem::transmute(Valtree::new_(db, bytes)) + }) + .unwrap() + } + + pub fn inner(&self) -> &ConstBytes<'db> { + salsa::with_attached_database(|db| { + let inner = self.bytes_(db); + // SAFETY: The caller already has access to a `Valtree<'db>`, so borrowchecking will + // make sure that our returned value is valid for the lifetime `'db`. + unsafe { std::mem::transmute(inner) } + }) + .unwrap() + } +} + +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +pub struct ExprConst; + +impl rustc_type_ir::inherent::ParamLike for ParamConst { + fn index(self) -> u32 { + self.index + } +} + +impl<'db> IntoKind for Const<'db> { + type Kind = ConstKind<'db>; + + fn kind(self) -> Self::Kind { + self.inner().internee + } +} + +impl<'db> TypeVisitable> for Const<'db> { + fn visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + visitor.visit_const(*self) + } +} + +impl<'db> TypeSuperVisitable> for Const<'db> { + fn super_visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + match self.kind() { + ConstKind::Unevaluated(uv) => uv.visit_with(visitor), + ConstKind::Value(v) => v.visit_with(visitor), + ConstKind::Expr(e) => e.visit_with(visitor), + ConstKind::Error(e) => e.visit_with(visitor), + + ConstKind::Param(_) + | ConstKind::Infer(_) + | ConstKind::Bound(..) 
+ | ConstKind::Placeholder(_) => V::Result::output(), + } + } +} + +impl<'db> TypeFoldable> for Const<'db> { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + folder.try_fold_const(self) + } + fn fold_with>>(self, folder: &mut F) -> Self { + folder.fold_const(self) + } +} + +impl<'db> TypeSuperFoldable> for Const<'db> { + fn try_super_fold_with>>( + self, + folder: &mut F, + ) -> Result { + let kind = match self.kind() { + ConstKind::Unevaluated(uv) => ConstKind::Unevaluated(uv.try_fold_with(folder)?), + ConstKind::Value(v) => ConstKind::Value(v.try_fold_with(folder)?), + ConstKind::Expr(e) => ConstKind::Expr(e.try_fold_with(folder)?), + + ConstKind::Param(_) + | ConstKind::Infer(_) + | ConstKind::Bound(..) + | ConstKind::Placeholder(_) + | ConstKind::Error(_) => return Ok(self), + }; + if kind != self.kind() { Ok(Const::new(folder.cx(), kind)) } else { Ok(self) } + } + fn super_fold_with>>( + self, + folder: &mut F, + ) -> Self { + let kind = match self.kind() { + ConstKind::Unevaluated(uv) => ConstKind::Unevaluated(uv.fold_with(folder)), + ConstKind::Value(v) => ConstKind::Value(v.fold_with(folder)), + ConstKind::Expr(e) => ConstKind::Expr(e.fold_with(folder)), + + ConstKind::Param(_) + | ConstKind::Infer(_) + | ConstKind::Bound(..) + | ConstKind::Placeholder(_) + | ConstKind::Error(_) => return self, + }; + if kind != self.kind() { Const::new(folder.cx(), kind) } else { self } + } +} + +impl<'db> Relate> for Const<'db> { + fn relate>>( + relation: &mut R, + a: Self, + b: Self, + ) -> rustc_type_ir::relate::RelateResult, Self> { + relation.consts(a, b) + } +} + +impl<'db> Flags for Const<'db> { + fn flags(&self) -> rustc_type_ir::TypeFlags { + self.inner().flags + } + + fn outer_exclusive_binder(&self) -> rustc_type_ir::DebruijnIndex { + self.inner().outer_exclusive_binder + } +} + +impl<'db> rustc_type_ir::inherent::Const> for Const<'db> { + fn new_infer(interner: DbInterner<'db>, var: rustc_type_ir::InferConst) -> Self { + Const::new(interner, ConstKind::Infer(var)) + } + + fn new_var(interner: DbInterner<'db>, var: rustc_type_ir::ConstVid) -> Self { + Const::new(interner, ConstKind::Infer(rustc_type_ir::InferConst::Var(var))) + } + + fn new_bound( + interner: DbInterner<'db>, + debruijn: rustc_type_ir::DebruijnIndex, + var: BoundConst, + ) -> Self { + Const::new(interner, ConstKind::Bound(debruijn, var)) + } + + fn new_anon_bound( + interner: DbInterner<'db>, + debruijn: rustc_type_ir::DebruijnIndex, + var: rustc_type_ir::BoundVar, + ) -> Self { + Const::new(interner, ConstKind::Bound(debruijn, BoundConst { var })) + } + + fn new_unevaluated( + interner: DbInterner<'db>, + uv: rustc_type_ir::UnevaluatedConst>, + ) -> Self { + Const::new(interner, ConstKind::Unevaluated(uv)) + } + + fn new_expr(interner: DbInterner<'db>, expr: ExprConst) -> Self { + Const::new(interner, ConstKind::Expr(expr)) + } + + fn new_error(interner: DbInterner<'db>, guar: ErrorGuaranteed) -> Self { + Const::new(interner, ConstKind::Error(guar)) + } + + fn new_placeholder( + interner: DbInterner<'db>, + param: as rustc_type_ir::Interner>::PlaceholderConst, + ) -> Self { + Const::new(interner, ConstKind::Placeholder(param)) + } +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub struct BoundConst { + pub var: BoundVar, +} + +impl<'db> rustc_type_ir::inherent::BoundVarLike> for BoundConst { + fn var(self) -> BoundVar { + self.var + } + + fn assert_eq(self, var: BoundVarKind) { + var.expect_const() + } +} + +impl<'db> PlaceholderLike> for PlaceholderConst { + type Bound = BoundConst; + + fn 
universe(self) -> rustc_type_ir::UniverseIndex { + self.universe + } + + fn var(self) -> rustc_type_ir::BoundVar { + self.bound.var + } + + fn with_updated_universe(self, ui: rustc_type_ir::UniverseIndex) -> Self { + Placeholder { universe: ui, bound: self.bound } + } + + fn new(ui: rustc_type_ir::UniverseIndex, var: BoundConst) -> Self { + Placeholder { universe: ui, bound: var } + } + fn new_anon(ui: rustc_type_ir::UniverseIndex, var: rustc_type_ir::BoundVar) -> Self { + Placeholder { universe: ui, bound: BoundConst { var } } + } +} + +impl<'db> TypeVisitable> for ExprConst { + fn visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + // Ensure we get back to this when we fill in the fields + let ExprConst = &self; + V::Result::output() + } +} + +impl<'db> TypeFoldable> for ExprConst { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + Ok(ExprConst) + } + fn fold_with>>(self, folder: &mut F) -> Self { + ExprConst + } +} + +impl<'db> Relate> for ExprConst { + fn relate>>( + relation: &mut R, + a: Self, + b: Self, + ) -> rustc_type_ir::relate::RelateResult, Self> { + // Ensure we get back to this when we fill in the fields + let ExprConst = b; + Ok(a) + } +} + +impl<'db> rustc_type_ir::inherent::ExprConst> for ExprConst { + fn args(self) -> as rustc_type_ir::Interner>::GenericArgs { + // Ensure we get back to this when we fill in the fields + let ExprConst = self; + GenericArgs::default() + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/def_id.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/def_id.rs new file mode 100644 index 0000000000000..1ae59beca2728 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/def_id.rs @@ -0,0 +1,262 @@ +//! Definition of `SolverDefId` + +use hir_def::{ + AdtId, CallableDefId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, ImplId, + StaticId, StructId, TraitId, TypeAliasId, UnionId, +}; +use rustc_type_ir::inherent; +use stdx::impl_from; + +use crate::db::{InternedClosureId, InternedCoroutineId, InternedOpaqueTyId}; + +use super::DbInterner; + +#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)] +pub enum Ctor { + Struct(StructId), + Enum(EnumVariantId), +} + +#[derive(PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)] +pub enum SolverDefId { + AdtId(AdtId), + ConstId(ConstId), + FunctionId(FunctionId), + ImplId(ImplId), + StaticId(StaticId), + TraitId(TraitId), + TypeAliasId(TypeAliasId), + InternedClosureId(InternedClosureId), + InternedCoroutineId(InternedCoroutineId), + InternedOpaqueTyId(InternedOpaqueTyId), + Ctor(Ctor), +} + +impl std::fmt::Debug for SolverDefId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let interner = DbInterner::conjure(); + let db = interner.db; + match *self { + SolverDefId::AdtId(AdtId::StructId(id)) => { + f.debug_tuple("AdtId").field(&db.struct_signature(id).name.as_str()).finish() + } + SolverDefId::AdtId(AdtId::EnumId(id)) => { + f.debug_tuple("AdtId").field(&db.enum_signature(id).name.as_str()).finish() + } + SolverDefId::AdtId(AdtId::UnionId(id)) => { + f.debug_tuple("AdtId").field(&db.union_signature(id).name.as_str()).finish() + } + SolverDefId::ConstId(id) => f + .debug_tuple("ConstId") + .field(&db.const_signature(id).name.as_ref().map_or("_", |name| name.as_str())) + .finish(), + SolverDefId::FunctionId(id) => { + f.debug_tuple("FunctionId").field(&db.function_signature(id).name.as_str()).finish() + } + SolverDefId::ImplId(id) => 
f.debug_tuple("ImplId").field(&id).finish(), + SolverDefId::StaticId(id) => { + f.debug_tuple("StaticId").field(&db.static_signature(id).name.as_str()).finish() + } + SolverDefId::TraitId(id) => { + f.debug_tuple("TraitId").field(&db.trait_signature(id).name.as_str()).finish() + } + SolverDefId::TypeAliasId(id) => f + .debug_tuple("TypeAliasId") + .field(&db.type_alias_signature(id).name.as_str()) + .finish(), + SolverDefId::InternedClosureId(id) => { + f.debug_tuple("InternedClosureId").field(&id).finish() + } + SolverDefId::InternedCoroutineId(id) => { + f.debug_tuple("InternedCoroutineId").field(&id).finish() + } + SolverDefId::InternedOpaqueTyId(id) => { + f.debug_tuple("InternedOpaqueTyId").field(&id).finish() + } + SolverDefId::Ctor(Ctor::Struct(id)) => { + f.debug_tuple("Ctor").field(&db.struct_signature(id).name.as_str()).finish() + } + SolverDefId::Ctor(Ctor::Enum(id)) => { + let parent_enum = id.loc(db).parent; + f.debug_tuple("Ctor") + .field(&format_args!( + "\"{}::{}\"", + db.enum_signature(parent_enum).name.as_str(), + parent_enum.enum_variants(db).variant_name_by_id(id).unwrap().as_str() + )) + .finish() + } + } + } +} + +impl_from!( + AdtId(StructId, EnumId, UnionId), + ConstId, + FunctionId, + ImplId, + StaticId, + TraitId, + TypeAliasId, + InternedClosureId, + InternedCoroutineId, + InternedOpaqueTyId, + Ctor + for SolverDefId +); + +impl From for SolverDefId { + fn from(value: GenericDefId) -> Self { + match value { + GenericDefId::AdtId(adt_id) => SolverDefId::AdtId(adt_id), + GenericDefId::ConstId(const_id) => SolverDefId::ConstId(const_id), + GenericDefId::FunctionId(function_id) => SolverDefId::FunctionId(function_id), + GenericDefId::ImplId(impl_id) => SolverDefId::ImplId(impl_id), + GenericDefId::StaticId(static_id) => SolverDefId::StaticId(static_id), + GenericDefId::TraitId(trait_id) => SolverDefId::TraitId(trait_id), + GenericDefId::TypeAliasId(type_alias_id) => SolverDefId::TypeAliasId(type_alias_id), + } + } +} + +impl TryFrom for GenericDefId { + type Error = SolverDefId; + + fn try_from(value: SolverDefId) -> Result { + Ok(match value { + SolverDefId::AdtId(adt_id) => GenericDefId::AdtId(adt_id), + SolverDefId::ConstId(const_id) => GenericDefId::ConstId(const_id), + SolverDefId::FunctionId(function_id) => GenericDefId::FunctionId(function_id), + SolverDefId::ImplId(impl_id) => GenericDefId::ImplId(impl_id), + SolverDefId::StaticId(static_id) => GenericDefId::StaticId(static_id), + SolverDefId::TraitId(trait_id) => GenericDefId::TraitId(trait_id), + SolverDefId::TypeAliasId(type_alias_id) => GenericDefId::TypeAliasId(type_alias_id), + SolverDefId::InternedClosureId(_) => return Err(value), + SolverDefId::InternedCoroutineId(_) => return Err(value), + SolverDefId::InternedOpaqueTyId(_) => return Err(value), + SolverDefId::Ctor(_) => return Err(value), + }) + } +} + +impl<'db> inherent::DefId> for SolverDefId { + fn as_local(self) -> Option { + Some(self) + } + fn is_local(self) -> bool { + true + } +} + +macro_rules! 
declare_id_wrapper { + ($name:ident, $wraps:ident) => { + #[derive(Clone, Copy, PartialEq, Eq, Hash)] + pub struct $name(pub $wraps); + + impl std::fmt::Debug for $name { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Debug::fmt(&self.0, f) + } + } + + impl From<$name> for $wraps { + #[inline] + fn from(value: $name) -> $wraps { + value.0 + } + } + + impl From<$wraps> for $name { + #[inline] + fn from(value: $wraps) -> $name { + Self(value) + } + } + + impl From<$name> for SolverDefId { + #[inline] + fn from(value: $name) -> SolverDefId { + value.0.into() + } + } + + impl TryFrom for $name { + type Error = (); + + #[inline] + fn try_from(value: SolverDefId) -> Result { + match value { + SolverDefId::$wraps(it) => Ok(Self(it)), + _ => Err(()), + } + } + } + + impl<'db> inherent::DefId> for $name { + fn as_local(self) -> Option { + Some(self.into()) + } + fn is_local(self) -> bool { + true + } + } + }; +} + +declare_id_wrapper!(TraitIdWrapper, TraitId); +declare_id_wrapper!(TypeAliasIdWrapper, TypeAliasId); +declare_id_wrapper!(ClosureIdWrapper, InternedClosureId); +declare_id_wrapper!(CoroutineIdWrapper, InternedCoroutineId); +declare_id_wrapper!(AdtIdWrapper, AdtId); +declare_id_wrapper!(ImplIdWrapper, ImplId); + +#[derive(Clone, Copy, PartialEq, Eq, Hash)] +pub struct CallableIdWrapper(pub CallableDefId); + +impl std::fmt::Debug for CallableIdWrapper { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Debug::fmt(&self.0, f) + } +} +impl From for CallableDefId { + #[inline] + fn from(value: CallableIdWrapper) -> CallableDefId { + value.0 + } +} +impl From for CallableIdWrapper { + #[inline] + fn from(value: CallableDefId) -> CallableIdWrapper { + Self(value) + } +} +impl From for SolverDefId { + #[inline] + fn from(value: CallableIdWrapper) -> SolverDefId { + match value.0 { + CallableDefId::FunctionId(it) => it.into(), + CallableDefId::StructId(it) => Ctor::Struct(it).into(), + CallableDefId::EnumVariantId(it) => Ctor::Enum(it).into(), + } + } +} +impl TryFrom for CallableIdWrapper { + type Error = (); + #[inline] + fn try_from(value: SolverDefId) -> Result { + match value { + SolverDefId::FunctionId(it) => Ok(Self(it.into())), + SolverDefId::Ctor(Ctor::Struct(it)) => Ok(Self(it.into())), + SolverDefId::Ctor(Ctor::Enum(it)) => Ok(Self(it.into())), + _ => Err(()), + } + } +} +impl<'db> inherent::DefId> for CallableIdWrapper { + fn as_local(self) -> Option { + Some(self.into()) + } + fn is_local(self) -> bool { + true + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fold.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fold.rs new file mode 100644 index 0000000000000..405a57d9e898c --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fold.rs @@ -0,0 +1,131 @@ +//! Fold impls for the next-trait-solver. + +use rustc_type_ir::{ + BoundVar, DebruijnIndex, RegionKind, TypeFoldable, TypeFolder, TypeSuperFoldable, + TypeVisitableExt, + inherent::{IntoKind, Region as _}, +}; + +use crate::next_solver::BoundConst; + +use super::{ + Binder, BoundRegion, BoundTy, Const, ConstKind, DbInterner, Predicate, Region, Ty, TyKind, +}; + +/// A delegate used when instantiating bound vars. +/// +/// Any implementation must make sure that each bound variable always +/// gets mapped to the same result. `BoundVarReplacer` caches by using +/// a `DelayedMap` which does not cache the first few types it encounters. 
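As the doc comment above requires, a replacer delegate must map each bound variable to the same result every time it is queried. The simplified sketch below shows the shape of such a substitution; the toy `Ty` enum is made up, and it deliberately ignores the nested-binder and De Bruijn-shifting handling that the real `BoundVarReplacer` performs.

// Replace de Bruijn-style bound variables by asking a delegate closure;
// determinism is guaranteed here by indexing into a fixed argument slice.
#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Bound(usize), // a binder's variable, by index
    Ref(Box<Ty>),
    Pair(Box<Ty>, Box<Ty>),
    Unit,
}

fn replace_bound_vars<F: FnMut(usize) -> Ty>(ty: &Ty, delegate: &mut F) -> Ty {
    match ty {
        Ty::Bound(var) => delegate(*var),
        Ty::Ref(inner) => Ty::Ref(Box::new(replace_bound_vars(inner, &mut *delegate))),
        Ty::Pair(a, b) => Ty::Pair(
            Box::new(replace_bound_vars(a, &mut *delegate)),
            Box::new(replace_bound_vars(b, delegate)),
        ),
        Ty::Unit => Ty::Unit,
    }
}

fn main() {
    // `for<T0> (&T0, T0)` instantiated with `T0 = ()`.
    let ty = Ty::Pair(Box::new(Ty::Ref(Box::new(Ty::Bound(0)))), Box::new(Ty::Bound(0)));
    let args = [Ty::Unit];
    let result = replace_bound_vars(&ty, &mut |var| args[var].clone());
    assert_eq!(
        result,
        Ty::Pair(Box::new(Ty::Ref(Box::new(Ty::Unit))), Box::new(Ty::Unit))
    );
}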
+pub trait BoundVarReplacerDelegate<'db> { + fn replace_region(&mut self, br: BoundRegion) -> Region<'db>; + fn replace_ty(&mut self, bt: BoundTy) -> Ty<'db>; + fn replace_const(&mut self, bv: BoundConst) -> Const<'db>; +} + +/// A simple delegate taking 3 mutable functions. The used functions must +/// always return the same result for each bound variable, no matter how +/// frequently they are called. +pub struct FnMutDelegate<'db, 'a> { + pub regions: &'a mut (dyn FnMut(BoundRegion) -> Region<'db> + 'a), + pub types: &'a mut (dyn FnMut(BoundTy) -> Ty<'db> + 'a), + pub consts: &'a mut (dyn FnMut(BoundConst) -> Const<'db> + 'a), +} + +impl<'db, 'a> BoundVarReplacerDelegate<'db> for FnMutDelegate<'db, 'a> { + fn replace_region(&mut self, br: BoundRegion) -> Region<'db> { + (self.regions)(br) + } + fn replace_ty(&mut self, bt: BoundTy) -> Ty<'db> { + (self.types)(bt) + } + fn replace_const(&mut self, bv: BoundConst) -> Const<'db> { + (self.consts)(bv) + } +} + +/// Replaces the escaping bound vars (late bound regions or bound types) in a type. +pub(crate) struct BoundVarReplacer<'db, D> { + interner: DbInterner<'db>, + /// As with `RegionFolder`, represents the index of a binder *just outside* + /// the ones we have visited. + current_index: DebruijnIndex, + + delegate: D, +} + +impl<'db, D: BoundVarReplacerDelegate<'db>> BoundVarReplacer<'db, D> { + pub fn new(tcx: DbInterner<'db>, delegate: D) -> Self { + BoundVarReplacer { interner: tcx, current_index: DebruijnIndex::ZERO, delegate } + } +} + +impl<'db, D> TypeFolder> for BoundVarReplacer<'db, D> +where + D: BoundVarReplacerDelegate<'db>, +{ + fn cx(&self) -> DbInterner<'db> { + self.interner + } + + fn fold_binder>>( + &mut self, + t: Binder<'db, T>, + ) -> Binder<'db, T> { + self.current_index.shift_in(1); + let t = t.super_fold_with(self); + self.current_index.shift_out(1); + t + } + + fn fold_ty(&mut self, t: Ty<'db>) -> Ty<'db> { + match t.kind() { + TyKind::Bound(debruijn, bound_ty) if debruijn == self.current_index => { + let ty = self.delegate.replace_ty(bound_ty); + debug_assert!(!ty.has_vars_bound_above(DebruijnIndex::ZERO)); + rustc_type_ir::shift_vars(self.interner, ty, self.current_index.as_u32()) + } + _ => { + if !t.has_vars_bound_at_or_above(self.current_index) { + t + } else { + t.super_fold_with(self) + } + } + } + } + + fn fold_region(&mut self, r: Region<'db>) -> Region<'db> { + match r.kind() { + RegionKind::ReBound(debruijn, br) if debruijn == self.current_index => { + let region = self.delegate.replace_region(br); + if let RegionKind::ReBound(debruijn1, br) = region.kind() { + // If the callback returns a bound region, + // that region should always use the INNERMOST + // debruijn index. Then we adjust it to the + // correct depth. 
+ assert_eq!(debruijn1, DebruijnIndex::ZERO); + Region::new_bound(self.interner, debruijn, br) + } else { + region + } + } + _ => r, + } + } + + fn fold_const(&mut self, ct: Const<'db>) -> Const<'db> { + match ct.kind() { + ConstKind::Bound(debruijn, bound_const) if debruijn == self.current_index => { + let ct = self.delegate.replace_const(bound_const); + debug_assert!(!ct.has_vars_bound_above(DebruijnIndex::ZERO)); + rustc_type_ir::shift_vars(self.interner, ct, self.current_index.as_u32()) + } + _ => ct.super_fold_with(self), + } + } + + fn fold_predicate(&mut self, p: Predicate<'db>) -> Predicate<'db> { + if p.has_vars_bound_at_or_above(self.current_index) { p.super_fold_with(self) } else { p } + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill.rs new file mode 100644 index 0000000000000..34dff37972e7e --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill.rs @@ -0,0 +1,344 @@ +//! Fulfill loop for next-solver. + +mod errors; + +use std::{marker::PhantomData, mem, ops::ControlFlow, vec::ExtractIf}; + +use rustc_hash::FxHashSet; +use rustc_next_trait_solver::{ + delegate::SolverDelegate, + solve::{GoalEvaluation, GoalStalledOn, HasChanged, SolverDelegateEvalExt}, +}; +use rustc_type_ir::{ + Interner, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor, + inherent::{IntoKind, Span as _}, + solve::{Certainty, NoSolution}, +}; + +use crate::next_solver::{ + DbInterner, SolverContext, SolverDefId, Span, Ty, TyKind, TypingMode, + infer::{ + InferCtxt, + traits::{PredicateObligation, PredicateObligations}, + }, + inspect::ProofTreeVisitor, +}; + +type PendingObligations<'db> = + Vec<(PredicateObligation<'db>, Option>>)>; + +/// A trait engine using the new trait solver. +/// +/// This is mostly identical to how `evaluate_all` works inside of the +/// solver, except that the requirements are slightly different. +/// +/// Unlike `evaluate_all` it is possible to add new obligations later on +/// and we also have to track diagnostics information by using `Obligation` +/// instead of `Goal`. +/// +/// It is also likely that we want to use slightly different datastructures +/// here as this will have to deal with far more root goals than `evaluate_all`. +#[derive(Debug, Clone)] +pub struct FulfillmentCtxt<'db> { + obligations: ObligationStorage<'db>, + + /// The snapshot in which this context was created. Using the context + /// outside of this snapshot leads to subtle bugs if the snapshot + /// gets rolled back. Because of this we explicitly check that we only + /// use the context in exactly this snapshot. + usable_in_snapshot: usize, +} + +#[derive(Default, Debug, Clone)] +struct ObligationStorage<'db> { + /// Obligations which resulted in an overflow in fulfillment itself. + /// + /// We cannot eagerly return these as error so we instead store them here + /// to avoid recomputing them each time `select_where_possible` is called. + /// This also allows us to return the correct `FulfillmentError` for them. 
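The strategy the `FulfillmentCtxt` doc comment above describes boils down to a fixpoint loop: try every pending obligation, keep the ambiguous ones for a later round, collect hard errors, and stop once a full pass makes no progress. The schematic below is a self-contained version of that loop under those assumptions; it is not the real fulfillment API, and obligations are reduced to plain values judged by a closure.

// Outcome of trying one obligation in a single pass.
#[derive(Debug, PartialEq)]
enum Outcome {
    Solved,
    Ambiguous, // might succeed once more inference progress is made
    Error,
}

fn select_where_possible<O>(
    pending: &mut Vec<O>,
    mut evaluate: impl FnMut(&O) -> Outcome,
) -> Vec<O> {
    let mut errors = Vec::new();
    loop {
        let mut progressed = false;
        let mut still_pending = Vec::new();
        for obligation in pending.drain(..) {
            match evaluate(&obligation) {
                Outcome::Solved => progressed = true,
                Outcome::Ambiguous => still_pending.push(obligation),
                Outcome::Error => {
                    progressed = true;
                    errors.push(obligation);
                }
            }
        }
        *pending = still_pending;
        if !progressed {
            return errors; // whatever is still in `pending` is an ambiguity
        }
    }
}

fn main() {
    // Obligation 1 stays ambiguous, 2 solves, 3 is a hard error.
    let mut pending = vec![1, 2, 3];
    let errors = select_where_possible(&mut pending, |&n| match n {
        2 => Outcome::Solved,
        3 => Outcome::Error,
        _ => Outcome::Ambiguous,
    });
    assert_eq!(errors, vec![3]);
    assert_eq!(pending, vec![1]);
}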
+ overflowed: Vec>, + pending: PendingObligations<'db>, +} + +impl<'db> ObligationStorage<'db> { + fn register( + &mut self, + obligation: PredicateObligation<'db>, + stalled_on: Option>>, + ) { + self.pending.push((obligation, stalled_on)); + } + + fn has_pending_obligations(&self) -> bool { + !self.pending.is_empty() || !self.overflowed.is_empty() + } + + fn clone_pending(&self) -> PredicateObligations<'db> { + let mut obligations: PredicateObligations<'db> = + self.pending.iter().map(|(o, _)| o.clone()).collect(); + obligations.extend(self.overflowed.iter().cloned()); + obligations + } + + fn drain_pending( + &mut self, + cond: impl Fn(&PredicateObligation<'db>) -> bool, + ) -> PendingObligations<'db> { + let (not_stalled, pending) = + mem::take(&mut self.pending).into_iter().partition(|(o, _)| cond(o)); + self.pending = pending; + not_stalled + } + + fn on_fulfillment_overflow(&mut self, infcx: &InferCtxt<'db>) { + infcx.probe(|_| { + // IMPORTANT: we must not use solve any inference variables in the obligations + // as this is all happening inside of a probe. We use a probe to make sure + // we get all obligations involved in the overflow. We pretty much check: if + // we were to do another step of `select_where_possible`, which goals would + // change. + // FIXME: is merged, this can be removed. + self.overflowed.extend( + self.pending + .extract_if(.., |(o, stalled_on)| { + let goal = o.as_goal(); + let result = <&SolverContext<'db>>::from(infcx).evaluate_root_goal( + goal, + Span::dummy(), + stalled_on.take(), + ); + matches!(result, Ok(GoalEvaluation { has_changed: HasChanged::Yes, .. })) + }) + .map(|(o, _)| o), + ); + }) + } +} + +impl<'db> FulfillmentCtxt<'db> { + pub fn new(infcx: &InferCtxt<'db>) -> FulfillmentCtxt<'db> { + FulfillmentCtxt { + obligations: Default::default(), + usable_in_snapshot: infcx.num_open_snapshots(), + } + } +} + +impl<'db> FulfillmentCtxt<'db> { + #[tracing::instrument(level = "trace", skip(self, infcx))] + pub(crate) fn register_predicate_obligation( + &mut self, + infcx: &InferCtxt<'db>, + obligation: PredicateObligation<'db>, + ) { + // FIXME: See the comment in `select_where_possible()`. + // assert_eq!(self.usable_in_snapshot, infcx.num_open_snapshots()); + self.obligations.register(obligation, None); + } + + pub(crate) fn register_predicate_obligations( + &mut self, + infcx: &InferCtxt<'db>, + obligations: impl IntoIterator>, + ) { + // FIXME: See the comment in `select_where_possible()`. + // assert_eq!(self.usable_in_snapshot, infcx.num_open_snapshots()); + obligations.into_iter().for_each(|obligation| self.obligations.register(obligation, None)); + } + + pub(crate) fn collect_remaining_errors( + &mut self, + infcx: &InferCtxt<'db>, + ) -> Vec> { + self.obligations + .pending + .drain(..) + .map(|(obligation, _)| NextSolverError::Ambiguity(obligation)) + .chain(self.obligations.overflowed.drain(..).map(NextSolverError::Overflow)) + .collect() + } + + pub(crate) fn select_where_possible( + &mut self, + infcx: &InferCtxt<'db>, + ) -> Vec> { + // FIXME(next-solver): We should bring this assertion back. Currently it panics because + // there are places which use `InferenceTable` and open a snapshot and register obligations + // and select. They should use a different `ObligationCtxt` instead. Then we'll be also able + // to not put the obligations queue in `InferenceTable`'s snapshots. 
+ // assert_eq!(self.usable_in_snapshot, infcx.num_open_snapshots()); + let mut errors = Vec::new(); + loop { + let mut any_changed = false; + for (mut obligation, stalled_on) in self.obligations.drain_pending(|_| true) { + if obligation.recursion_depth >= infcx.interner.recursion_limit() { + self.obligations.on_fulfillment_overflow(infcx); + // Only return true errors that we have accumulated while processing. + return errors; + } + + let goal = obligation.as_goal(); + let delegate = <&SolverContext<'db>>::from(infcx); + if let Some(certainty) = delegate.compute_goal_fast_path(goal, Span::dummy()) { + match certainty { + Certainty::Yes => {} + Certainty::Maybe { .. } => { + self.obligations.register(obligation, None); + } + } + continue; + } + + let result = delegate.evaluate_root_goal(goal, Span::dummy(), stalled_on); + let GoalEvaluation { goal: _, certainty, has_changed, stalled_on } = match result { + Ok(result) => result, + Err(NoSolution) => { + errors.push(NextSolverError::TrueError(obligation)); + continue; + } + }; + + if has_changed == HasChanged::Yes { + // We increment the recursion depth here to track the number of times + // this goal has resulted in inference progress. This doesn't precisely + // model the way that we track recursion depth in the old solver due + // to the fact that we only process root obligations, but it is a good + // approximation and should only result in fulfillment overflow in + // pathological cases. + obligation.recursion_depth += 1; + any_changed = true; + } + + match certainty { + Certainty::Yes => {} + Certainty::Maybe { .. } => self.obligations.register(obligation, stalled_on), + } + } + + if !any_changed { + break; + } + } + + errors + } + + pub(crate) fn select_all_or_error( + &mut self, + infcx: &InferCtxt<'db>, + ) -> Vec> { + let errors = self.select_where_possible(infcx); + if !errors.is_empty() { + return errors; + } + + self.collect_remaining_errors(infcx) + } + + fn has_pending_obligations(&self) -> bool { + self.obligations.has_pending_obligations() + } + + pub(crate) fn pending_obligations(&self) -> PredicateObligations<'db> { + self.obligations.clone_pending() + } + + pub(crate) fn drain_stalled_obligations_for_coroutines( + &mut self, + infcx: &InferCtxt<'db>, + ) -> PredicateObligations<'db> { + let stalled_coroutines = match infcx.typing_mode() { + TypingMode::Analysis { defining_opaque_types_and_generators } => { + defining_opaque_types_and_generators + } + TypingMode::Coherence + | TypingMode::Borrowck { defining_opaque_types: _ } + | TypingMode::PostBorrowckAnalysis { defined_opaque_types: _ } + | TypingMode::PostAnalysis => return Default::default(), + }; + let stalled_coroutines = stalled_coroutines.inner(); + + if stalled_coroutines.is_empty() { + return Default::default(); + } + + self.obligations + .drain_pending(|obl| { + infcx.probe(|_| { + infcx + .visit_proof_tree( + obl.as_goal(), + &mut StalledOnCoroutines { + stalled_coroutines, + cache: Default::default(), + }, + ) + .is_break() + }) + }) + .into_iter() + .map(|(o, _)| o) + .collect() + } +} + +/// Detect if a goal is stalled on a coroutine that is owned by the current typeck root. +/// +/// This function can (erroneously) fail to detect a predicate, i.e. it doesn't need to +/// be complete. However, this will lead to ambiguity errors, so we want to make it +/// accurate. +/// +/// This function can be also return false positives, which will lead to poor diagnostics +/// so we want to keep this visitor *precise* too. 
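The visitor described above follows a common `rustc_type_ir` pattern: recurse through a structure, short-circuit with `ControlFlow::Break` on a hit, and cache already-visited nodes so shared subtrees are only walked once. A small standalone analogue is sketched below; the `Node` type and the "stalled coroutine" ids are invented for the example.

use std::collections::HashSet;
use std::ops::ControlFlow;

#[derive(Clone, PartialEq, Eq, Hash)]
enum Node {
    Coroutine(u32),
    Tuple(Vec<Node>),
    Unit,
}

struct FindStalledCoroutine<'a> {
    stalled: &'a [u32],
    cache: HashSet<Node>, // avoids re-walking shared subtrees
}

impl FindStalledCoroutine<'_> {
    fn visit(&mut self, node: &Node) -> ControlFlow<()> {
        if !self.cache.insert(node.clone()) {
            return ControlFlow::Continue(()); // already seen this subtree
        }
        match node {
            Node::Coroutine(id) if self.stalled.contains(id) => ControlFlow::Break(()),
            Node::Tuple(items) => {
                for item in items {
                    self.visit(item)?; // `?` propagates a Break immediately
                }
                ControlFlow::Continue(())
            }
            _ => ControlFlow::Continue(()),
        }
    }
}

fn main() {
    let tree = Node::Tuple(vec![Node::Unit, Node::Tuple(vec![Node::Coroutine(7)])]);
    let mut visitor = FindStalledCoroutine { stalled: &[7], cache: HashSet::new() };
    assert!(visitor.visit(&tree).is_break());
}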
+pub struct StalledOnCoroutines<'a, 'db> { + pub stalled_coroutines: &'a [SolverDefId], + pub cache: FxHashSet>, +} + +impl<'db> ProofTreeVisitor<'db> for StalledOnCoroutines<'_, 'db> { + type Result = ControlFlow<()>; + + fn visit_goal(&mut self, inspect_goal: &super::inspect::InspectGoal<'_, 'db>) -> Self::Result { + inspect_goal.goal().predicate.visit_with(self)?; + + if let Some(candidate) = inspect_goal.unique_applicable_candidate() { + candidate.visit_nested_no_probe(self) + } else { + ControlFlow::Continue(()) + } + } +} + +impl<'db> TypeVisitor> for StalledOnCoroutines<'_, 'db> { + type Result = ControlFlow<()>; + + fn visit_ty(&mut self, ty: Ty<'db>) -> Self::Result { + if !self.cache.insert(ty) { + return ControlFlow::Continue(()); + } + + if let TyKind::Coroutine(def_id, _) = ty.kind() + && self.stalled_coroutines.contains(&def_id.into()) + { + ControlFlow::Break(()) + } else if ty.has_coroutines() { + ty.super_visit_with(self) + } else { + ControlFlow::Continue(()) + } + } +} + +#[derive(Debug)] +pub enum NextSolverError<'db> { + TrueError(PredicateObligation<'db>), + Ambiguity(PredicateObligation<'db>), + Overflow(PredicateObligation<'db>), +} + +impl NextSolverError<'_> { + #[inline] + pub fn is_true_error(&self) -> bool { + matches!(self, NextSolverError::TrueError(_)) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill/errors.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill/errors.rs new file mode 100644 index 0000000000000..ab4a229fbc05f --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill/errors.rs @@ -0,0 +1,1336 @@ +//! Trait solving error diagnosis and reporting. +//! +//! This code isn't used by rust-analyzer (it should, but then it'll probably be better to re-port it from rustc). +//! It's only there because without it, debugging trait solver errors is a nightmare. + +use std::{fmt::Debug, ops::ControlFlow}; + +use rustc_next_trait_solver::solve::{GoalEvaluation, SolverDelegateEvalExt}; +use rustc_type_ir::{ + AliasRelationDirection, AliasTermKind, HostEffectPredicate, Interner, PredicatePolarity, + error::ExpectedFound, + inherent::{IntoKind, PlaceholderConst, SliceLike, Span as _}, + lang_items::SolverTraitLangItem, + solve::{CandidateSource, Certainty, GoalSource, MaybeCause, NoSolution}, +}; +use tracing::{instrument, trace}; + +use crate::next_solver::{ + AliasTerm, Binder, ClauseKind, Const, ConstKind, DbInterner, PolyTraitPredicate, PredicateKind, + SolverContext, SolverDefId, Span, Term, TraitPredicate, Ty, TyKind, TypeError, + fulfill::NextSolverError, + infer::{ + InferCtxt, + select::SelectionError, + traits::{Obligation, ObligationCause, PredicateObligation, PredicateObligations}, + }, + inspect::{self, ProofTreeVisitor}, + normalize::deeply_normalize_for_diagnostics, +}; + +#[derive(Debug)] +pub struct FulfillmentError<'db> { + pub obligation: PredicateObligation<'db>, + pub code: FulfillmentErrorCode<'db>, + /// Diagnostics only: the 'root' obligation which resulted in + /// the failure to process `obligation`. 
This is the obligation + /// that was initially passed to `register_predicate_obligation` + pub root_obligation: PredicateObligation<'db>, +} + +impl<'db> FulfillmentError<'db> { + pub fn new( + obligation: PredicateObligation<'db>, + code: FulfillmentErrorCode<'db>, + root_obligation: PredicateObligation<'db>, + ) -> FulfillmentError<'db> { + FulfillmentError { obligation, code, root_obligation } + } + + pub fn is_true_error(&self) -> bool { + match self.code { + FulfillmentErrorCode::Select(_) + | FulfillmentErrorCode::Project(_) + | FulfillmentErrorCode::Subtype(_, _) + | FulfillmentErrorCode::ConstEquate(_, _) => true, + FulfillmentErrorCode::Cycle(_) | FulfillmentErrorCode::Ambiguity { overflow: _ } => { + false + } + } + } +} + +#[derive(Debug, Clone)] +pub enum FulfillmentErrorCode<'db> { + /// Inherently impossible to fulfill; this trait is implemented if and only + /// if it is already implemented. + Cycle(PredicateObligations<'db>), + Select(SelectionError<'db>), + Project(MismatchedProjectionTypes<'db>), + Subtype(ExpectedFound>, TypeError<'db>), // always comes from a SubtypePredicate + ConstEquate(ExpectedFound>, TypeError<'db>), + Ambiguity { + /// Overflow is only `Some(suggest_recursion_limit)` when using the next generation + /// trait solver `-Znext-solver`. With the old solver overflow is eagerly handled by + /// emitting a fatal error instead. + overflow: Option, + }, +} + +#[derive(Debug, Clone)] +pub struct MismatchedProjectionTypes<'db> { + pub err: TypeError<'db>, +} + +pub(super) fn fulfillment_error_for_no_solution<'db>( + infcx: &InferCtxt<'db>, + root_obligation: PredicateObligation<'db>, +) -> FulfillmentError<'db> { + let obligation = find_best_leaf_obligation(infcx, &root_obligation, false); + + let code = match obligation.predicate.kind().skip_binder() { + PredicateKind::Clause(ClauseKind::Projection(_)) => { + FulfillmentErrorCode::Project( + // FIXME: This could be a `Sorts` if the term is a type + MismatchedProjectionTypes { err: TypeError::Mismatch }, + ) + } + PredicateKind::Clause(ClauseKind::ConstArgHasType(ct, expected_ty)) => { + let ct_ty = match ct.kind() { + ConstKind::Unevaluated(uv) => { + infcx.interner.type_of(uv.def).instantiate(infcx.interner, uv.args) + } + ConstKind::Param(param_ct) => param_ct.find_const_ty_from_env(obligation.param_env), + ConstKind::Value(cv) => cv.ty, + kind => panic!( + "ConstArgHasWrongType failed but we don't know how to compute type for {kind:?}" + ), + }; + FulfillmentErrorCode::Select(SelectionError::ConstArgHasWrongType { + ct, + ct_ty, + expected_ty, + }) + } + PredicateKind::NormalizesTo(..) 
=> { + FulfillmentErrorCode::Project(MismatchedProjectionTypes { err: TypeError::Mismatch }) + } + PredicateKind::AliasRelate(_, _, _) => { + FulfillmentErrorCode::Project(MismatchedProjectionTypes { err: TypeError::Mismatch }) + } + PredicateKind::Subtype(pred) => { + let (a, b) = infcx.enter_forall_and_leak_universe( + obligation.predicate.kind().rebind((pred.a, pred.b)), + ); + let expected_found = ExpectedFound::new(a, b); + FulfillmentErrorCode::Subtype(expected_found, TypeError::Sorts(expected_found)) + } + PredicateKind::Coerce(pred) => { + let (a, b) = infcx.enter_forall_and_leak_universe( + obligation.predicate.kind().rebind((pred.a, pred.b)), + ); + let expected_found = ExpectedFound::new(b, a); + FulfillmentErrorCode::Subtype(expected_found, TypeError::Sorts(expected_found)) + } + PredicateKind::Clause(_) | PredicateKind::DynCompatible(_) | PredicateKind::Ambiguous => { + FulfillmentErrorCode::Select(SelectionError::Unimplemented) + } + PredicateKind::ConstEquate(..) => { + panic!("unexpected goal: {obligation:?}") + } + }; + + FulfillmentError { obligation, code, root_obligation } +} + +pub(super) fn fulfillment_error_for_stalled<'db>( + infcx: &InferCtxt<'db>, + root_obligation: PredicateObligation<'db>, +) -> FulfillmentError<'db> { + let (code, refine_obligation) = infcx.probe(|_| { + match <&SolverContext<'db>>::from(infcx).evaluate_root_goal( + root_obligation.as_goal(), + Span::dummy(), + None, + ) { + Ok(GoalEvaluation { + certainty: Certainty::Maybe { cause: MaybeCause::Ambiguity, .. }, + .. + }) => (FulfillmentErrorCode::Ambiguity { overflow: None }, true), + Ok(GoalEvaluation { + certainty: + Certainty::Maybe { + cause: + MaybeCause::Overflow { suggest_increasing_limit, keep_constraints: _ }, + .. + }, + .. + }) => ( + FulfillmentErrorCode::Ambiguity { overflow: Some(suggest_increasing_limit) }, + // Don't look into overflows because we treat overflows weirdly anyways. + // We discard the inference constraints from overflowing goals, so + // recomputing the goal again during `find_best_leaf_obligation` may apply + // inference guidance that makes other goals go from ambig -> pass, for example. + // + // FIXME: We should probably just look into overflows here. + false, + ), + Ok(GoalEvaluation { certainty: Certainty::Yes, .. 
}) => { + panic!( + "did not expect successful goal when collecting ambiguity errors for `{:?}`", + infcx.resolve_vars_if_possible(root_obligation.predicate), + ) + } + Err(_) => { + panic!( + "did not expect selection error when collecting ambiguity errors for `{:?}`", + infcx.resolve_vars_if_possible(root_obligation.predicate), + ) + } + } + }); + + FulfillmentError { + obligation: if refine_obligation { + find_best_leaf_obligation(infcx, &root_obligation, true) + } else { + root_obligation.clone() + }, + code, + root_obligation, + } +} + +pub(super) fn fulfillment_error_for_overflow<'db>( + infcx: &InferCtxt<'db>, + root_obligation: PredicateObligation<'db>, +) -> FulfillmentError<'db> { + FulfillmentError { + obligation: find_best_leaf_obligation(infcx, &root_obligation, true), + code: FulfillmentErrorCode::Ambiguity { overflow: Some(true) }, + root_obligation, + } +} + +#[instrument(level = "debug", skip(infcx), ret)] +fn find_best_leaf_obligation<'db>( + infcx: &InferCtxt<'db>, + obligation: &PredicateObligation<'db>, + consider_ambiguities: bool, +) -> PredicateObligation<'db> { + let obligation = infcx.resolve_vars_if_possible(obligation.clone()); + // FIXME: we use a probe here as the `BestObligation` visitor does not + // check whether it uses candidates which get shadowed by where-bounds. + // + // We should probably fix the visitor to not do so instead, as this also + // means the leaf obligation may be incorrect. + let obligation = infcx + .fudge_inference_if_ok(|| { + infcx + .visit_proof_tree( + obligation.as_goal(), + &mut BestObligation { obligation: obligation.clone(), consider_ambiguities }, + ) + .break_value() + .ok_or(()) + }) + .unwrap_or(obligation); + deeply_normalize_for_diagnostics(infcx, obligation.param_env, obligation) +} + +struct BestObligation<'db> { + obligation: PredicateObligation<'db>, + consider_ambiguities: bool, +} + +impl<'db> BestObligation<'db> { + fn with_derived_obligation( + &mut self, + derived_obligation: PredicateObligation<'db>, + and_then: impl FnOnce(&mut Self) -> >::Result, + ) -> >::Result { + let old_obligation = std::mem::replace(&mut self.obligation, derived_obligation); + let res = and_then(self); + self.obligation = old_obligation; + res + } + + /// Filter out the candidates that aren't interesting to visit for the + /// purposes of reporting errors. For ambiguities, we only consider + /// candidates that may hold. For errors, we only consider candidates that + /// *don't* hold and which have impl-where clauses that also don't hold. + fn non_trivial_candidates<'a>( + &self, + goal: &'a inspect::InspectGoal<'a, 'db>, + ) -> Vec> { + let mut candidates = goal.candidates(); + match self.consider_ambiguities { + true => { + // If we have an ambiguous obligation, we must consider *all* candidates + // that hold, or else we may guide inference causing other goals to go + // from ambig -> pass/fail. + candidates.retain(|candidate| candidate.result().is_ok()); + } + false => { + // We always handle rigid alias candidates separately as we may not add them for + // aliases whose trait bound doesn't hold. + candidates.retain(|c| !matches!(c.kind(), inspect::ProbeKind::RigidAlias { .. })); + // If we have >1 candidate, one may still be due to "boring" reasons, like + // an alias-relate that failed to hold when deeply evaluated. We really + // don't care about reasons like this. 
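// `with_derived_obligation` above swaps the visitor's current obligation for a derived
// one, runs a closure against it, and then restores the previous value. A tiny standalone
// sketch of that save/run/restore pattern (the `Reporter` type and its field are invented
// for illustration, not part of this crate):
struct Reporter {
    current: String,
}

impl Reporter {
    fn with_derived<R>(&mut self, derived: String, f: impl FnOnce(&mut Self) -> R) -> R {
        let old = std::mem::replace(&mut self.current, derived);
        let result = f(self);
        self.current = old;
        result
    }
}

fn main() {
    let mut reporter = Reporter { current: "root goal".to_owned() };
    let len = reporter.with_derived("nested goal".to_owned(), |r| r.current.len());
    assert_eq!(len, "nested goal".len());
    assert_eq!(reporter.current, "root goal");
}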
+ if candidates.len() > 1 { + candidates.retain(|candidate| { + goal.infcx().probe(|_| { + candidate.instantiate_nested_goals().iter().any(|nested_goal| { + matches!( + nested_goal.source(), + GoalSource::ImplWhereBound + | GoalSource::AliasBoundConstCondition + | GoalSource::InstantiateHigherRanked + | GoalSource::AliasWellFormed + ) && nested_goal.result().is_err() + }) + }) + }); + } + } + } + + candidates + } + + /// HACK: We walk the nested obligations for a well-formed arg manually, + /// since there's nontrivial logic in `wf.rs` to set up an obligation cause. + /// Ideally we'd be able to track this better. + fn visit_well_formed_goal( + &mut self, + candidate: &inspect::InspectCandidate<'_, 'db>, + term: Term<'db>, + ) -> ControlFlow> { + let infcx = candidate.goal().infcx(); + let param_env = candidate.goal().goal().param_env; + + for obligation in wf::unnormalized_obligations(infcx, param_env, term).into_iter().flatten() + { + let nested_goal = candidate + .instantiate_proof_tree_for_nested_goal(GoalSource::Misc, obligation.as_goal()); + // Skip nested goals that aren't the *reason* for our goal's failure. + match (self.consider_ambiguities, nested_goal.result()) { + (true, Ok(Certainty::Maybe { cause: MaybeCause::Ambiguity, .. })) + | (false, Err(_)) => {} + _ => continue, + } + + self.with_derived_obligation(obligation, |this| nested_goal.visit_with(this))?; + } + + ControlFlow::Break(self.obligation.clone()) + } + + /// If a normalization of an associated item or a trait goal fails without trying any + /// candidates it's likely that normalizing its self type failed. We manually detect + /// such cases here. + fn detect_error_in_self_ty_normalization( + &mut self, + goal: &inspect::InspectGoal<'_, 'db>, + self_ty: Ty<'db>, + ) -> ControlFlow> { + assert!(!self.consider_ambiguities); + let interner = goal.infcx().interner; + if let TyKind::Alias(..) = self_ty.kind() { + let infer_term = goal.infcx().next_ty_var(); + let pred = PredicateKind::AliasRelate( + self_ty.into(), + infer_term.into(), + AliasRelationDirection::Equate, + ); + let obligation = Obligation::new( + interner, + self.obligation.cause.clone(), + goal.goal().param_env, + pred, + ); + self.with_derived_obligation(obligation, |this| { + goal.infcx().visit_proof_tree_at_depth( + goal.goal().with(interner, pred), + goal.depth() + 1, + this, + ) + }) + } else { + ControlFlow::Continue(()) + } + } + + /// When a higher-ranked projection goal fails, check that the corresponding + /// higher-ranked trait goal holds or not. This is because the process of + /// instantiating and then re-canonicalizing the binder of the projection goal + /// forces us to be unable to see that the leak check failed in the nested + /// `NormalizesTo` goal, so we don't fall back to the rigid projection check + /// that should catch when a projection goal fails due to an unsatisfied trait + /// goal. 
+ fn detect_trait_error_in_higher_ranked_projection( + &mut self, + goal: &inspect::InspectGoal<'_, 'db>, + ) -> ControlFlow> { + let interner = goal.infcx().interner; + if let Some(projection_clause) = goal.goal().predicate.as_projection_clause() + && !projection_clause.bound_vars().is_empty() + { + let pred = projection_clause.map_bound(|proj| proj.projection_term.trait_ref(interner)); + let obligation = Obligation::new( + interner, + self.obligation.cause.clone(), + goal.goal().param_env, + deeply_normalize_for_diagnostics(goal.infcx(), goal.goal().param_env, pred), + ); + self.with_derived_obligation(obligation, |this| { + goal.infcx().visit_proof_tree_at_depth( + goal.goal().with(interner, pred), + goal.depth() + 1, + this, + ) + }) + } else { + ControlFlow::Continue(()) + } + } + + /// It is likely that `NormalizesTo` failed without any applicable candidates + /// because the alias is not well-formed. + /// + /// As we only enter `RigidAlias` candidates if the trait bound of the associated type + /// holds, we discard these candidates in `non_trivial_candidates` and always manually + /// check this here. + fn detect_non_well_formed_assoc_item( + &mut self, + goal: &inspect::InspectGoal<'_, 'db>, + alias: AliasTerm<'db>, + ) -> ControlFlow> { + let interner = goal.infcx().interner; + let obligation = Obligation::new( + interner, + self.obligation.cause.clone(), + goal.goal().param_env, + alias.trait_ref(interner), + ); + self.with_derived_obligation(obligation, |this| { + goal.infcx().visit_proof_tree_at_depth( + goal.goal().with(interner, alias.trait_ref(interner)), + goal.depth() + 1, + this, + ) + }) + } + + /// If we have no candidates, then it's likely that there is a + /// non-well-formed alias in the goal. + fn detect_error_from_empty_candidates( + &mut self, + goal: &inspect::InspectGoal<'_, 'db>, + ) -> ControlFlow> { + let interner = goal.infcx().interner; + let pred_kind = goal.goal().predicate.kind(); + + match pred_kind.no_bound_vars() { + Some(PredicateKind::Clause(ClauseKind::Trait(pred))) => { + self.detect_error_in_self_ty_normalization(goal, pred.self_ty())?; + } + Some(PredicateKind::NormalizesTo(pred)) => { + if let AliasTermKind::ProjectionTy | AliasTermKind::ProjectionConst = + pred.alias.kind(interner) + { + self.detect_error_in_self_ty_normalization(goal, pred.alias.self_ty())?; + self.detect_non_well_formed_assoc_item(goal, pred.alias)?; + } + } + Some(_) | None => {} + } + + ControlFlow::Break(self.obligation.clone()) + } +} + +impl<'db> ProofTreeVisitor<'db> for BestObligation<'db> { + type Result = ControlFlow>; + + #[instrument(level = "trace", skip(self, goal), fields(goal = ?goal.goal()))] + fn visit_goal(&mut self, goal: &inspect::InspectGoal<'_, 'db>) -> Self::Result { + let interner = goal.infcx().interner; + // Skip goals that aren't the *reason* for our goal's failure. + match (self.consider_ambiguities, goal.result()) { + (true, Ok(Certainty::Maybe { cause: MaybeCause::Ambiguity, .. })) | (false, Err(_)) => { + } + _ => return ControlFlow::Continue(()), + } + + let pred = goal.goal().predicate; + + let candidates = self.non_trivial_candidates(goal); + let candidate = match candidates.as_slice() { + [candidate] => candidate, + [] => return self.detect_error_from_empty_candidates(goal), + _ => return ControlFlow::Break(self.obligation.clone()), + }; + + // // Don't walk into impls that have `do_not_recommend`. 
+ // if let inspect::ProbeKind::TraitCandidate { + // source: CandidateSource::Impl(impl_def_id), + // result: _, + // } = candidate.kind() + // && interner.do_not_recommend_impl(impl_def_id) + // { + // trace!("#[do_not_recommend] -> exit"); + // return ControlFlow::Break(self.obligation.clone()); + // } + + // FIXME: Also, what about considering >1 layer up the stack? May be necessary + // for normalizes-to. + let child_mode = match pred.kind().skip_binder() { + PredicateKind::Clause(ClauseKind::Trait(trait_pred)) => { + ChildMode::Trait(pred.kind().rebind(trait_pred)) + } + PredicateKind::Clause(ClauseKind::HostEffect(host_pred)) => { + ChildMode::Host(pred.kind().rebind(host_pred)) + } + PredicateKind::NormalizesTo(normalizes_to) + if matches!( + normalizes_to.alias.kind(interner), + AliasTermKind::ProjectionTy | AliasTermKind::ProjectionConst + ) => + { + ChildMode::Trait(pred.kind().rebind(TraitPredicate { + trait_ref: normalizes_to.alias.trait_ref(interner), + polarity: PredicatePolarity::Positive, + })) + } + PredicateKind::Clause(ClauseKind::WellFormed(term)) => { + return self.visit_well_formed_goal(candidate, term); + } + _ => ChildMode::PassThrough, + }; + + let nested_goals = candidate.instantiate_nested_goals(); + + // If the candidate requires some `T: FnPtr` bound which does not hold should not be treated as + // an actual candidate, instead we should treat them as if the impl was never considered to + // have potentially applied. As if `impl Trait for for<..> fn(..A) -> R` was written + // instead of `impl Trait for T`. + // + // We do this as a separate loop so that we do not choose to tell the user about some nested + // goal before we encounter a `T: FnPtr` nested goal. + for nested_goal in &nested_goals { + if let Some(poly_trait_pred) = nested_goal.goal().predicate.as_trait_clause() + && interner + .is_trait_lang_item(poly_trait_pred.def_id(), SolverTraitLangItem::FnPtrTrait) + && let Err(NoSolution) = nested_goal.result() + { + return ControlFlow::Break(self.obligation.clone()); + } + } + + let mut impl_where_bound_count = 0; + for nested_goal in nested_goals { + trace!(nested_goal = ?(nested_goal.goal(), nested_goal.source(), nested_goal.result())); + + let nested_pred = nested_goal.goal().predicate; + + let make_obligation = || Obligation { + cause: ObligationCause::dummy(), + param_env: nested_goal.goal().param_env, + predicate: nested_pred, + recursion_depth: self.obligation.recursion_depth + 1, + }; + + let obligation; + match (child_mode, nested_goal.source()) { + ( + ChildMode::Trait(_) | ChildMode::Host(_), + GoalSource::Misc | GoalSource::TypeRelating | GoalSource::NormalizeGoal(_), + ) => { + continue; + } + (ChildMode::Trait(parent_trait_pred), GoalSource::ImplWhereBound) => { + obligation = make_obligation(); + impl_where_bound_count += 1; + } + ( + ChildMode::Host(parent_host_pred), + GoalSource::ImplWhereBound | GoalSource::AliasBoundConstCondition, + ) => { + obligation = make_obligation(); + impl_where_bound_count += 1; + } + // Skip over a higher-ranked predicate. + (_, GoalSource::InstantiateHigherRanked) => { + obligation = self.obligation.clone(); + } + (ChildMode::PassThrough, _) + | (_, GoalSource::AliasWellFormed | GoalSource::AliasBoundConstCondition) => { + obligation = make_obligation(); + } + } + + self.with_derived_obligation(obligation, |this| nested_goal.visit_with(this))?; + } + + // alias-relate may fail because the lhs or rhs can't be normalized, + // and therefore is treated as rigid. 
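// The visitor above reports an error by returning `ControlFlow::Break(obligation)` from
// the deepest failing nested goal and letting every caller propagate it outwards, which
// is how `find_best_leaf_obligation` ends up with a leaf rather than the root. A
// self-contained sketch of that idiom over a toy goal tree (all names invented):
use std::ops::ControlFlow;

struct Goal {
    label: &'static str,
    failed: bool,
    nested: Vec<Goal>,
}

fn find_failing(goal: &Goal) -> ControlFlow<&'static str> {
    for nested in &goal.nested {
        // Propagate a `Break` from the nested goal instead of continuing the walk.
        if let ControlFlow::Break(label) = find_failing(nested) {
            return ControlFlow::Break(label);
        }
    }
    if goal.failed { ControlFlow::Break(goal.label) } else { ControlFlow::Continue(()) }
}

fn main() {
    let root = Goal {
        label: "Vec<T>: Clone",
        failed: true,
        nested: vec![Goal { label: "T: Clone", failed: true, nested: vec![] }],
    };
    // The deepest failing goal wins, mirroring the "best leaf obligation" idea.
    assert_eq!(find_failing(&root), ControlFlow::Break("T: Clone"));
}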
+ if let Some(PredicateKind::AliasRelate(lhs, rhs, _)) = pred.kind().no_bound_vars() { + goal.infcx().visit_proof_tree_at_depth( + goal.goal().with(interner, ClauseKind::WellFormed(lhs)), + goal.depth() + 1, + self, + )?; + goal.infcx().visit_proof_tree_at_depth( + goal.goal().with(interner, ClauseKind::WellFormed(rhs)), + goal.depth() + 1, + self, + )?; + } + + self.detect_trait_error_in_higher_ranked_projection(goal)?; + + ControlFlow::Break(self.obligation.clone()) + } +} + +#[derive(Debug, Copy, Clone)] +enum ChildMode<'db> { + // Try to derive an `ObligationCause::{ImplDerived,BuiltinDerived}`, + // and skip all `GoalSource::Misc`, which represent useless obligations + // such as alias-eq which may not hold. + Trait(PolyTraitPredicate<'db>), + // Try to derive an `ObligationCause::{ImplDerived,BuiltinDerived}`, + // and skip all `GoalSource::Misc`, which represent useless obligations + // such as alias-eq which may not hold. + Host(Binder<'db, HostEffectPredicate>>), + // Skip trying to derive an `ObligationCause` from this obligation, and + // report *all* sub-obligations as if they came directly from the parent + // obligation. + PassThrough, +} + +impl<'db> NextSolverError<'db> { + pub fn to_debuggable_error(&self, infcx: &InferCtxt<'db>) -> FulfillmentError<'db> { + match self { + NextSolverError::TrueError(obligation) => { + fulfillment_error_for_no_solution(infcx, obligation.clone()) + } + NextSolverError::Ambiguity(obligation) => { + fulfillment_error_for_stalled(infcx, obligation.clone()) + } + NextSolverError::Overflow(obligation) => { + fulfillment_error_for_overflow(infcx, obligation.clone()) + } + } + } +} + +mod wf { + use std::iter; + + use hir_def::ItemContainerId; + use rustc_type_ir::inherent::{ + AdtDef, BoundExistentialPredicates, GenericArg, GenericArgs as _, IntoKind, SliceLike, + Term as _, Ty as _, + }; + use rustc_type_ir::lang_items::SolverTraitLangItem; + use rustc_type_ir::{ + Interner, PredicatePolarity, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, + TypeVisitor, + }; + use tracing::{debug, instrument, trace}; + + use crate::next_solver::infer::InferCtxt; + use crate::next_solver::infer::traits::{ + Obligation, ObligationCause, PredicateObligation, PredicateObligations, + }; + use crate::next_solver::{ + AliasTerm, Binder, ClauseKind, Const, ConstKind, Ctor, DbInterner, ExistentialPredicate, + GenericArgs, ParamEnv, Predicate, PredicateKind, Region, SolverDefId, Term, TraitPredicate, + TraitRef, Ty, TyKind, + }; + + /// Compute the predicates that are required for a type to be well-formed. + /// + /// This is only intended to be used in the new solver, since it does not + /// take into account recursion depth or proper error-reporting spans. + pub fn unnormalized_obligations<'db>( + infcx: &InferCtxt<'db>, + param_env: ParamEnv<'db>, + term: Term<'db>, + ) -> Option> { + debug_assert_eq!(term, infcx.resolve_vars_if_possible(term)); + + // However, if `arg` IS an unresolved inference variable, returns `None`, + // because we are not able to make any progress at all. This is to prevent + // cycles where we say "?0 is WF if ?0 is WF". 
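// A minimal model of the guard described just above: if the root term is still an
// unresolved inference variable we return `None` rather than produce a "?0 is WF if
// ?0 is WF" obligation; nested inference variables are instead deferred as `WellFormed`
// requirements. `ToyTy` and `Requirement` are illustrative stand-ins, not this crate's types.
#[derive(Debug)]
enum ToyTy {
    Infer(u32),
    Slice(Box<ToyTy>),
    Ref(Box<ToyTy>),
    Unit,
}

#[derive(Debug, PartialEq)]
enum Requirement {
    Sized(String),
    WellFormed(String),
}

fn toy_unnormalized_obligations(root: &ToyTy) -> Option<Vec<Requirement>> {
    if let ToyTy::Infer(_) = root {
        return None;
    }
    let mut out = Vec::new();
    collect(root, &mut out);
    Some(out)
}

fn collect(ty: &ToyTy, out: &mut Vec<Requirement>) {
    match ty {
        // Defer nested inference variables instead of recursing into them.
        ToyTy::Infer(vid) => out.push(Requirement::WellFormed(format!("?{vid}"))),
        ToyTy::Slice(elem) => {
            out.push(Requirement::Sized(format!("{elem:?}")));
            collect(elem, out);
        }
        ToyTy::Ref(inner) => collect(inner, out),
        ToyTy::Unit => {}
    }
}

fn main() {
    assert_eq!(toy_unnormalized_obligations(&ToyTy::Infer(0)), None);
    let ty = ToyTy::Ref(Box::new(ToyTy::Slice(Box::new(ToyTy::Unit))));
    assert_eq!(
        toy_unnormalized_obligations(&ty),
        Some(vec![Requirement::Sized("Unit".to_owned())])
    );
}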
+ if term.is_infer() { + return None; + } + + let mut wf = + WfPredicates { infcx, param_env, out: PredicateObligations::new(), recursion_depth: 0 }; + wf.add_wf_preds_for_term(term); + Some(wf.out) + } + + struct WfPredicates<'a, 'db> { + infcx: &'a InferCtxt<'db>, + param_env: ParamEnv<'db>, + out: PredicateObligations<'db>, + recursion_depth: usize, + } + + /// Controls whether we "elaborate" supertraits and so forth on the WF + /// predicates. This is a kind of hack to address #43784. The + /// underlying problem in that issue was a trait structure like: + /// + /// ```ignore (illustrative) + /// trait Foo: Copy { } + /// trait Bar: Foo { } + /// impl Foo for T { } + /// impl Bar for T { } + /// ``` + /// + /// Here, in the `Foo` impl, we will check that `T: Copy` holds -- but + /// we decide that this is true because `T: Bar` is in the + /// where-clauses (and we can elaborate that to include `T: + /// Copy`). This wouldn't be a problem, except that when we check the + /// `Bar` impl, we decide that `T: Foo` must hold because of the `Foo` + /// impl. And so nowhere did we check that `T: Copy` holds! + /// + /// To resolve this, we elaborate the WF requirements that must be + /// proven when checking impls. This means that (e.g.) the `impl Bar + /// for T` will be forced to prove not only that `T: Foo` but also `T: + /// Copy` (which it won't be able to do, because there is no `Copy` + /// impl for `T`). + #[derive(Debug, PartialEq, Eq, Copy, Clone)] + enum Elaborate { + All, + None, + } + + impl<'a, 'db> WfPredicates<'a, 'db> { + fn interner(&self) -> DbInterner<'db> { + self.infcx.interner + } + + /// Pushes the obligations required for `trait_ref` to be WF into `self.out`. + fn add_wf_preds_for_trait_pred( + &mut self, + trait_pred: TraitPredicate<'db>, + elaborate: Elaborate, + ) { + let tcx = self.interner(); + let trait_ref = trait_pred.trait_ref; + + // Negative trait predicates don't require supertraits to hold, just + // that their args are WF. + if trait_pred.polarity == PredicatePolarity::Negative { + self.add_wf_preds_for_negative_trait_pred(trait_ref); + return; + } + + // if the trait predicate is not const, the wf obligations should not be const as well. + let obligations = self.nominal_obligations(trait_ref.def_id.0.into(), trait_ref.args); + + debug!("compute_trait_pred obligations {:?}", obligations); + let param_env = self.param_env; + let depth = self.recursion_depth; + + let extend = |PredicateObligation { predicate, mut cause, .. }| { + Obligation::with_depth(tcx, cause, depth, param_env, predicate) + }; + + if let Elaborate::All = elaborate { + let implied_obligations = rustc_type_ir::elaborate::elaborate(tcx, obligations); + let implied_obligations = implied_obligations.map(extend); + self.out.extend(implied_obligations); + } else { + self.out.extend(obligations); + } + + self.out.extend( + trait_ref + .args + .iter() + .enumerate() + .filter_map(|(i, arg)| arg.as_term().map(|t| (i, t))) + .filter(|(_, term)| !term.has_escaping_bound_vars()) + .map(|(i, term)| { + let mut cause = ObligationCause::misc(); + // The first arg is the self ty - use the correct span for it. + Obligation::with_depth( + tcx, + cause, + depth, + param_env, + ClauseKind::WellFormed(term), + ) + }), + ); + } + + // Compute the obligations that are required for `trait_ref` to be WF, + // given that it is a *negative* trait predicate. 
+ fn add_wf_preds_for_negative_trait_pred(&mut self, trait_ref: TraitRef<'db>) { + for arg in trait_ref.args { + if let Some(term) = arg.as_term() { + self.add_wf_preds_for_term(term); + } + } + } + + /// Pushes the obligations required for an alias (except inherent) to be WF + /// into `self.out`. + fn add_wf_preds_for_alias_term(&mut self, data: AliasTerm<'db>) { + // A projection is well-formed if + // + // (a) its predicates hold (*) + // (b) its args are wf + // + // (*) The predicates of an associated type include the predicates of + // the trait that it's contained in. For example, given + // + // trait A: Clone { + // type X where T: Copy; + // } + // + // The predicates of `<() as A>::X` are: + // [ + // `(): Sized` + // `(): Clone` + // `(): A` + // `i32: Sized` + // `i32: Clone` + // `i32: Copy` + // ] + let obligations = self.nominal_obligations(data.def_id, data.args); + self.out.extend(obligations); + + self.add_wf_preds_for_projection_args(data.args); + } + + fn add_wf_preds_for_projection_args(&mut self, args: GenericArgs<'db>) { + let tcx = self.interner(); + let cause = ObligationCause::new(); + let param_env = self.param_env; + let depth = self.recursion_depth; + + self.out.extend( + args.iter() + .filter_map(|arg| arg.as_term()) + .filter(|term| !term.has_escaping_bound_vars()) + .map(|term| { + Obligation::with_depth( + tcx, + cause.clone(), + depth, + param_env, + ClauseKind::WellFormed(term), + ) + }), + ); + } + + fn require_sized(&mut self, subty: Ty<'db>) { + if !subty.has_escaping_bound_vars() { + let cause = ObligationCause::new(); + let trait_ref = TraitRef::new( + self.interner(), + self.interner().require_trait_lang_item(SolverTraitLangItem::Sized), + [subty], + ); + self.out.push(Obligation::with_depth( + self.interner(), + cause, + self.recursion_depth, + self.param_env, + Binder::dummy(trait_ref), + )); + } + } + + /// Pushes all the predicates needed to validate that `term` is WF into `out`. + #[instrument(level = "debug", skip(self))] + fn add_wf_preds_for_term(&mut self, term: Term<'db>) { + term.visit_with(self); + debug!(?self.out); + } + + #[instrument(level = "debug", skip(self))] + fn nominal_obligations( + &mut self, + def_id: SolverDefId, + args: GenericArgs<'db>, + ) -> PredicateObligations<'db> { + // PERF: `Sized`'s predicates include `MetaSized`, but both are compiler implemented marker + // traits, so `MetaSized` will always be WF if `Sized` is WF and vice-versa. Determining + // the nominal obligations of `Sized` would in-effect just elaborate `MetaSized` and make + // the compiler do a bunch of work needlessly. 
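// `Elaborate::All` above relies on elaboration: a set of trait predicates is expanded with
// everything implied by supertrait declarations, so for `trait Bar: Foo` and `trait Foo: Copy`,
// `T: Bar` also yields `T: Foo` and `T: Copy`. A small worklist sketch over a hypothetical
// supertrait map (plain strings stand in for predicates; not this crate's API):
use std::collections::{HashMap, HashSet};

fn elaborate(
    supertraits: &HashMap<&'static str, Vec<&'static str>>,
    roots: &[&'static str],
) -> HashSet<&'static str> {
    let mut seen: HashSet<&'static str> = roots.iter().copied().collect();
    let mut worklist: Vec<&'static str> = roots.to_vec();
    while let Some(pred) = worklist.pop() {
        for &implied in supertraits.get(pred).into_iter().flatten() {
            // Only queue predicates we have not implied yet.
            if seen.insert(implied) {
                worklist.push(implied);
            }
        }
    }
    seen
}

fn main() {
    let mut supertraits = HashMap::new();
    supertraits.insert("T: Bar", vec!["T: Foo"]);
    supertraits.insert("T: Foo", vec!["T: Copy"]);
    let elaborated = elaborate(&supertraits, &["T: Bar"]);
    assert!(elaborated.contains("T: Copy"));
}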
+ if let SolverDefId::TraitId(def_id) = def_id + && self.interner().is_trait_lang_item(def_id.into(), SolverTraitLangItem::Sized) + { + return Default::default(); + } + + self.interner() + .predicates_of(def_id) + .iter_instantiated(self.interner(), args) + .map(|pred| { + let cause = ObligationCause::new(); + Obligation::with_depth( + self.interner(), + cause, + self.recursion_depth, + self.param_env, + pred, + ) + }) + .filter(|pred| !pred.has_escaping_bound_vars()) + .collect() + } + + fn add_wf_preds_for_dyn_ty( + &mut self, + ty: Ty<'db>, + data: &[Binder<'db, ExistentialPredicate<'db>>], + region: Region<'db>, + ) { + // Imagine a type like this: + // + // trait Foo { } + // trait Bar<'c> : 'c { } + // + // &'b (Foo+'c+Bar<'d>) + // ^ + // + // In this case, the following relationships must hold: + // + // 'b <= 'c + // 'd <= 'c + // + // The first conditions is due to the normal region pointer + // rules, which say that a reference cannot outlive its + // referent. + // + // The final condition may be a bit surprising. In particular, + // you may expect that it would have been `'c <= 'd`, since + // usually lifetimes of outer things are conservative + // approximations for inner things. However, it works somewhat + // differently with trait objects: here the idea is that if the + // user specifies a region bound (`'c`, in this case) it is the + // "master bound" that *implies* that bounds from other traits are + // all met. (Remember that *all bounds* in a type like + // `Foo+Bar+Zed` must be met, not just one, hence if we write + // `Foo<'x>+Bar<'y>`, we know that the type outlives *both* 'x and + // 'y.) + // + // Note: in fact we only permit builtin traits, not `Bar<'d>`, I + // am looking forward to the future here. + if !data.has_escaping_bound_vars() && !region.has_escaping_bound_vars() { + let implicit_bounds = object_region_bounds(self.interner(), data); + + let explicit_bound = region; + + self.out.reserve(implicit_bounds.len()); + for implicit_bound in implicit_bounds { + let cause = ObligationCause::new(); + let outlives = Binder::dummy(rustc_type_ir::OutlivesPredicate( + explicit_bound, + implicit_bound, + )); + self.out.push(Obligation::with_depth( + self.interner(), + cause, + self.recursion_depth, + self.param_env, + outlives, + )); + } + + // We don't add any wf predicates corresponding to the trait ref's generic arguments + // which allows code like this to compile: + // ```rust + // trait Trait {} + // fn foo(_: &dyn Trait<[u32]>) {} + // ``` + } + } + } + + impl<'a, 'db> TypeVisitor> for WfPredicates<'a, 'db> { + type Result = (); + + fn visit_ty(&mut self, t: Ty<'db>) -> Self::Result { + debug!("wf bounds for t={:?} t.kind={:#?}", t, t.kind()); + + let tcx = self.interner(); + + match t.kind() { + TyKind::Bool + | TyKind::Char + | TyKind::Int(..) + | TyKind::Uint(..) + | TyKind::Float(..) + | TyKind::Error(_) + | TyKind::Str + | TyKind::CoroutineWitness(..) + | TyKind::Never + | TyKind::Param(_) + | TyKind::Bound(..) + | TyKind::Placeholder(..) + | TyKind::Foreign(..) => { + // WfScalar, WfParameter, etc + } + + // Can only infer to `TyKind::Int(_) | TyKind::Uint(_)`. + TyKind::Infer(rustc_type_ir::IntVar(_)) => {} + + // Can only infer to `TyKind::Float(_)`. + TyKind::Infer(rustc_type_ir::FloatVar(_)) => {} + + TyKind::Slice(subty) => { + self.require_sized(subty); + } + + TyKind::Array(subty, len) => { + self.require_sized(subty); + // Note that the len being WF is implicitly checked while visiting. + // Here we just check that it's of type usize. 
+ let cause = ObligationCause::new(); + self.out.push(Obligation::with_depth( + tcx, + cause, + self.recursion_depth, + self.param_env, + Binder::dummy(PredicateKind::Clause(ClauseKind::ConstArgHasType( + len, + Ty::new_unit(self.interner()), + ))), + )); + } + + TyKind::Pat(base_ty, pat) => { + self.require_sized(base_ty); + } + + TyKind::Tuple(tys) => { + if let Some((_last, rest)) = tys.split_last() { + for &elem in rest { + self.require_sized(elem); + } + } + } + + TyKind::RawPtr(_, _) => { + // Simple cases that are WF if their type args are WF. + } + + TyKind::Alias( + rustc_type_ir::Projection | rustc_type_ir::Opaque | rustc_type_ir::Free, + data, + ) => { + let obligations = self.nominal_obligations(data.def_id, data.args); + self.out.extend(obligations); + } + TyKind::Alias(rustc_type_ir::Inherent, data) => { + return; + } + + TyKind::Adt(def, args) => { + // WfNominalType + let obligations = self.nominal_obligations(def.def_id().0.into(), args); + self.out.extend(obligations); + } + + TyKind::FnDef(did, args) => { + // HACK: Check the return type of function definitions for + // well-formedness to mostly fix #84533. This is still not + // perfect and there may be ways to abuse the fact that we + // ignore requirements with escaping bound vars. That's a + // more general issue however. + let fn_sig = tcx.fn_sig(did).instantiate(tcx, args); + fn_sig.output().skip_binder().visit_with(self); + + let did = match did.0 { + hir_def::CallableDefId::FunctionId(id) => id.into(), + hir_def::CallableDefId::StructId(id) => SolverDefId::Ctor(Ctor::Struct(id)), + hir_def::CallableDefId::EnumVariantId(id) => { + SolverDefId::Ctor(Ctor::Enum(id)) + } + }; + let obligations = self.nominal_obligations(did, args); + self.out.extend(obligations); + } + + TyKind::Ref(r, rty, _) => { + // WfReference + if !r.has_escaping_bound_vars() && !rty.has_escaping_bound_vars() { + let cause = ObligationCause::new(); + self.out.push(Obligation::with_depth( + tcx, + cause, + self.recursion_depth, + self.param_env, + Binder::dummy(PredicateKind::Clause(ClauseKind::TypeOutlives( + rustc_type_ir::OutlivesPredicate(rty, r), + ))), + )); + } + } + + TyKind::Coroutine(did, args, ..) => { + // Walk ALL the types in the coroutine: this will + // include the upvar types as well as the yield + // type. Note that this is mildly distinct from + // the closure case, where we have to be careful + // about the signature of the closure. We don't + // have the problem of implied bounds here since + // coroutines don't take arguments. + let obligations = self.nominal_obligations(did.0.into(), args); + self.out.extend(obligations); + } + + TyKind::Closure(did, args) => { + // Note that we cannot skip the generic types + // types. Normally, within the fn + // body where they are created, the generics will + // always be WF, and outside of that fn body we + // are not directly inspecting closure types + // anyway, except via auto trait matching (which + // only inspects the upvar types). + // But when a closure is part of a type-alias-impl-trait + // then the function that created the defining site may + // have had more bounds available than the type alias + // specifies. This may cause us to have a closure in the + // hidden type that is not actually well formed and + // can cause compiler crashes when the user abuses unsafe + // code to procure such a closure. 
+ // See tests/ui/type-alias-impl-trait/wf_check_closures.rs + let obligations = self.nominal_obligations(did.0.into(), args); + self.out.extend(obligations); + // Only check the upvar types for WF, not the rest + // of the types within. This is needed because we + // capture the signature and it may not be WF + // without the implied bounds. Consider a closure + // like `|x: &'a T|` -- it may be that `T: 'a` is + // not known to hold in the creator's context (and + // indeed the closure may not be invoked by its + // creator, but rather turned to someone who *can* + // verify that). + // + // The special treatment of closures here really + // ought not to be necessary either; the problem + // is related to #25860 -- there is no way for us + // to express a fn type complete with the implied + // bounds that it is assuming. I think in reality + // the WF rules around fn are a bit messed up, and + // that is the rot problem: `fn(&'a T)` should + // probably always be WF, because it should be + // shorthand for something like `where(T: 'a) { + // fn(&'a T) }`, as discussed in #25860. + let upvars = args.as_closure().tupled_upvars_ty(); + return upvars.visit_with(self); + } + + TyKind::CoroutineClosure(did, args) => { + // See the above comments. The same apply to coroutine-closures. + let obligations = self.nominal_obligations(did.0.into(), args); + self.out.extend(obligations); + let upvars = args.as_coroutine_closure().tupled_upvars_ty(); + return upvars.visit_with(self); + } + + TyKind::FnPtr(..) => { + // Let the visitor iterate into the argument/return + // types appearing in the fn signature. + } + TyKind::UnsafeBinder(ty) => {} + + TyKind::Dynamic(data, r) => { + // WfObject + // + // Here, we defer WF checking due to higher-ranked + // regions. This is perhaps not ideal. + self.add_wf_preds_for_dyn_ty(t, data.as_slice(), r); + + // FIXME(#27579) RFC also considers adding trait + // obligations that don't refer to Self and + // checking those + if let Some(principal) = data.principal_def_id() { + self.out.push(Obligation::with_depth( + tcx, + ObligationCause::new(), + self.recursion_depth, + self.param_env, + Binder::dummy(PredicateKind::DynCompatible(principal)), + )); + } + } + + // Inference variables are the complicated case, since we don't + // know what type they are. We do two things: + // + // 1. Check if they have been resolved, and if so proceed with + // THAT type. + // 2. If not, we've at least simplified things (e.g., we went + // from `Vec?0>: WF` to `?0: WF`), so we can + // register a pending obligation and keep + // moving. (Goal is that an "inductive hypothesis" + // is satisfied to ensure termination.) + // See also the comment on `fn obligations`, describing cycle + // prevention, which happens before this can be reached. 
+ TyKind::Infer(_) => { + let cause = ObligationCause::new(); + self.out.push(Obligation::with_depth( + tcx, + cause, + self.recursion_depth, + self.param_env, + Binder::dummy(PredicateKind::Clause(ClauseKind::WellFormed(t.into()))), + )); + } + } + + t.super_visit_with(self) + } + + fn visit_const(&mut self, c: Const<'db>) -> Self::Result { + let tcx = self.interner(); + + match c.kind() { + ConstKind::Unevaluated(uv) => { + if !c.has_escaping_bound_vars() { + let predicate = + Binder::dummy(PredicateKind::Clause(ClauseKind::ConstEvaluatable(c))); + let cause = ObligationCause::new(); + self.out.push(Obligation::with_depth( + tcx, + cause, + self.recursion_depth, + self.param_env, + predicate, + )); + + if let SolverDefId::ConstId(uv_def) = uv.def + && let ItemContainerId::ImplId(impl_) = + uv_def.loc(self.interner().db).container + && self.interner().db.impl_signature(impl_).target_trait.is_none() + { + return; // Subtree is handled by above function + } else { + let obligations = self.nominal_obligations(uv.def, uv.args); + self.out.extend(obligations); + } + } + } + ConstKind::Infer(_) => { + let cause = ObligationCause::new(); + + self.out.push(Obligation::with_depth( + tcx, + cause, + self.recursion_depth, + self.param_env, + Binder::dummy(PredicateKind::Clause(ClauseKind::WellFormed(c.into()))), + )); + } + ConstKind::Expr(_) => { + // FIXME(generic_const_exprs): this doesn't verify that given `Expr(N + 1)` the + // trait bound `typeof(N): Add` holds. This is currently unnecessary + // as `ConstKind::Expr` is only produced via normalization of `ConstKind::Unevaluated` + // which means that the `DefId` would have been typeck'd elsewhere. However in + // the future we may allow directly lowering to `ConstKind::Expr` in which case + // we would not be proving bounds we should. + + let predicate = + Binder::dummy(PredicateKind::Clause(ClauseKind::ConstEvaluatable(c))); + let cause = ObligationCause::new(); + self.out.push(Obligation::with_depth( + tcx, + cause, + self.recursion_depth, + self.param_env, + predicate, + )); + } + + ConstKind::Error(_) + | ConstKind::Param(_) + | ConstKind::Bound(..) + | ConstKind::Placeholder(..) => { + // These variants are trivially WF, so nothing to do here. + } + ConstKind::Value(..) => { + // FIXME: Enforce that values are structurally-matchable. + } + } + + c.super_visit_with(self) + } + + fn visit_predicate(&mut self, _p: Predicate<'db>) -> Self::Result { + panic!("predicate should not be checked for well-formedness"); + } + } + + /// Given an object type like `SomeTrait + Send`, computes the lifetime + /// bounds that must hold on the elided self type. These are derived + /// from the declarations of `SomeTrait`, `Send`, and friends -- if + /// they declare `trait SomeTrait : 'static`, for example, then + /// `'static` would appear in the list. + /// + /// N.B., in some cases, particularly around higher-ranked bounds, + /// this function returns a kind of conservative approximation. + /// That is, all regions returned by this function are definitely + /// required, but there may be other region bounds that are not + /// returned, as well as requirements like `for<'a> T: 'a`. + /// + /// Requires that trait definitions have been processed so that we can + /// elaborate predicates and walk supertraits. 
+ pub fn object_region_bounds<'db>( + interner: DbInterner<'db>, + existential_predicates: &[Binder<'db, ExistentialPredicate<'db>>], + ) -> Vec> { + let erased_self_ty = Ty::new_unit(interner); + + let predicates = existential_predicates + .iter() + .map(|predicate| predicate.with_self_ty(interner, erased_self_ty)); + + rustc_type_ir::elaborate::elaborate(interner, predicates) + .filter_map(|pred| { + debug!(?pred); + match pred.kind().skip_binder() { + ClauseKind::TypeOutlives(rustc_type_ir::OutlivesPredicate(ref t, ref r)) => { + // Search for a bound of the form `erased_self_ty + // : 'a`, but be wary of something like `for<'a> + // erased_self_ty : 'a` (we interpret a + // higher-ranked bound like that as 'static, + // though at present the code in `fulfill.rs` + // considers such bounds to be unsatisfiable, so + // it's kind of a moot point since you could never + // construct such an object, but this seems + // correct even if that code changes). + if t == &erased_self_ty && !r.has_escaping_bound_vars() { + Some(*r) + } else { + None + } + } + ClauseKind::Trait(_) + | ClauseKind::HostEffect(..) + | ClauseKind::RegionOutlives(_) + | ClauseKind::Projection(_) + | ClauseKind::ConstArgHasType(_, _) + | ClauseKind::WellFormed(_) + | ClauseKind::UnstableFeature(_) + | ClauseKind::ConstEvaluatable(_) => None, + } + }) + .collect() + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs new file mode 100644 index 0000000000000..097bb85cbd491 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs @@ -0,0 +1,582 @@ +//! Things related to generic args in the next-trait-solver. + +use hir_def::GenericParamId; +use intern::{Interned, Symbol}; +use rustc_type_ir::{ + ClosureArgs, CollectAndApply, ConstVid, CoroutineArgs, CoroutineClosureArgs, FnSig, FnSigTys, + GenericArgKind, IntTy, Interner, TermKind, TyKind, TyVid, TypeFoldable, TypeVisitable, + Variance, + inherent::{ + GenericArg as _, GenericArgs as _, GenericsOf, IntoKind, SliceLike, Term as _, Ty as _, + }, + relate::{Relate, VarianceDiagInfo}, +}; +use smallvec::SmallVec; + +use crate::db::HirDatabase; +use crate::next_solver::{Binder, PolyFnSig}; + +use super::{ + Const, DbInterner, EarlyParamRegion, ErrorGuaranteed, ParamConst, Region, SolverDefId, Ty, Tys, + generics::{GenericParamDef, Generics}, + interned_vec_db, +}; + +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub enum GenericArg<'db> { + Ty(Ty<'db>), + Lifetime(Region<'db>), + Const(Const<'db>), +} + +impl<'db> std::fmt::Debug for GenericArg<'db> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Ty(t) => std::fmt::Debug::fmt(t, f), + Self::Lifetime(r) => std::fmt::Debug::fmt(r, f), + Self::Const(c) => std::fmt::Debug::fmt(c, f), + } + } +} + +impl<'db> GenericArg<'db> { + pub fn ty(self) -> Option> { + match self.kind() { + GenericArgKind::Type(ty) => Some(ty), + _ => None, + } + } + + pub fn expect_ty(self) -> Ty<'db> { + match self.kind() { + GenericArgKind::Type(ty) => ty, + _ => panic!("Expected ty, got {self:?}"), + } + } + + pub fn region(self) -> Option> { + match self.kind() { + GenericArgKind::Lifetime(r) => Some(r), + _ => None, + } + } +} + +impl<'db> From> for GenericArg<'db> { + fn from(value: Term<'db>) -> Self { + match value { + Term::Ty(ty) => GenericArg::Ty(ty), + Term::Const(c) => GenericArg::Const(c), + } + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub enum Term<'db> 
{ + Ty(Ty<'db>), + Const(Const<'db>), +} + +impl<'db> std::fmt::Debug for Term<'db> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Ty(t) => std::fmt::Debug::fmt(t, f), + Self::Const(c) => std::fmt::Debug::fmt(c, f), + } + } +} + +impl<'db> Term<'db> { + pub fn expect_type(&self) -> Ty<'db> { + self.as_type().expect("expected a type, but found a const") + } + + pub fn is_trivially_wf(&self, tcx: DbInterner<'db>) -> bool { + match self.kind() { + TermKind::Ty(ty) => ty.is_trivially_wf(tcx), + TermKind::Const(ct) => ct.is_trivially_wf(), + } + } +} + +impl<'db> From> for GenericArg<'db> { + fn from(value: Ty<'db>) -> Self { + Self::Ty(value) + } +} + +impl<'db> From> for GenericArg<'db> { + fn from(value: Region<'db>) -> Self { + Self::Lifetime(value) + } +} + +impl<'db> From> for GenericArg<'db> { + fn from(value: Const<'db>) -> Self { + Self::Const(value) + } +} + +impl<'db> IntoKind for GenericArg<'db> { + type Kind = GenericArgKind>; + + fn kind(self) -> Self::Kind { + match self { + GenericArg::Ty(ty) => GenericArgKind::Type(ty), + GenericArg::Lifetime(region) => GenericArgKind::Lifetime(region), + GenericArg::Const(c) => GenericArgKind::Const(c), + } + } +} + +impl<'db> TypeVisitable> for GenericArg<'db> { + fn visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + match self { + GenericArg::Lifetime(lt) => lt.visit_with(visitor), + GenericArg::Ty(ty) => ty.visit_with(visitor), + GenericArg::Const(ct) => ct.visit_with(visitor), + } + } +} + +impl<'db> TypeFoldable> for GenericArg<'db> { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + match self.kind() { + GenericArgKind::Lifetime(lt) => lt.try_fold_with(folder).map(Into::into), + GenericArgKind::Type(ty) => ty.try_fold_with(folder).map(Into::into), + GenericArgKind::Const(ct) => ct.try_fold_with(folder).map(Into::into), + } + } + fn fold_with>>(self, folder: &mut F) -> Self { + match self.kind() { + GenericArgKind::Lifetime(lt) => lt.fold_with(folder).into(), + GenericArgKind::Type(ty) => ty.fold_with(folder).into(), + GenericArgKind::Const(ct) => ct.fold_with(folder).into(), + } + } +} + +impl<'db> Relate> for GenericArg<'db> { + fn relate>>( + relation: &mut R, + a: Self, + b: Self, + ) -> rustc_type_ir::relate::RelateResult, Self> { + match (a.kind(), b.kind()) { + (GenericArgKind::Lifetime(a_lt), GenericArgKind::Lifetime(b_lt)) => { + Ok(relation.relate(a_lt, b_lt)?.into()) + } + (GenericArgKind::Type(a_ty), GenericArgKind::Type(b_ty)) => { + Ok(relation.relate(a_ty, b_ty)?.into()) + } + (GenericArgKind::Const(a_ct), GenericArgKind::Const(b_ct)) => { + Ok(relation.relate(a_ct, b_ct)?.into()) + } + (GenericArgKind::Lifetime(unpacked), x) => { + unreachable!("impossible case reached: can't relate: {:?} with {:?}", unpacked, x) + } + (GenericArgKind::Type(unpacked), x) => { + unreachable!("impossible case reached: can't relate: {:?} with {:?}", unpacked, x) + } + (GenericArgKind::Const(unpacked), x) => { + unreachable!("impossible case reached: can't relate: {:?} with {:?}", unpacked, x) + } + } + } +} + +interned_vec_db!(GenericArgs, GenericArg); + +impl<'db> rustc_type_ir::inherent::GenericArg> for GenericArg<'db> {} + +impl<'db> GenericArgs<'db> { + /// Creates an `GenericArgs` for generic parameter definitions, + /// by calling closures to obtain each kind. + /// The closures get to observe the `GenericArgs` as they're + /// being built, which can be used to correctly + /// replace defaults of generic parameters. 
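// `for_item` below builds the argument list by invoking a callback once per generic
// parameter, passing the arguments created so far; that is what lets a default such as
// `struct Foo<T, U = T>` be filled in from an earlier slot. A standalone sketch of that
// construction, with toy `Param`/`Arg` types invented for illustration:
#[derive(Clone, Debug, PartialEq)]
struct Arg(&'static str);

struct Param {
    name: &'static str,
    default_index: Option<usize>,
}

fn args_for_item(
    params: &[Param],
    mut mk_arg: impl FnMut(&Param, u32, &[Arg]) -> Arg,
) -> Vec<Arg> {
    let mut args = Vec::with_capacity(params.len());
    for param in params {
        // The callback can inspect everything built so far, like the
        // `&[GenericArg]` slice passed by `for_item`.
        let arg = mk_arg(param, args.len() as u32, &args);
        args.push(arg);
    }
    args
}

fn main() {
    // Models `struct Foo<T, U = T>`.
    let params = [
        Param { name: "T", default_index: None },
        Param { name: "U", default_index: Some(0) },
    ];
    let args = args_for_item(&params, |param, index, built| match param.default_index {
        Some(i) if (i as u32) < index => built[i].clone(),
        _ => Arg(param.name),
    });
    assert_eq!(args, vec![Arg("T"), Arg("T")]);
}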
+ pub fn for_item( + interner: DbInterner<'db>, + def_id: SolverDefId, + mut mk_kind: F, + ) -> GenericArgs<'db> + where + F: FnMut(&Symbol, u32, GenericParamId, &[GenericArg<'db>]) -> GenericArg<'db>, + { + let defs = interner.generics_of(def_id); + let count = defs.count(); + let mut args = SmallVec::with_capacity(count); + Self::fill_item(&mut args, interner, defs, &mut mk_kind); + interner.mk_args(&args) + } + + fn fill_item( + args: &mut SmallVec<[GenericArg<'db>; 8]>, + interner: DbInterner<'_>, + defs: Generics, + mk_kind: &mut F, + ) where + F: FnMut(&Symbol, u32, GenericParamId, &[GenericArg<'db>]) -> GenericArg<'db>, + { + let self_len = defs.own_params.len() as u32; + if let Some(def_id) = defs.parent { + let parent_defs = interner.generics_of(def_id.into()); + Self::fill_item(args, interner, parent_defs, mk_kind); + } + Self::fill_single(args, &defs, mk_kind); + } + + fn fill_single(args: &mut SmallVec<[GenericArg<'db>; 8]>, defs: &Generics, mk_kind: &mut F) + where + F: FnMut(&Symbol, u32, GenericParamId, &[GenericArg<'db>]) -> GenericArg<'db>, + { + let start_len = args.len(); + args.reserve(defs.own_params.len()); + for param in &defs.own_params { + let kind = mk_kind(¶m.name, args.len() as u32, param.id, args); + args.push(kind); + } + } + + pub fn closure_sig_untupled(self) -> PolyFnSig<'db> { + let TyKind::FnPtr(inputs_and_output, hdr) = + self.split_closure_args_untupled().closure_sig_as_fn_ptr_ty.kind() + else { + unreachable!("not a function pointer") + }; + inputs_and_output.with(hdr) + } + + /// A "sensible" `.split_closure_args()`, where the arguments are not in a tuple. + pub fn split_closure_args_untupled(self) -> rustc_type_ir::ClosureArgsParts> { + // FIXME: should use `ClosureSubst` when possible + match self.inner().as_slice() { + [parent_args @ .., closure_kind_ty, sig_ty, tupled_upvars_ty] => { + let interner = DbInterner::conjure(); + rustc_type_ir::ClosureArgsParts { + parent_args: GenericArgs::new_from_iter(interner, parent_args.iter().cloned()), + closure_sig_as_fn_ptr_ty: sig_ty.expect_ty(), + closure_kind_ty: closure_kind_ty.expect_ty(), + tupled_upvars_ty: tupled_upvars_ty.expect_ty(), + } + } + _ => { + unreachable!("unexpected closure sig"); + } + } + } +} + +impl<'db> rustc_type_ir::relate::Relate> for GenericArgs<'db> { + fn relate>>( + relation: &mut R, + a: Self, + b: Self, + ) -> rustc_type_ir::relate::RelateResult, Self> { + let interner = relation.cx(); + CollectAndApply::collect_and_apply( + std::iter::zip(a.iter(), b.iter()).map(|(a, b)| { + relation.relate_with_variance( + Variance::Invariant, + VarianceDiagInfo::default(), + a, + b, + ) + }), + |g| GenericArgs::new_from_iter(interner, g.iter().cloned()), + ) + } +} + +impl<'db> rustc_type_ir::inherent::GenericArgs> for GenericArgs<'db> { + fn as_closure(self) -> ClosureArgs> { + ClosureArgs { args: self } + } + fn as_coroutine(self) -> CoroutineArgs> { + CoroutineArgs { args: self } + } + fn as_coroutine_closure(self) -> CoroutineClosureArgs> { + CoroutineClosureArgs { args: self } + } + fn rebase_onto( + self, + interner: DbInterner<'db>, + source_def_id: as rustc_type_ir::Interner>::DefId, + target: as rustc_type_ir::Interner>::GenericArgs, + ) -> as rustc_type_ir::Interner>::GenericArgs { + let defs = interner.generics_of(source_def_id); + interner.mk_args_from_iter(target.iter().chain(self.iter().skip(defs.count()))) + } + + fn identity_for_item( + interner: DbInterner<'db>, + def_id: as rustc_type_ir::Interner>::DefId, + ) -> as rustc_type_ir::Interner>::GenericArgs { + 
Self::for_item(interner, def_id, |name, index, kind, _| { + mk_param(interner, index, name, kind) + }) + } + + fn extend_with_error( + interner: DbInterner<'db>, + def_id: as rustc_type_ir::Interner>::DefId, + original_args: &[ as rustc_type_ir::Interner>::GenericArg], + ) -> as rustc_type_ir::Interner>::GenericArgs { + Self::for_item(interner, def_id, |name, index, kind, _| { + if let Some(arg) = original_args.get(index as usize) { + *arg + } else { + error_for_param_kind(kind, interner) + } + }) + } + fn type_at(self, i: usize) -> as rustc_type_ir::Interner>::Ty { + self.inner() + .get(i) + .and_then(|g| g.as_type()) + .unwrap_or_else(|| Ty::new_error(DbInterner::conjure(), ErrorGuaranteed)) + } + + fn region_at(self, i: usize) -> as rustc_type_ir::Interner>::Region { + self.inner() + .get(i) + .and_then(|g| g.as_region()) + .unwrap_or_else(|| Region::error(DbInterner::conjure())) + } + + fn const_at(self, i: usize) -> as rustc_type_ir::Interner>::Const { + self.inner() + .get(i) + .and_then(|g| g.as_const()) + .unwrap_or_else(|| Const::error(DbInterner::conjure())) + } + + fn split_closure_args(self) -> rustc_type_ir::ClosureArgsParts> { + // FIXME: should use `ClosureSubst` when possible + match self.inner().as_slice() { + [parent_args @ .., closure_kind_ty, sig_ty, tupled_upvars_ty] => { + let interner = DbInterner::conjure(); + // This is stupid, but the next solver expects the first input to actually be a tuple + let sig_ty = match sig_ty.expect_ty().kind() { + TyKind::FnPtr(sig_tys, header) => Ty::new( + interner, + TyKind::FnPtr( + sig_tys.map_bound(|s| { + let inputs = Ty::new_tup_from_iter(interner, s.inputs().iter()); + let output = s.output(); + FnSigTys { + inputs_and_output: Tys::new_from_iter( + interner, + [inputs, output], + ), + } + }), + header, + ), + ), + _ => unreachable!("sig_ty should be last"), + }; + rustc_type_ir::ClosureArgsParts { + parent_args: GenericArgs::new_from_iter(interner, parent_args.iter().cloned()), + closure_sig_as_fn_ptr_ty: sig_ty, + closure_kind_ty: closure_kind_ty.expect_ty(), + tupled_upvars_ty: tupled_upvars_ty.expect_ty(), + } + } + _ => { + unreachable!("unexpected closure sig"); + } + } + } + + fn split_coroutine_closure_args( + self, + ) -> rustc_type_ir::CoroutineClosureArgsParts> { + match self.inner().as_slice() { + [ + parent_args @ .., + closure_kind_ty, + signature_parts_ty, + tupled_upvars_ty, + coroutine_captures_by_ref_ty, + coroutine_witness_ty, + ] => rustc_type_ir::CoroutineClosureArgsParts { + parent_args: GenericArgs::new_from_iter( + DbInterner::conjure(), + parent_args.iter().cloned(), + ), + closure_kind_ty: closure_kind_ty.expect_ty(), + signature_parts_ty: signature_parts_ty.expect_ty(), + tupled_upvars_ty: tupled_upvars_ty.expect_ty(), + coroutine_captures_by_ref_ty: coroutine_captures_by_ref_ty.expect_ty(), + }, + _ => panic!("GenericArgs were likely not for a CoroutineClosure."), + } + } + + fn split_coroutine_args(self) -> rustc_type_ir::CoroutineArgsParts> { + let interner = DbInterner::conjure(); + match self.inner().as_slice() { + [parent_args @ .., kind_ty, resume_ty, yield_ty, return_ty, tupled_upvars_ty] => { + rustc_type_ir::CoroutineArgsParts { + parent_args: GenericArgs::new_from_iter(interner, parent_args.iter().cloned()), + kind_ty: kind_ty.expect_ty(), + resume_ty: resume_ty.expect_ty(), + yield_ty: yield_ty.expect_ty(), + return_ty: return_ty.expect_ty(), + tupled_upvars_ty: tupled_upvars_ty.expect_ty(), + } + } + _ => panic!("GenericArgs were likely not for a Coroutine."), + } + } +} + +pub fn 
mk_param<'db>( + interner: DbInterner<'db>, + index: u32, + name: &Symbol, + id: GenericParamId, +) -> GenericArg<'db> { + let name = name.clone(); + match id { + GenericParamId::LifetimeParamId(id) => { + Region::new_early_param(interner, EarlyParamRegion { index, id }).into() + } + GenericParamId::TypeParamId(id) => Ty::new_param(interner, id, index, name).into(), + GenericParamId::ConstParamId(id) => { + Const::new_param(interner, ParamConst { index, id }).into() + } + } +} + +pub fn error_for_param_kind<'db>(id: GenericParamId, interner: DbInterner<'db>) -> GenericArg<'db> { + match id { + GenericParamId::LifetimeParamId(_) => Region::error(interner).into(), + GenericParamId::TypeParamId(_) => Ty::new_error(interner, ErrorGuaranteed).into(), + GenericParamId::ConstParamId(_) => Const::error(interner).into(), + } +} + +impl<'db> IntoKind for Term<'db> { + type Kind = TermKind<DbInterner<'db>>; + + fn kind(self) -> Self::Kind { + match self { + Term::Ty(ty) => TermKind::Ty(ty), + Term::Const(c) => TermKind::Const(c), + } + } +} + +impl<'db> From<Ty<'db>> for Term<'db> { + fn from(value: Ty<'db>) -> Self { + Self::Ty(value) + } +} + +impl<'db> From<Const<'db>> for Term<'db> { + fn from(value: Const<'db>) -> Self { + Self::Const(value) + } +} + +impl<'db> TypeVisitable<DbInterner<'db>> for Term<'db> { + fn visit_with<V: TypeVisitor<DbInterner<'db>>>( + &self, + visitor: &mut V, + ) -> V::Result { + match self { + Term::Ty(ty) => ty.visit_with(visitor), + Term::Const(ct) => ct.visit_with(visitor), + } + } +} + +impl<'db> TypeFoldable<DbInterner<'db>> for Term<'db> { + fn try_fold_with<F: FallibleTypeFolder<DbInterner<'db>>>( + self, + folder: &mut F, + ) -> Result<Self, F::Error> { + match self.kind() { + TermKind::Ty(ty) => ty.try_fold_with(folder).map(Into::into), + TermKind::Const(ct) => ct.try_fold_with(folder).map(Into::into), + } + } + fn fold_with<F: TypeFolder<DbInterner<'db>>>(self, folder: &mut F) -> Self { + match self.kind() { + TermKind::Ty(ty) => ty.fold_with(folder).into(), + TermKind::Const(ct) => ct.fold_with(folder).into(), + } + } +} + +impl<'db> Relate<DbInterner<'db>> for Term<'db> { + fn relate<R: TypeRelation<DbInterner<'db>>>( + relation: &mut R, + a: Self, + b: Self, + ) -> rustc_type_ir::relate::RelateResult<DbInterner<'db>, Self> { + match (a.kind(), b.kind()) { + (TermKind::Ty(a_ty), TermKind::Ty(b_ty)) => Ok(relation.relate(a_ty, b_ty)?.into()), + (TermKind::Const(a_ct), TermKind::Const(b_ct)) => { + Ok(relation.relate(a_ct, b_ct)?.into()) + } + (TermKind::Ty(unpacked), x) => { + unreachable!("impossible case reached: can't relate: {:?} with {:?}", unpacked, x) + } + (TermKind::Const(unpacked), x) => { + unreachable!("impossible case reached: can't relate: {:?} with {:?}", unpacked, x) + } + } + } +} + +impl<'db> rustc_type_ir::inherent::Term<DbInterner<'db>> for Term<'db> {} + +#[derive(Clone, Eq, PartialEq, Debug)] +pub enum TermVid { + Ty(TyVid), + Const(ConstVid), +} + +impl From<TyVid> for TermVid { + fn from(value: TyVid) -> Self { + TermVid::Ty(value) + } +} + +impl From<ConstVid> for TermVid { + fn from(value: ConstVid) -> Self { + TermVid::Const(value) + } +} + +impl<'db> DbInterner<'db> { + pub(super) fn mk_args(self, args: &[GenericArg<'db>]) -> GenericArgs<'db> { + GenericArgs::new_from_iter(self, args.iter().cloned()) + } + + pub(super) fn mk_args_from_iter<I, T>(self, iter: I) -> T::Output + where + I: Iterator<Item = T>, + T: rustc_type_ir::CollectAndApply<GenericArg<'db>, GenericArgs<'db>>, + { + T::collect_and_apply(iter, |xs| self.mk_args(xs)) + } + + pub(super) fn check_args_compatible(self, def_id: SolverDefId, args: GenericArgs<'db>) -> bool { + // TODO + true + } + + pub(super) fn debug_assert_args_compatible(self, def_id: SolverDefId, args: GenericArgs<'db>) { + // TODO + } +} diff --git 
a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generics.rs new file mode 100644 index 0000000000000..5ec9a18a6c20e --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generics.rs @@ -0,0 +1,142 @@ +//! Things related to generics in the next-trait-solver. + +use hir_def::{ + ConstParamId, GenericDefId, GenericParamId, ItemContainerId, LifetimeParamId, Lookup, + TypeOrConstParamId, TypeParamId, + db::DefDatabase, + expr_store::ExpressionStore, + hir::generics::{ + GenericParamDataRef, GenericParams, LifetimeParamData, LocalLifetimeParamId, + LocalTypeOrConstParamId, TypeOrConstParamData, TypeParamData, TypeParamProvenance, + WherePredicate, + }, +}; +use hir_expand::name::Name; +use intern::{Symbol, sym}; +use la_arena::Arena; +use rustc_type_ir::inherent::Ty as _; +use triomphe::Arc; + +use crate::{db::HirDatabase, generics::parent_generic_def, next_solver::Ty}; + +use super::{Const, EarlyParamRegion, ErrorGuaranteed, ParamConst, Region, SolverDefId}; + +use super::{DbInterner, GenericArg}; + +pub(crate) fn generics(db: &dyn HirDatabase, def: SolverDefId) -> Generics { + let mk_lt = |parent, index, local_id, lt: &LifetimeParamData| { + let name = lt.name.symbol().clone(); + let id = GenericParamId::LifetimeParamId(LifetimeParamId { parent, local_id }); + GenericParamDef { name, index, id } + }; + let mk_ty = |parent, index, local_id, p: &TypeOrConstParamData| { + let name = p.name().map(|n| n.symbol().clone()).unwrap_or_else(|| sym::MISSING_NAME); + let id = TypeOrConstParamId { parent, local_id }; + let id = match p { + TypeOrConstParamData::TypeParamData(_) => { + GenericParamId::TypeParamId(TypeParamId::from_unchecked(id)) + } + TypeOrConstParamData::ConstParamData(_) => { + GenericParamId::ConstParamId(ConstParamId::from_unchecked(id)) + } + }; + GenericParamDef { name, index, id } + }; + let own_params_for_generic_params = |parent, params: &GenericParams| { + let mut result = Vec::with_capacity(params.len()); + let mut type_and_consts = params.iter_type_or_consts(); + let mut index = 0; + if let Some(self_param) = params.trait_self_param() { + result.push(mk_ty(parent, 0, self_param, ¶ms[self_param])); + type_and_consts.next(); + index += 1; + } + result.extend(params.iter_lt().map(|(local_id, data)| { + let lt = mk_lt(parent, index, local_id, data); + index += 1; + lt + })); + result.extend(type_and_consts.map(|(local_id, data)| { + let ty = mk_ty(parent, index, local_id, data); + index += 1; + ty + })); + result + }; + + let (parent, own_params) = match (def.try_into(), def) { + (Ok(def), _) => ( + parent_generic_def(db, def), + own_params_for_generic_params(def, &db.generic_params(def)), + ), + (_, SolverDefId::InternedOpaqueTyId(id)) => { + match db.lookup_intern_impl_trait_id(id) { + crate::ImplTraitId::ReturnTypeImplTrait(function_id, _) => { + // The opaque type itself does not have generics - only the parent function + (Some(GenericDefId::FunctionId(function_id)), vec![]) + } + crate::ImplTraitId::TypeAliasImplTrait(type_alias_id, _) => { + (Some(type_alias_id.into()), Vec::new()) + } + crate::ImplTraitId::AsyncBlockTypeImplTrait(def, _) => { + let param = TypeOrConstParamData::TypeParamData(TypeParamData { + name: None, + default: None, + provenance: TypeParamProvenance::TypeParamList, + }); + // Yes, there is a parent but we don't include it in the generics + // FIXME: It seems utterly sensitive to fake a generic param here. + // Also, what a horrible mess! 
+ ( + None, + vec![mk_ty( + GenericDefId::FunctionId(salsa::plumbing::FromId::from_id(unsafe { + salsa::Id::from_index(salsa::Id::MAX_U32 - 1) + })), + 0, + LocalTypeOrConstParamId::from_raw(la_arena::RawIdx::from_u32(0)), + ¶m, + )], + ) + } + } + } + _ => panic!("No generics for {def:?}"), + }; + let parent_generics = parent.map(|def| Box::new(generics(db, def.into()))); + + Generics { + parent, + parent_count: parent_generics.map_or(0, |g| g.parent_count + g.own_params.len()), + own_params, + } +} + +#[derive(Debug)] +pub struct Generics { + pub parent: Option, + pub parent_count: usize, + pub own_params: Vec, +} + +#[derive(Debug)] +pub struct GenericParamDef { + pub(crate) name: Symbol, + //def_id: GenericDefId, + index: u32, + pub(crate) id: GenericParamId, +} + +impl GenericParamDef { + /// Returns the index of the param on the self generics only + /// (i.e. not including parent generics) + pub fn index(&self) -> u32 { + self.index + } +} + +impl<'db> rustc_type_ir::inherent::GenericsOf> for Generics { + fn count(&self) -> usize { + self.parent_count + self.own_params.len() + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/at.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/at.rs new file mode 100644 index 0000000000000..8dfffe0d365e7 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/at.rs @@ -0,0 +1,363 @@ +//! A nice interface for working with the infcx. The basic idea is to +//! do `infcx.at(cause, param_env)`, which sets the "cause" of the +//! operation as well as the surrounding parameter environment. Then +//! you can do something like `.sub(a, b)` or `.eq(a, b)` to create a +//! subtype or equality relationship respectively. The first argument +//! is always the "expected" output from the POV of diagnostics. +//! +//! Examples: +//! ```ignore (fragment) +//! infcx.at(cause, param_env).sub(a, b) +//! // requires that `a <: b`, with `a` considered the "expected" type +//! +//! infcx.at(cause, param_env).sup(a, b) +//! // requires that `b <: a`, with `a` considered the "expected" type +//! +//! infcx.at(cause, param_env).eq(a, b) +//! // requires that `a == b`, with `a` considered the "expected" type +//! ``` +//! For finer-grained control, you can also do use `trace`: +//! ```ignore (fragment) +//! infcx.at(...).trace(a, b).sub(&c, &d) +//! ``` +//! This will set `a` and `b` as the "root" values for +//! error-reporting, but actually operate on `c` and `d`. This is +//! sometimes useful when the types of `c` and `d` are not traceable +//! things. (That system should probably be refactored.) + +use rustc_type_ir::{ + FnSig, GenericArgKind, TypeFoldable, TypingMode, Variance, + error::ExpectedFound, + inherent::{IntoKind, Span as _}, + relate::{Relate, TypeRelation, solver_relating::RelateExt}, +}; + +use crate::next_solver::{ + AliasTerm, AliasTy, Binder, Const, DbInterner, GenericArg, Goal, ParamEnv, + PolyExistentialProjection, PolyExistentialTraitRef, PolyFnSig, Predicate, Region, Span, Term, + TraitRef, Ty, + fulfill::{FulfillmentCtxt, NextSolverError}, + infer::relate::lattice::{LatticeOp, LatticeOpKind}, +}; + +use super::{ + InferCtxt, InferOk, InferResult, TypeTrace, ValuePairs, + traits::{Obligation, ObligationCause}, +}; + +/// Whether we should define opaque types or just treat them opaquely. +/// +/// Currently only used to prevent predicate matching from matching anything +/// against opaque types. 
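To make the `infcx.at(cause, param_env)` flow documented above concrete, here is a minimal sketch of how a caller might equate two types and decide what to do with the result. It assumes the items defined in these modules (`InferCtxt`, `ObligationCause`, `ParamEnv`, `DefineOpaqueTypes`, `InferOk`) are in scope; the helper itself is hypothetical and not part of this change.

```rust
// Hypothetical caller of the `At` API described in the module docs above.
fn types_unify<'db>(
    infcx: &InferCtxt<'db>,
    cause: &ObligationCause,
    param_env: ParamEnv<'db>,
    expected: Ty<'db>,
    actual: Ty<'db>,
) -> bool {
    match infcx.at(cause, param_env).eq(DefineOpaqueTypes::No, expected, actual) {
        // `eq` only produces nested obligations; a real caller would register
        // them with a fulfillment context and drive that to completion before
        // trusting the answer.
        Ok(InferOk { value: (), obligations: _obligations }) => true,
        Err(_type_error) => false,
    }
}
```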
+#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub enum DefineOpaqueTypes { + Yes, + No, +} + +#[derive(Clone, Copy)] +pub struct At<'a, 'db> { + pub infcx: &'a InferCtxt<'db>, + pub cause: &'a ObligationCause, + pub param_env: ParamEnv<'db>, +} + +impl<'db> InferCtxt<'db> { + #[inline] + pub fn at<'a>(&'a self, cause: &'a ObligationCause, param_env: ParamEnv<'db>) -> At<'a, 'db> { + At { infcx: self, cause, param_env } + } + + /// Forks the inference context, creating a new inference context with the same inference + /// variables in the same state. This can be used to "branch off" many tests from the same + /// common state. + pub fn fork(&self) -> Self { + Self { + interner: self.interner, + typing_mode: self.typing_mode, + inner: self.inner.clone(), + tainted_by_errors: self.tainted_by_errors.clone(), + universe: self.universe.clone(), + } + } + + /// Forks the inference context, creating a new inference context with the same inference + /// variables in the same state, except possibly changing the intercrate mode. This can be + /// used to "branch off" many tests from the same common state. Used in negative coherence. + pub fn fork_with_typing_mode(&self, typing_mode: TypingMode>) -> Self { + // Unlike `fork`, this invalidates all cache entries as they may depend on the + // typing mode. + + Self { + interner: self.interner, + typing_mode, + inner: self.inner.clone(), + tainted_by_errors: self.tainted_by_errors.clone(), + universe: self.universe.clone(), + } + } +} + +pub trait ToTrace<'db>: Relate> { + fn to_trace(cause: &ObligationCause, a: Self, b: Self) -> TypeTrace<'db>; +} + +impl<'a, 'db> At<'a, 'db> { + /// Makes `actual <: expected`. For example, if type-checking a + /// call like `foo(x)`, where `foo: fn(i32)`, you might have + /// `sup(i32, x)`, since the "expected" type is the type that + /// appears in the signature. + pub fn sup( + self, + define_opaque_types: DefineOpaqueTypes, + expected: T, + actual: T, + ) -> InferResult<'db, ()> + where + T: ToTrace<'db>, + { + RelateExt::relate( + self.infcx, + self.param_env, + expected, + Variance::Contravariant, + actual, + Span::dummy(), + ) + .map(|goals| self.goals_to_obligations(goals)) + } + + /// Makes `expected <: actual`. + pub fn sub( + self, + define_opaque_types: DefineOpaqueTypes, + expected: T, + actual: T, + ) -> InferResult<'db, ()> + where + T: ToTrace<'db>, + { + RelateExt::relate( + self.infcx, + self.param_env, + expected, + Variance::Covariant, + actual, + Span::dummy(), + ) + .map(|goals| self.goals_to_obligations(goals)) + } + + /// Makes `expected == actual`. + pub fn eq( + self, + define_opaque_types: DefineOpaqueTypes, + expected: T, + actual: T, + ) -> InferResult<'db, ()> + where + T: ToTrace<'db>, + { + self.eq_trace( + define_opaque_types, + ToTrace::to_trace(self.cause, expected, actual), + expected, + actual, + ) + } + + /// Makes `expected == actual`. 
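The `fork` method above enables speculative checks: clone the inference state, try a unification on the clone, and leave the original untouched. A hedged sketch of that pattern (the helper name and its use are illustrative only):

```rust
// Illustrative only: probe a candidate type against `expected` on a forked
// inference context so the original `infcx` is not polluted on failure.
fn candidate_probably_applies<'db>(
    infcx: &InferCtxt<'db>,
    cause: &ObligationCause,
    param_env: ParamEnv<'db>,
    expected: Ty<'db>,
    candidate: Ty<'db>,
) -> bool {
    let speculative = infcx.fork();
    // Any inference variables bound during this call live only in `speculative`.
    speculative.at(cause, param_env).eq(DefineOpaqueTypes::No, expected, candidate).is_ok()
}
```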
+ pub fn eq_trace( + self, + define_opaque_types: DefineOpaqueTypes, + trace: TypeTrace<'db>, + expected: T, + actual: T, + ) -> InferResult<'db, ()> + where + T: Relate>, + { + RelateExt::relate( + self.infcx, + self.param_env, + expected, + Variance::Invariant, + actual, + Span::dummy(), + ) + .map(|goals| self.goals_to_obligations(goals)) + } + + pub fn relate( + self, + define_opaque_types: DefineOpaqueTypes, + expected: T, + variance: Variance, + actual: T, + ) -> InferResult<'db, ()> + where + T: ToTrace<'db>, + { + match variance { + Variance::Covariant => self.sub(define_opaque_types, expected, actual), + Variance::Invariant => self.eq(define_opaque_types, expected, actual), + Variance::Contravariant => self.sup(define_opaque_types, expected, actual), + + // We could make this make sense but it's not readily + // exposed and I don't feel like dealing with it. Note + // that bivariance in general does a bit more than just + // *nothing*, it checks that the types are the same + // "modulo variance" basically. + Variance::Bivariant => panic!("Bivariant given to `relate()`"), + } + } + + /// Deeply normalizes `value`, replacing all aliases which can by normalized in + /// the current environment. This errors in case normalization fails or is ambiguous. + pub fn deeply_normalize(self, value: T) -> Result>> + where + T: TypeFoldable>, + { + crate::next_solver::normalize::deeply_normalize(self, value) + } + + /// Computes the least-upper-bound, or mutual supertype, of two + /// values. The order of the arguments doesn't matter, but since + /// this can result in an error (e.g., if asked to compute LUB of + /// u32 and i32), it is meaningful to call one of them the + /// "expected type". + pub fn lub(self, expected: T, actual: T) -> InferResult<'db, T> + where + T: ToTrace<'db>, + { + let mut op = LatticeOp::new( + self.infcx, + ToTrace::to_trace(self.cause, expected, actual), + self.param_env, + LatticeOpKind::Lub, + ); + let value = op.relate(expected, actual)?; + Ok(InferOk { value, obligations: op.into_obligations() }) + } + + fn goals_to_obligations(&self, goals: Vec>>) -> InferOk<'db, ()> { + InferOk { + value: (), + obligations: goals + .into_iter() + .map(|goal| { + Obligation::new( + self.infcx.interner, + self.cause.clone(), + goal.param_env, + goal.predicate, + ) + }) + .collect(), + } + } +} + +impl<'db> ToTrace<'db> for Ty<'db> { + fn to_trace(cause: &ObligationCause, a: Self, b: Self) -> TypeTrace<'db> { + TypeTrace { + cause: cause.clone(), + values: ValuePairs::Terms(ExpectedFound::new(a.into(), b.into())), + } + } +} + +impl<'db> ToTrace<'db> for Region<'db> { + fn to_trace(cause: &ObligationCause, a: Self, b: Self) -> TypeTrace<'db> { + TypeTrace { cause: cause.clone(), values: ValuePairs::Regions(ExpectedFound::new(a, b)) } + } +} + +impl<'db> ToTrace<'db> for Const<'db> { + fn to_trace(cause: &ObligationCause, a: Self, b: Self) -> TypeTrace<'db> { + TypeTrace { + cause: cause.clone(), + values: ValuePairs::Terms(ExpectedFound::new(a.into(), b.into())), + } + } +} + +impl<'db> ToTrace<'db> for GenericArg<'db> { + fn to_trace(cause: &ObligationCause, a: Self, b: Self) -> TypeTrace<'db> { + TypeTrace { + cause: cause.clone(), + values: match (a.kind(), b.kind()) { + (GenericArgKind::Lifetime(a), GenericArgKind::Lifetime(b)) => { + ValuePairs::Regions(ExpectedFound::new(a, b)) + } + (GenericArgKind::Type(a), GenericArgKind::Type(b)) => { + ValuePairs::Terms(ExpectedFound::new(a.into(), b.into())) + } + (GenericArgKind::Const(a), GenericArgKind::Const(b)) => { + 
ValuePairs::Terms(ExpectedFound::new(a.into(), b.into())) + } + _ => panic!("relating different kinds: {a:?} {b:?}"), + }, + } + } +} + +impl<'db> ToTrace<'db> for Term<'db> { + fn to_trace(cause: &ObligationCause, a: Self, b: Self) -> TypeTrace<'db> { + TypeTrace { cause: cause.clone(), values: ValuePairs::Terms(ExpectedFound::new(a, b)) } + } +} + +impl<'db> ToTrace<'db> for TraitRef<'db> { + fn to_trace(cause: &ObligationCause, a: Self, b: Self) -> TypeTrace<'db> { + TypeTrace { cause: cause.clone(), values: ValuePairs::TraitRefs(ExpectedFound::new(a, b)) } + } +} + +impl<'db> ToTrace<'db> for AliasTy<'db> { + fn to_trace(cause: &ObligationCause, a: Self, b: Self) -> TypeTrace<'db> { + TypeTrace { + cause: cause.clone(), + values: ValuePairs::Aliases(ExpectedFound::new(a.into(), b.into())), + } + } +} + +impl<'db> ToTrace<'db> for AliasTerm<'db> { + fn to_trace(cause: &ObligationCause, a: Self, b: Self) -> TypeTrace<'db> { + TypeTrace { cause: cause.clone(), values: ValuePairs::Aliases(ExpectedFound::new(a, b)) } + } +} + +impl<'db> ToTrace<'db> for FnSig> { + fn to_trace(cause: &ObligationCause, a: Self, b: Self) -> TypeTrace<'db> { + TypeTrace { + cause: cause.clone(), + values: ValuePairs::PolySigs(ExpectedFound::new(Binder::dummy(a), Binder::dummy(b))), + } + } +} + +impl<'db> ToTrace<'db> for PolyFnSig<'db> { + fn to_trace(cause: &ObligationCause, a: Self, b: Self) -> TypeTrace<'db> { + TypeTrace { cause: cause.clone(), values: ValuePairs::PolySigs(ExpectedFound::new(a, b)) } + } +} + +impl<'db> ToTrace<'db> for PolyExistentialTraitRef<'db> { + fn to_trace(cause: &ObligationCause, a: Self, b: Self) -> TypeTrace<'db> { + TypeTrace { + cause: cause.clone(), + values: ValuePairs::ExistentialTraitRef(ExpectedFound::new(a, b)), + } + } +} + +impl<'db> ToTrace<'db> for PolyExistentialProjection<'db> { + fn to_trace(cause: &ObligationCause, a: Self, b: Self) -> TypeTrace<'db> { + TypeTrace { + cause: cause.clone(), + values: ValuePairs::ExistentialProjection(ExpectedFound::new(a, b)), + } + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs new file mode 100644 index 0000000000000..beaac11a2de41 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs @@ -0,0 +1,792 @@ +//! This module contains code to canonicalize values into a `Canonical<'db, T>`. +//! +//! For an overview of what canonicalization is and how it fits into +//! rustc, check out the [chapter in the rustc dev guide][c]. +//! +//! 
[c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html + +use rustc_hash::FxHashMap; +use rustc_index::Idx; +use rustc_type_ir::InferTy::{self, FloatVar, IntVar, TyVar}; +use rustc_type_ir::inherent::{Const as _, IntoKind as _, Region as _, SliceLike, Ty as _}; +use rustc_type_ir::{ + BoundVar, CanonicalQueryInput, DebruijnIndex, Flags, InferConst, RegionKind, TyVid, TypeFlags, + TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, UniverseIndex, +}; +use smallvec::SmallVec; +use tracing::debug; + +use crate::next_solver::infer::InferCtxt; +use crate::next_solver::{ + Binder, BoundConst, BoundRegion, BoundRegionKind, BoundTy, Canonical, CanonicalVarKind, + CanonicalVars, Const, ConstKind, DbInterner, GenericArg, ParamEnvAnd, Placeholder, Region, Ty, + TyKind, +}; + +/// When we canonicalize a value to form a query, we wind up replacing +/// various parts of it with canonical variables. This struct stores +/// those replaced bits to remember for when we process the query +/// result. +#[derive(Clone, Debug)] +pub struct OriginalQueryValues<'db> { + /// Map from the universes that appear in the query to the universes in the + /// caller context. For all queries except `evaluate_goal` (used by Chalk), + /// we only ever put ROOT values into the query, so this map is very + /// simple. + pub universe_map: SmallVec<[UniverseIndex; 4]>, + + /// This is equivalent to `CanonicalVarValues`, but using a + /// `SmallVec` yields a significant performance win. + pub var_values: SmallVec<[GenericArg<'db>; 8]>, +} + +impl<'db> Default for OriginalQueryValues<'db> { + fn default() -> Self { + let mut universe_map = SmallVec::default(); + universe_map.push(UniverseIndex::ROOT); + + Self { universe_map, var_values: SmallVec::default() } + } +} + +impl<'db> InferCtxt<'db> { + /// Canonicalizes a query value `V`. When we canonicalize a query, + /// we not only canonicalize unbound inference variables, but we + /// *also* replace all free regions whatsoever. So for example a + /// query like `T: Trait<'static>` would be canonicalized to + /// + /// ```text + /// T: Trait<'?0> + /// ``` + /// + /// with a mapping M that maps `'?0` to `'static`. + /// + /// To get a good understanding of what is happening here, check + /// out the [chapter in the rustc dev guide][c]. + /// + /// [c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html#canonicalizing-the-query + pub fn canonicalize_query( + &self, + value: ParamEnvAnd<'db, V>, + query_state: &mut OriginalQueryValues<'db>, + ) -> CanonicalQueryInput, ParamEnvAnd<'db, V>> + where + V: TypeFoldable>, + { + let (param_env, value) = value.into_parts(); + // FIXME(#118965): We don't canonicalize the static lifetimes that appear in the + // `param_env` because they are treated differently by trait selection. + let canonical_param_env = Canonicalizer::canonicalize( + param_env, + self, + self.interner, + &CanonicalizeFreeRegionsOtherThanStatic, + query_state, + ); + + let canonical = Canonicalizer::canonicalize_with_base( + canonical_param_env, + value, + self, + self.interner, + &CanonicalizeAllFreeRegions, + query_state, + ) + .unchecked_map(|(param_env, value)| ParamEnvAnd { param_env, value }); + CanonicalQueryInput { canonical, typing_mode: self.typing_mode() } + } + + /// Canonicalizes a query *response* `V`. When we canonicalize a + /// query response, we only canonicalize unbound inference + /// variables, and we leave other free regions alone. 
So, + /// continuing with the example from `canonicalize_query`, if + /// there was an input query `T: Trait<'static>`, it would have + /// been canonicalized to + /// + /// ```text + /// T: Trait<'?0> + /// ``` + /// + /// with a mapping M that maps `'?0` to `'static`. But if we found that there + /// exists only one possible impl of `Trait`, and it looks like + /// ```ignore (illustrative) + /// impl Trait<'static> for T { .. } + /// ``` + /// then we would prepare a query result R that (among other + /// things) includes a mapping to `'?0 := 'static`. When + /// canonicalizing this query result R, we would leave this + /// reference to `'static` alone. + /// + /// To get a good understanding of what is happening here, check + /// out the [chapter in the rustc dev guide][c]. + /// + /// [c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html#canonicalizing-the-query-result + pub fn canonicalize_response(&self, value: V) -> Canonical<'db, V> + where + V: TypeFoldable>, + { + let mut query_state = OriginalQueryValues::default(); + Canonicalizer::canonicalize( + value, + self, + self.interner, + &CanonicalizeQueryResponse, + &mut query_state, + ) + } + + pub fn canonicalize_user_type_annotation(&self, value: V) -> Canonical<'db, V> + where + V: TypeFoldable>, + { + let mut query_state = OriginalQueryValues::default(); + Canonicalizer::canonicalize( + value, + self, + self.interner, + &CanonicalizeUserTypeAnnotation, + &mut query_state, + ) + } +} + +/// Controls how we canonicalize "free regions" that are not inference +/// variables. This depends on what we are canonicalizing *for* -- +/// e.g., if we are canonicalizing to create a query, we want to +/// replace those with inference variables, since we want to make a +/// maximally general query. But if we are canonicalizing a *query +/// response*, then we don't typically replace free regions, as they +/// must have been introduced from other parts of the system. +trait CanonicalizeMode { + fn canonicalize_free_region<'db>( + &self, + canonicalizer: &mut Canonicalizer<'_, 'db>, + r: Region<'db>, + ) -> Region<'db>; + + fn any(&self) -> bool; + + // Do we preserve universe of variables. + fn preserve_universes(&self) -> bool; +} + +struct CanonicalizeQueryResponse; + +impl CanonicalizeMode for CanonicalizeQueryResponse { + fn canonicalize_free_region<'db>( + &self, + canonicalizer: &mut Canonicalizer<'_, 'db>, + mut r: Region<'db>, + ) -> Region<'db> { + let infcx = canonicalizer.infcx; + + if let RegionKind::ReVar(vid) = r.kind() { + r = infcx + .inner + .borrow_mut() + .unwrap_region_constraints() + .opportunistic_resolve_var(canonicalizer.tcx, vid); + debug!( + "canonical: region var found with vid {vid:?}, \ + opportunistically resolved to {r:?}", + ); + }; + + match r.kind() { + RegionKind::ReLateParam(_) + | RegionKind::ReErased + | RegionKind::ReStatic + | RegionKind::ReEarlyParam(..) + | RegionKind::ReError(..) => r, + + RegionKind::RePlaceholder(placeholder) => canonicalizer + .canonical_var_for_region(CanonicalVarKind::PlaceholderRegion(placeholder), r), + + RegionKind::ReVar(vid) => { + let universe = infcx + .inner + .borrow_mut() + .unwrap_region_constraints() + .probe_value(vid) + .unwrap_err(); + canonicalizer.canonical_var_for_region(CanonicalVarKind::Region(universe), r) + } + + _ => { + // Other than `'static` or `'empty`, the query + // response should be executing in a fully + // canonicalized environment, so there shouldn't be + // any other region names it can come up. 
+ // + // rust-lang/rust#57464: `impl Trait` can leak local + // scopes (in manner violating typeck). Therefore, use + // `delayed_bug` to allow type error over an ICE. + panic!("unexpected region in query response: `{r:?}`"); + } + } + } + + fn any(&self) -> bool { + false + } + + fn preserve_universes(&self) -> bool { + true + } +} + +struct CanonicalizeUserTypeAnnotation; + +impl CanonicalizeMode for CanonicalizeUserTypeAnnotation { + fn canonicalize_free_region<'db>( + &self, + canonicalizer: &mut Canonicalizer<'_, 'db>, + r: Region<'db>, + ) -> Region<'db> { + match r.kind() { + RegionKind::ReEarlyParam(_) + | RegionKind::ReLateParam(_) + | RegionKind::ReErased + | RegionKind::ReStatic + | RegionKind::ReError(_) => r, + RegionKind::ReVar(_) => canonicalizer.canonical_var_for_region_in_root_universe(r), + RegionKind::RePlaceholder(..) | RegionKind::ReBound(..) => { + // We only expect region names that the user can type. + panic!("unexpected region in query response: `{r:?}`") + } + } + } + + fn any(&self) -> bool { + false + } + + fn preserve_universes(&self) -> bool { + false + } +} + +struct CanonicalizeAllFreeRegions; + +impl CanonicalizeMode for CanonicalizeAllFreeRegions { + fn canonicalize_free_region<'db>( + &self, + canonicalizer: &mut Canonicalizer<'_, 'db>, + r: Region<'db>, + ) -> Region<'db> { + canonicalizer.canonical_var_for_region_in_root_universe(r) + } + + fn any(&self) -> bool { + true + } + + fn preserve_universes(&self) -> bool { + false + } +} + +struct CanonicalizeFreeRegionsOtherThanStatic; + +impl CanonicalizeMode for CanonicalizeFreeRegionsOtherThanStatic { + fn canonicalize_free_region<'db>( + &self, + canonicalizer: &mut Canonicalizer<'_, 'db>, + r: Region<'db>, + ) -> Region<'db> { + if r.is_static() { r } else { canonicalizer.canonical_var_for_region_in_root_universe(r) } + } + + fn any(&self) -> bool { + true + } + + fn preserve_universes(&self) -> bool { + false + } +} + +struct Canonicalizer<'cx, 'db> { + /// Set to `None` to disable the resolution of inference variables. + infcx: &'cx InferCtxt<'db>, + tcx: DbInterner<'db>, + variables: SmallVec<[CanonicalVarKind<'db>; 8]>, + query_state: &'cx mut OriginalQueryValues<'db>, + // Note that indices is only used once `var_values` is big enough to be + // heap-allocated. + indices: FxHashMap, BoundVar>, + /// Maps each `sub_unification_table_root_var` to the index of the first + /// variable which used it. + /// + /// This means in case two type variables have the same sub relations root, + /// we set the `sub_root` of the second variable to the position of the first. + /// Otherwise the `sub_root` of each type variable is just its own position. + sub_root_lookup_table: FxHashMap, + canonicalize_mode: &'cx dyn CanonicalizeMode, + needs_canonical_flags: TypeFlags, + + binder_index: DebruijnIndex, +} + +impl<'cx, 'db> TypeFolder> for Canonicalizer<'cx, 'db> { + fn cx(&self) -> DbInterner<'db> { + self.tcx + } + + fn fold_binder(&mut self, t: Binder<'db, T>) -> Binder<'db, T> + where + T: TypeFoldable>, + { + self.binder_index.shift_in(1); + let t = t.super_fold_with(self); + self.binder_index.shift_out(1); + t + } + + fn fold_region(&mut self, r: Region<'db>) -> Region<'db> { + match r.kind() { + RegionKind::ReBound(index, ..) => { + if index >= self.binder_index { + panic!("escaping late-bound region during canonicalization"); + } else { + r + } + } + + RegionKind::ReStatic + | RegionKind::ReEarlyParam(..) + | RegionKind::ReError(_) + | RegionKind::ReLateParam(_) + | RegionKind::RePlaceholder(..) 
+ | RegionKind::ReVar(_) + | RegionKind::ReErased => self.canonicalize_mode.canonicalize_free_region(self, r), + } + } + + fn fold_ty(&mut self, mut t: Ty<'db>) -> Ty<'db> { + match t.kind() { + TyKind::Infer(TyVar(mut vid)) => { + // We need to canonicalize the *root* of our ty var. + // This is so that our canonical response correctly reflects + // any equated inference vars correctly! + let root_vid = self.infcx.root_var(vid); + if root_vid != vid { + t = Ty::new_var(self.tcx, root_vid); + vid = root_vid; + } + + debug!("canonical: type var found with vid {:?}", vid); + match self.infcx.probe_ty_var(vid) { + // `t` could be a float / int variable; canonicalize that instead. + Ok(t) => { + debug!("(resolved to {:?})", t); + self.fold_ty(t) + } + + // `TyVar(vid)` is unresolved, track its universe index in the canonicalized + // result. + Err(mut ui) => { + if !self.canonicalize_mode.preserve_universes() { + // FIXME: perf problem described in #55921. + ui = UniverseIndex::ROOT; + } + + let sub_root = self.get_or_insert_sub_root(vid); + self.canonicalize_ty_var(CanonicalVarKind::Ty { ui, sub_root }, t) + } + } + } + + TyKind::Infer(IntVar(vid)) => { + let nt = self.infcx.opportunistic_resolve_int_var(vid); + if nt != t { + self.fold_ty(nt) + } else { + self.canonicalize_ty_var(CanonicalVarKind::Int, t) + } + } + TyKind::Infer(FloatVar(vid)) => { + let nt = self.infcx.opportunistic_resolve_float_var(vid); + if nt != t { + self.fold_ty(nt) + } else { + self.canonicalize_ty_var(CanonicalVarKind::Float, t) + } + } + + TyKind::Infer( + InferTy::FreshTy(_) | InferTy::FreshIntTy(_) | InferTy::FreshFloatTy(_), + ) => { + panic!("encountered a fresh type during canonicalization") + } + + TyKind::Placeholder(mut placeholder) => { + if !self.canonicalize_mode.preserve_universes() { + placeholder.universe = UniverseIndex::ROOT; + } + self.canonicalize_ty_var(CanonicalVarKind::PlaceholderTy(placeholder), t) + } + + TyKind::Bound(debruijn, _) => { + if debruijn >= self.binder_index { + panic!("escaping bound type during canonicalization") + } else { + t + } + } + + TyKind::Closure(..) + | TyKind::CoroutineClosure(..) + | TyKind::Coroutine(..) + | TyKind::CoroutineWitness(..) + | TyKind::Bool + | TyKind::Char + | TyKind::Int(..) + | TyKind::Uint(..) + | TyKind::Float(..) + | TyKind::Adt(..) + | TyKind::Str + | TyKind::Error(_) + | TyKind::Array(..) + | TyKind::Slice(..) + | TyKind::RawPtr(..) + | TyKind::Ref(..) + | TyKind::FnDef(..) + | TyKind::FnPtr(..) + | TyKind::Dynamic(..) + | TyKind::UnsafeBinder(_) + | TyKind::Never + | TyKind::Tuple(..) + | TyKind::Alias(..) + | TyKind::Foreign(..) + | TyKind::Pat(..) + | TyKind::Param(..) => { + if t.flags().intersects(self.needs_canonical_flags) { + t.super_fold_with(self) + } else { + t + } + } + } + } + + fn fold_const(&mut self, mut ct: Const<'db>) -> Const<'db> { + match ct.kind() { + ConstKind::Infer(InferConst::Var(mut vid)) => { + // We need to canonicalize the *root* of our const var. + // This is so that our canonical response correctly reflects + // any equated inference vars correctly! 
+ let root_vid = self.infcx.root_const_var(vid); + if root_vid != vid { + ct = Const::new_var(self.tcx, root_vid); + vid = root_vid; + } + + debug!("canonical: const var found with vid {:?}", vid); + match self.infcx.probe_const_var(vid) { + Ok(c) => { + debug!("(resolved to {:?})", c); + return self.fold_const(c); + } + + // `ConstVar(vid)` is unresolved, track its universe index in the + // canonicalized result + Err(mut ui) => { + if !self.canonicalize_mode.preserve_universes() { + // FIXME: perf problem described in #55921. + ui = UniverseIndex::ROOT; + } + return self.canonicalize_const_var(CanonicalVarKind::Const(ui), ct); + } + } + } + ConstKind::Infer(InferConst::Fresh(_)) => { + panic!("encountered a fresh const during canonicalization") + } + ConstKind::Bound(debruijn, _) => { + if debruijn >= self.binder_index { + panic!("escaping bound const during canonicalization") + } else { + return ct; + } + } + ConstKind::Placeholder(placeholder) => { + return self + .canonicalize_const_var(CanonicalVarKind::PlaceholderConst(placeholder), ct); + } + _ => {} + } + + if ct.flags().intersects(self.needs_canonical_flags) { + ct.super_fold_with(self) + } else { + ct + } + } +} + +impl<'cx, 'db> Canonicalizer<'cx, 'db> { + /// The main `canonicalize` method, shared impl of + /// `canonicalize_query` and `canonicalize_response`. + fn canonicalize( + value: V, + infcx: &InferCtxt<'db>, + tcx: DbInterner<'db>, + canonicalize_region_mode: &dyn CanonicalizeMode, + query_state: &mut OriginalQueryValues<'db>, + ) -> Canonical<'db, V> + where + V: TypeFoldable>, + { + let base = Canonical { + max_universe: UniverseIndex::ROOT, + variables: CanonicalVars::new_from_iter(tcx, []), + value: (), + }; + Canonicalizer::canonicalize_with_base( + base, + value, + infcx, + tcx, + canonicalize_region_mode, + query_state, + ) + .unchecked_map(|((), val)| val) + } + + fn canonicalize_with_base( + base: Canonical<'db, U>, + value: V, + infcx: &InferCtxt<'db>, + tcx: DbInterner<'db>, + canonicalize_region_mode: &dyn CanonicalizeMode, + query_state: &mut OriginalQueryValues<'db>, + ) -> Canonical<'db, (U, V)> + where + V: TypeFoldable>, + { + let needs_canonical_flags = if canonicalize_region_mode.any() { + TypeFlags::HAS_INFER | TypeFlags::HAS_PLACEHOLDER | TypeFlags::HAS_FREE_REGIONS + } else { + TypeFlags::HAS_INFER | TypeFlags::HAS_PLACEHOLDER + }; + + // Fast path: nothing that needs to be canonicalized. + if !value.has_type_flags(needs_canonical_flags) { + return base.unchecked_map(|b| (b, value)); + } + + let mut canonicalizer = Canonicalizer { + infcx, + tcx, + canonicalize_mode: canonicalize_region_mode, + needs_canonical_flags, + variables: SmallVec::from_slice(base.variables.as_slice()), + query_state, + indices: FxHashMap::default(), + sub_root_lookup_table: Default::default(), + binder_index: DebruijnIndex::ZERO, + }; + if canonicalizer.query_state.var_values.spilled() { + canonicalizer.indices = canonicalizer + .query_state + .var_values + .iter() + .enumerate() + .map(|(i, &kind)| (kind, BoundVar::from(i))) + .collect(); + } + let out_value = value.fold_with(&mut canonicalizer); + + // Once we have canonicalized `out_value`, it should not + // contain anything that ties it to this inference context + // anymore. 
+ debug_assert!(!out_value.has_infer() && !out_value.has_placeholders()); + + let canonical_variables = + CanonicalVars::new_from_iter(tcx, canonicalizer.universe_canonicalized_variables()); + + let max_universe = canonical_variables + .iter() + .map(|cvar| cvar.universe()) + .max() + .unwrap_or(UniverseIndex::ROOT); + + Canonical { max_universe, variables: canonical_variables, value: (base.value, out_value) } + } + + /// Creates a canonical variable replacing `kind` from the input, + /// or returns an existing variable if `kind` has already been + /// seen. `kind` is expected to be an unbound variable (or + /// potentially a free region). + fn canonical_var(&mut self, info: CanonicalVarKind<'db>, kind: GenericArg<'db>) -> BoundVar { + let Canonicalizer { variables, query_state, indices, .. } = self; + + let var_values = &mut query_state.var_values; + + let universe = info.universe(); + if universe != UniverseIndex::ROOT { + assert!(self.canonicalize_mode.preserve_universes()); + + // Insert universe into the universe map. To preserve the order of the + // universes in the value being canonicalized, we don't update the + // universe in `info` until we have finished canonicalizing. + match query_state.universe_map.binary_search(&universe) { + Err(idx) => query_state.universe_map.insert(idx, universe), + Ok(_) => {} + } + } + + // This code is hot. `variables` and `var_values` are usually small + // (fewer than 8 elements ~95% of the time). They are SmallVec's to + // avoid allocations in those cases. We also don't use `indices` to + // determine if a kind has been seen before until the limit of 8 has + // been exceeded, to also avoid allocations for `indices`. + if !var_values.spilled() { + // `var_values` is stack-allocated. `indices` isn't used yet. Do a + // direct linear search of `var_values`. + if let Some(idx) = var_values.iter().position(|&k| k == kind) { + // `kind` is already present in `var_values`. + BoundVar::new(idx) + } else { + // `kind` isn't present in `var_values`. Append it. Likewise + // for `info` and `variables`. + variables.push(info); + var_values.push(kind); + assert_eq!(variables.len(), var_values.len()); + + // If `var_values` has become big enough to be heap-allocated, + // fill up `indices` to facilitate subsequent lookups. + if var_values.spilled() { + assert!(indices.is_empty()); + *indices = var_values + .iter() + .enumerate() + .map(|(i, &kind)| (kind, BoundVar::new(i))) + .collect(); + } + // The cv is the index of the appended element. + BoundVar::new(var_values.len() - 1) + } + } else { + // `var_values` is large. Do a hashmap search via `indices`. + *indices.entry(kind).or_insert_with(|| { + variables.push(info); + var_values.push(kind); + assert_eq!(variables.len(), var_values.len()); + BoundVar::new(variables.len() - 1) + }) + } + } + + fn get_or_insert_sub_root(&mut self, vid: TyVid) -> BoundVar { + let root_vid = self.infcx.sub_unification_table_root_var(vid); + let idx = + *self.sub_root_lookup_table.entry(root_vid).or_insert_with(|| self.variables.len()); + BoundVar::from(idx) + } + + /// Replaces the universe indexes used in `var_values` with their index in + /// `query_state.universe_map`. This minimizes the maximum universe used in + /// the canonicalized value. 
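One observable consequence of the `canonical_var` bookkeeping above: a value that mentions the same inference variable several times still introduces only one canonical variable, because every occurrence is looked up in `var_values`/`indices` first. A rough sketch of that contract, assuming the canonical types from this crate are in scope and that pairs are foldable (the helper is hypothetical):

```rust
// Hypothetical check of the de-duplication behaviour described above: the
// pair `(ty, ty)` shares its inference variables, so canonicalizing it should
// introduce the same number of canonical variables as `ty` alone.
fn dedup_holds<'db>(infcx: &InferCtxt<'db>, ty: Ty<'db>) -> bool {
    let single = infcx.canonicalize_response(ty);
    let pair = infcx.canonicalize_response((ty, ty));
    pair.variables.len() == single.variables.len()
}
```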
+ fn universe_canonicalized_variables(self) -> SmallVec<[CanonicalVarKind<'db>; 8]> { + if self.query_state.universe_map.len() == 1 { + return self.variables; + } + + let reverse_universe_map: FxHashMap = self + .query_state + .universe_map + .iter() + .enumerate() + .map(|(idx, universe)| (*universe, UniverseIndex::from_usize(idx))) + .collect(); + + self.variables + .iter() + .map(|v| match *v { + CanonicalVarKind::Int | CanonicalVarKind::Float => *v, + CanonicalVarKind::Ty { ui, sub_root } => { + CanonicalVarKind::Ty { ui: reverse_universe_map[&ui], sub_root } + } + CanonicalVarKind::Region(u) => CanonicalVarKind::Region(reverse_universe_map[&u]), + CanonicalVarKind::Const(u) => CanonicalVarKind::Const(reverse_universe_map[&u]), + CanonicalVarKind::PlaceholderTy(placeholder) => { + CanonicalVarKind::PlaceholderTy(Placeholder { + universe: reverse_universe_map[&placeholder.universe], + ..placeholder + }) + } + CanonicalVarKind::PlaceholderRegion(placeholder) => { + CanonicalVarKind::PlaceholderRegion(Placeholder { + universe: reverse_universe_map[&placeholder.universe], + ..placeholder + }) + } + CanonicalVarKind::PlaceholderConst(placeholder) => { + CanonicalVarKind::PlaceholderConst(Placeholder { + universe: reverse_universe_map[&placeholder.universe], + ..placeholder + }) + } + }) + .collect() + } + + /// Shorthand helper that creates a canonical region variable for + /// `r` (always in the root universe). The reason that we always + /// put these variables into the root universe is because this + /// method is used during **query construction:** in that case, we + /// are taking all the regions and just putting them into the most + /// generic context we can. This may generate solutions that don't + /// fit (e.g., that equate some region variable with a placeholder + /// it can't name) on the caller side, but that's ok, the caller + /// can figure that out. In the meantime, it maximizes our + /// caching. + /// + /// (This works because unification never fails -- and hence trait + /// selection is never affected -- due to a universe mismatch.) + fn canonical_var_for_region_in_root_universe(&mut self, r: Region<'db>) -> Region<'db> { + self.canonical_var_for_region(CanonicalVarKind::Region(UniverseIndex::ROOT), r) + } + + /// Creates a canonical variable (with the given `info`) + /// representing the region `r`; return a region referencing it. + fn canonical_var_for_region( + &mut self, + info: CanonicalVarKind<'db>, + r: Region<'db>, + ) -> Region<'db> { + let var = self.canonical_var(info, r.into()); + let br = BoundRegion { var, kind: BoundRegionKind::Anon }; + Region::new_bound(self.cx(), self.binder_index, br) + } + + /// Given a type variable `ty_var` of the given kind, first check + /// if `ty_var` is bound to anything; if so, canonicalize + /// *that*. Otherwise, create a new canonical variable for + /// `ty_var`. + fn canonicalize_ty_var(&mut self, info: CanonicalVarKind<'db>, ty_var: Ty<'db>) -> Ty<'db> { + debug_assert_eq!(ty_var, self.infcx.shallow_resolve(ty_var)); + let var = self.canonical_var(info, ty_var.into()); + Ty::new_bound( + self.tcx, + self.binder_index, + BoundTy { kind: crate::next_solver::BoundTyKind::Anon, var }, + ) + } + + /// Given a type variable `const_var` of the given kind, first check + /// if `const_var` is bound to anything; if so, canonicalize + /// *that*. Otherwise, create a new canonical variable for + /// `const_var`. 
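The universe-compression step above can be pictured in isolation: each universe that actually occurs is renumbered by its position in the query's `universe_map`, which keeps the maximum universe as small as possible. A standalone sketch of just that renumbering (not the actual helper, which also rewrites each `CanonicalVarKind`):

```rust
use rustc_hash::FxHashMap;
use rustc_type_ir::UniverseIndex;

// Renumber a universe by its position in `universe_map`, mirroring the
// reverse map built by `universe_canonicalized_variables` above.
fn compressed_universe(universe_map: &[UniverseIndex], used: UniverseIndex) -> UniverseIndex {
    let reverse: FxHashMap<UniverseIndex, UniverseIndex> = universe_map
        .iter()
        .enumerate()
        .map(|(idx, universe)| (*universe, UniverseIndex::from_usize(idx)))
        .collect();
    reverse[&used]
}
```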
+ fn canonicalize_const_var( + &mut self, + info: CanonicalVarKind<'db>, + const_var: Const<'db>, + ) -> Const<'db> { + debug_assert_eq!(const_var, self.infcx.shallow_resolve_const(const_var)); + let var = self.canonical_var(info, const_var.into()); + Const::new_bound(self.tcx, self.binder_index, BoundConst { var }) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs new file mode 100644 index 0000000000000..6c7a87ef52494 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs @@ -0,0 +1,107 @@ +//! This module contains code to instantiate new values into a +//! `Canonical<'tcx, T>`. +//! +//! For an overview of what canonicalization is and how it fits into +//! rustc, check out the [chapter in the rustc dev guide][c]. +//! +//! [c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html + +use crate::next_solver::BoundConst; +use crate::next_solver::{ + AliasTy, Binder, BoundRegion, BoundTy, Canonical, CanonicalVarValues, Const, DbInterner, Goal, + ParamEnv, Predicate, PredicateKind, Region, Ty, TyKind, + fold::FnMutDelegate, + infer::{ + DefineOpaqueTypes, InferCtxt, TypeTrace, + traits::{Obligation, PredicateObligations}, + }, +}; +use rustc_type_ir::{ + AliasRelationDirection, AliasTyKind, BoundVar, GenericArgKind, InferTy, TypeFoldable, Upcast, + Variance, + inherent::{IntoKind, SliceLike}, + relate::{ + Relate, TypeRelation, VarianceDiagInfo, + combine::{super_combine_consts, super_combine_tys}, + }, +}; + +pub trait CanonicalExt<'db, V> { + fn instantiate(&self, tcx: DbInterner<'db>, var_values: &CanonicalVarValues<'db>) -> V + where + V: TypeFoldable>; + fn instantiate_projected( + &self, + tcx: DbInterner<'db>, + var_values: &CanonicalVarValues<'db>, + projection_fn: impl FnOnce(&V) -> T, + ) -> T + where + T: TypeFoldable>; +} + +/// FIXME(-Znext-solver): This or public because it is shared with the +/// new trait solver implementation. We should deduplicate canonicalization. +impl<'db, V> CanonicalExt<'db, V> for Canonical<'db, V> { + /// Instantiate the wrapped value, replacing each canonical value + /// with the value given in `var_values`. + fn instantiate(&self, tcx: DbInterner<'db>, var_values: &CanonicalVarValues<'db>) -> V + where + V: TypeFoldable>, + { + self.instantiate_projected(tcx, var_values, |value| value.clone()) + } + + /// Allows one to apply a instantiation to some subset of + /// `self.value`. Invoke `projection_fn` with `self.value` to get + /// a value V that is expressed in terms of the same canonical + /// variables bound in `self` (usually this extracts from subset + /// of `self`). Apply the instantiation `var_values` to this value + /// V, replacing each of the canonical variables. + fn instantiate_projected( + &self, + tcx: DbInterner<'db>, + var_values: &CanonicalVarValues<'db>, + projection_fn: impl FnOnce(&V) -> T, + ) -> T + where + T: TypeFoldable>, + { + assert_eq!(self.variables.len(), var_values.len()); + let value = projection_fn(&self.value); + instantiate_value(tcx, var_values, value) + } +} + +/// Instantiate the values from `var_values` into `value`. `var_values` +/// must be values for the set of canonical variables that appear in +/// `value`. 
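Going the other way, `CanonicalExt::instantiate` (backed by the `instantiate_value` helper below) plugs concrete `var_values` back into a canonical value. A minimal sketch of the intended call shape; the wrapper function and the exact import path are assumptions, not part of this change:

```rust
// Path assumed from the module layout introduced in this change.
use crate::next_solver::infer::canonical::instantiate::CanonicalExt;

// Illustrative wrapper only: substitute previously computed values for the
// canonical variables of `canonical`. `var_values` must line up one-to-one
// with `canonical.variables`, in the order the canonicalizer assigned them.
fn rehydrate<'db, V>(
    tcx: DbInterner<'db>,
    canonical: &Canonical<'db, V>,
    var_values: &CanonicalVarValues<'db>,
) -> V
where
    V: TypeFoldable<DbInterner<'db>>,
{
    canonical.instantiate(tcx, var_values)
}
```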
+pub(super) fn instantiate_value<'db, T>( + tcx: DbInterner<'db>, + var_values: &CanonicalVarValues<'db>, + value: T, +) -> T +where + T: TypeFoldable>, +{ + if var_values.var_values.is_empty() { + value + } else { + let delegate = FnMutDelegate { + regions: &mut |br: BoundRegion| match var_values[br.var].kind() { + GenericArgKind::Lifetime(l) => l, + r => panic!("{br:?} is a region but value is {r:?}"), + }, + types: &mut |bound_ty: BoundTy| match var_values[bound_ty.var].kind() { + GenericArgKind::Type(ty) => ty, + r => panic!("{bound_ty:?} is a type but value is {r:?}"), + }, + consts: &mut |bound_ct: BoundConst| match var_values[bound_ct.var].kind() { + GenericArgKind::Const(ct) => ct, + c => panic!("{bound_ct:?} is a const but value is {c:?}"), + }, + }; + + tcx.replace_escaping_bound_vars_uncached(value, delegate) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/mod.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/mod.rs new file mode 100644 index 0000000000000..d0669f5c3bcc5 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/mod.rs @@ -0,0 +1,150 @@ +//! **Canonicalization** is the key to constructing a query in the +//! middle of type inference. Ordinarily, it is not possible to store +//! types from type inference in query keys, because they contain +//! references to inference variables whose lifetimes are too short +//! and so forth. Canonicalizing a value T1 using `canonicalize_query` +//! produces two things: +//! +//! - a value T2 where each unbound inference variable has been +//! replaced with a **canonical variable**; +//! - a map M (of type `CanonicalVarValues`) from those canonical +//! variables back to the original. +//! +//! We can then do queries using T2. These will give back constraints +//! on the canonical variables which can be translated, using the map +//! M, into constraints in our source context. This process of +//! translating the results back is done by the +//! `instantiate_query_result` method. +//! +//! For a more detailed look at what is happening here, check +//! out the [chapter in the rustc dev guide][c]. +//! +//! [c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html + +use crate::next_solver::{ + AliasTy, Binder, Canonical, CanonicalVarValues, CanonicalVars, Const, DbInterner, GenericArg, + Goal, ParamEnv, PlaceholderConst, PlaceholderRegion, PlaceholderTy, Predicate, PredicateKind, + Region, Ty, TyKind, + infer::{ + DefineOpaqueTypes, InferCtxt, TypeTrace, + traits::{Obligation, PredicateObligations}, + }, +}; +use instantiate::CanonicalExt; +use rustc_index::IndexVec; +use rustc_type_ir::inherent::IntoKind; +use rustc_type_ir::{ + AliasRelationDirection, AliasTyKind, CanonicalVarKind, InferTy, TypeFoldable, UniverseIndex, + Upcast, Variance, + inherent::{SliceLike, Ty as _}, + relate::{ + Relate, TypeRelation, VarianceDiagInfo, + combine::{super_combine_consts, super_combine_tys}, + }, +}; + +pub mod canonicalizer; +pub mod instantiate; + +impl<'db> InferCtxt<'db> { + /// Creates an instantiation S for the canonical value with fresh inference + /// variables and placeholders then applies it to the canonical value. + /// Returns both the instantiated result *and* the instantiation S. + /// + /// This can be invoked as part of constructing an + /// inference context at the start of a query (see + /// `InferCtxtBuilder::build_with_canonical`). 
It basically + /// brings the canonical value "into scope" within your new infcx. + /// + /// At the end of processing, the instantiation S (once + /// canonicalized) then represents the values that you computed + /// for each of the canonical inputs to your query. + pub fn instantiate_canonical( + &self, + canonical: &Canonical<'db, T>, + ) -> (T, CanonicalVarValues<'db>) + where + T: TypeFoldable>, + { + // For each universe that is referred to in the incoming + // query, create a universe in our local inference context. In + // practice, as of this writing, all queries have no universes + // in them, so this code has no effect, but it is looking + // forward to the day when we *do* want to carry universes + // through into queries. + // + // Instantiate the root-universe content into the current universe, + // and create fresh universes for the higher universes. + let universes: IndexVec = std::iter::once(self.universe()) + .chain((1..=canonical.max_universe.as_u32()).map(|_| self.create_next_universe())) + .collect(); + + let var_values = CanonicalVarValues::instantiate( + self.interner, + canonical.variables, + |var_values, info| self.instantiate_canonical_var(info, var_values, |ui| universes[ui]), + ); + let result = canonical.instantiate(self.interner, &var_values); + (result, var_values) + } + + /// Given the "info" about a canonical variable, creates a fresh + /// variable for it. If this is an existentially quantified + /// variable, then you'll get a new inference variable; if it is a + /// universally quantified variable, you get a placeholder. + /// + /// FIXME(-Znext-solver): This is public because it's used by the + /// new trait solver which has a different canonicalization routine. + /// We should somehow deduplicate all of this. + pub fn instantiate_canonical_var( + &self, + cv_info: CanonicalVarKind>, + previous_var_values: &[GenericArg<'db>], + universe_map: impl Fn(UniverseIndex) -> UniverseIndex, + ) -> GenericArg<'db> { + match cv_info { + CanonicalVarKind::Ty { ui, sub_root } => { + let vid = self.next_ty_var_id_in_universe(universe_map(ui)); + // If this inference variable is related to an earlier variable + // via subtyping, we need to add that info to the inference context. 
+ if let Some(prev) = previous_var_values.get(sub_root.as_usize()) { + if let TyKind::Infer(InferTy::TyVar(sub_root)) = prev.expect_ty().kind() { + self.sub_unify_ty_vids_raw(vid, sub_root); + } else { + unreachable!() + } + } + Ty::new_var(self.interner, vid).into() + } + + CanonicalVarKind::Int => self.next_int_var().into(), + + CanonicalVarKind::Float => self.next_float_var().into(), + + CanonicalVarKind::PlaceholderTy(PlaceholderTy { universe, bound }) => { + let universe_mapped = universe_map(universe); + let placeholder_mapped = PlaceholderTy { universe: universe_mapped, bound }; + Ty::new_placeholder(self.interner, placeholder_mapped).into() + } + + CanonicalVarKind::Region(ui) => { + self.next_region_var_in_universe(universe_map(ui)).into() + } + + CanonicalVarKind::PlaceholderRegion(PlaceholderRegion { universe, bound }) => { + let universe_mapped = universe_map(universe); + let placeholder_mapped: crate::next_solver::Placeholder< + crate::next_solver::BoundRegion, + > = PlaceholderRegion { universe: universe_mapped, bound }; + Region::new_placeholder(self.interner, placeholder_mapped).into() + } + + CanonicalVarKind::Const(ui) => self.next_const_var_in_universe(universe_map(ui)).into(), + CanonicalVarKind::PlaceholderConst(PlaceholderConst { universe, bound }) => { + let universe_mapped = universe_map(universe); + let placeholder_mapped = PlaceholderConst { universe: universe_mapped, bound }; + Const::new_placeholder(self.interner, placeholder_mapped).into() + } + } + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/context.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/context.rs new file mode 100644 index 0000000000000..5aa5ad14af551 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/context.rs @@ -0,0 +1,333 @@ +//! Definition of `InferCtxtLike` from the librarified type layer. 
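Putting the two halves together, the round trip looks roughly like this: canonicalize on one side, then `instantiate_canonical` on the other to bring the value back "into scope" with fresh inference variables. This is a sketch under the assumption that both contexts share the same interner; the helper is not part of this change:

```rust
// Sketch of the canonicalize/instantiate round trip described above. The
// instantiated value contains fresh inference variables (and placeholders)
// owned by `target`; `_var_values` records how to map results back.
fn bring_into_scope<'db, T>(
    source: &InferCtxt<'db>,
    target: &InferCtxt<'db>,
    value: T,
) -> T
where
    T: TypeFoldable<DbInterner<'db>>,
{
    let canonical = source.canonicalize_response(value);
    let (instantiated, _var_values) = target.instantiate_canonical(&canonical);
    instantiated
}
```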
+ +use rustc_type_ir::{ + ConstVid, FloatVarValue, FloatVid, GenericArgKind, InferConst, InferTy, IntTy, IntVarValue, + IntVid, RegionVid, TyVid, TypeFoldable, TypingMode, UniverseIndex, + inherent::{Const as _, IntoKind, Span as _, Ty as _}, + relate::combine::PredicateEmittingRelation, +}; + +use crate::next_solver::{ + Binder, Const, ConstKind, DbInterner, ErrorGuaranteed, GenericArgs, OpaqueTypeKey, ParamEnv, + Region, SolverDefId, Span, Ty, TyKind, + infer::opaque_types::{OpaqueHiddenType, table::OpaqueTypeStorageEntries}, +}; + +use super::{BoundRegionConversionTime, InferCtxt, relate::RelateResult, traits::ObligationCause}; + +impl<'db> rustc_type_ir::InferCtxtLike for InferCtxt<'db> { + type Interner = DbInterner<'db>; + + fn cx(&self) -> DbInterner<'db> { + self.interner + } + + fn next_trait_solver(&self) -> bool { + true + } + + fn typing_mode(&self) -> TypingMode> { + self.typing_mode() + } + + fn universe(&self) -> UniverseIndex { + self.universe() + } + + fn create_next_universe(&self) -> UniverseIndex { + self.create_next_universe() + } + + fn universe_of_ty(&self, vid: TyVid) -> Option { + self.probe_ty_var(vid).err() + } + + fn universe_of_lt(&self, lt: RegionVid) -> Option { + self.inner.borrow_mut().unwrap_region_constraints().probe_value(lt).err() + } + + fn universe_of_ct(&self, ct: ConstVid) -> Option { + self.probe_const_var(ct).err() + } + + fn root_ty_var(&self, var: TyVid) -> TyVid { + self.root_var(var) + } + + fn root_const_var(&self, var: ConstVid) -> ConstVid { + self.root_const_var(var) + } + + fn opportunistic_resolve_ty_var(&self, vid: TyVid) -> Ty<'db> { + match self.probe_ty_var(vid) { + Ok(ty) => ty, + Err(_) => Ty::new_var(self.interner, self.root_var(vid)), + } + } + + fn opportunistic_resolve_int_var(&self, vid: IntVid) -> Ty<'db> { + self.opportunistic_resolve_int_var(vid) + } + + fn opportunistic_resolve_float_var(&self, vid: FloatVid) -> Ty<'db> { + self.opportunistic_resolve_float_var(vid) + } + + fn opportunistic_resolve_ct_var(&self, vid: ConstVid) -> Const<'db> { + match self.probe_const_var(vid) { + Ok(ct) => ct, + Err(_) => Const::new_var(self.interner, self.root_const_var(vid)), + } + } + + fn opportunistic_resolve_lt_var(&self, vid: RegionVid) -> Region<'db> { + self.inner + .borrow_mut() + .unwrap_region_constraints() + .opportunistic_resolve_var(self.interner, vid) + } + + fn is_changed_arg(&self, arg: ::GenericArg) -> bool { + match arg.kind() { + GenericArgKind::Lifetime(_) => { + // Lifetimes should not change affect trait selection. 
+ false + } + GenericArgKind::Type(ty) => { + if let TyKind::Infer(infer_ty) = ty.kind() { + match infer_ty { + InferTy::TyVar(vid) => { + !self.probe_ty_var(vid).is_err_and(|_| self.root_var(vid) == vid) + } + InferTy::IntVar(vid) => { + let mut inner = self.inner.borrow_mut(); + !matches!( + inner.int_unification_table().probe_value(vid), + IntVarValue::Unknown + if inner.int_unification_table().find(vid) == vid + ) + } + InferTy::FloatVar(vid) => { + let mut inner = self.inner.borrow_mut(); + !matches!( + inner.float_unification_table().probe_value(vid), + FloatVarValue::Unknown + if inner.float_unification_table().find(vid) == vid + ) + } + InferTy::FreshTy(_) | InferTy::FreshIntTy(_) | InferTy::FreshFloatTy(_) => { + true + } + } + } else { + true + } + } + GenericArgKind::Const(ct) => { + if let ConstKind::Infer(infer_ct) = ct.kind() { + match infer_ct { + InferConst::Var(vid) => !self + .probe_const_var(vid) + .is_err_and(|_| self.root_const_var(vid) == vid), + InferConst::Fresh(_) => true, + } + } else { + true + } + } + } + } + + fn next_ty_infer(&self) -> Ty<'db> { + self.next_ty_var() + } + + fn next_region_infer(&self) -> ::Region { + self.next_region_var() + } + + fn next_const_infer(&self) -> Const<'db> { + self.next_const_var() + } + + fn fresh_args_for_item(&self, def_id: SolverDefId) -> GenericArgs<'db> { + self.fresh_args_for_item(def_id) + } + + fn instantiate_binder_with_infer> + Clone>( + &self, + value: Binder<'db, T>, + ) -> T { + self.instantiate_binder_with_fresh_vars(BoundRegionConversionTime::HigherRankedType, value) + } + + fn enter_forall> + Clone, U>( + &self, + value: Binder<'db, T>, + f: impl FnOnce(T) -> U, + ) -> U { + self.enter_forall(value, f) + } + + fn equate_ty_vids_raw(&self, a: rustc_type_ir::TyVid, b: rustc_type_ir::TyVid) { + self.inner.borrow_mut().type_variables().equate(a, b); + } + + fn equate_int_vids_raw(&self, a: rustc_type_ir::IntVid, b: rustc_type_ir::IntVid) { + self.inner.borrow_mut().int_unification_table().union(a, b); + } + + fn equate_float_vids_raw(&self, a: rustc_type_ir::FloatVid, b: rustc_type_ir::FloatVid) { + self.inner.borrow_mut().float_unification_table().union(a, b); + } + + fn equate_const_vids_raw(&self, a: rustc_type_ir::ConstVid, b: rustc_type_ir::ConstVid) { + self.inner.borrow_mut().const_unification_table().union(a, b); + } + + fn instantiate_ty_var_raw>( + &self, + relation: &mut R, + target_is_expected: bool, + target_vid: rustc_type_ir::TyVid, + instantiation_variance: rustc_type_ir::Variance, + source_ty: Ty<'db>, + ) -> RelateResult<'db, ()> { + self.instantiate_ty_var( + relation, + target_is_expected, + target_vid, + instantiation_variance, + source_ty, + ) + } + + fn instantiate_int_var_raw( + &self, + vid: rustc_type_ir::IntVid, + value: rustc_type_ir::IntVarValue, + ) { + self.inner.borrow_mut().int_unification_table().union_value(vid, value); + } + + fn instantiate_float_var_raw( + &self, + vid: rustc_type_ir::FloatVid, + value: rustc_type_ir::FloatVarValue, + ) { + self.inner.borrow_mut().float_unification_table().union_value(vid, value); + } + + fn instantiate_const_var_raw>( + &self, + relation: &mut R, + target_is_expected: bool, + target_vid: rustc_type_ir::ConstVid, + source_ct: Const<'db>, + ) -> RelateResult<'db, ()> { + self.instantiate_const_var(relation, target_is_expected, target_vid, source_ct) + } + + fn set_tainted_by_errors(&self, e: ErrorGuaranteed) { + self.set_tainted_by_errors(e) + } + + fn shallow_resolve(&self, ty: Ty<'db>) -> Ty<'db> { + self.shallow_resolve(ty) + } + fn 
shallow_resolve_const(&self, ct: Const<'db>) -> Const<'db> { + self.shallow_resolve_const(ct) + } + + fn resolve_vars_if_possible(&self, value: T) -> T + where + T: TypeFoldable>, + { + self.resolve_vars_if_possible(value) + } + + fn probe(&self, probe: impl FnOnce() -> T) -> T { + self.probe(|_| probe()) + } + + fn sub_regions(&self, sub: Region<'db>, sup: Region<'db>, span: Span) { + self.inner.borrow_mut().unwrap_region_constraints().make_subregion(sub, sup); + } + + fn equate_regions(&self, a: Region<'db>, b: Region<'db>, span: Span) { + self.inner.borrow_mut().unwrap_region_constraints().make_eqregion(a, b); + } + + fn register_ty_outlives(&self, ty: Ty<'db>, r: Region<'db>, span: Span) { + //self.register_region_obligation_with_cause(ty, r, &ObligationCause::dummy_with_span(Span::dummy())); + } + + type OpaqueTypeStorageEntries = OpaqueTypeStorageEntries; + + fn opaque_types_storage_num_entries(&self) -> OpaqueTypeStorageEntries { + self.inner.borrow_mut().opaque_types().num_entries() + } + fn clone_opaque_types_lookup_table(&self) -> Vec<(OpaqueTypeKey<'db>, Ty<'db>)> { + self.inner.borrow_mut().opaque_types().iter_lookup_table().map(|(k, h)| (k, h.ty)).collect() + } + fn clone_duplicate_opaque_types(&self) -> Vec<(OpaqueTypeKey<'db>, Ty<'db>)> { + self.inner + .borrow_mut() + .opaque_types() + .iter_duplicate_entries() + .map(|(k, h)| (k, h.ty)) + .collect() + } + fn clone_opaque_types_added_since( + &self, + prev_entries: OpaqueTypeStorageEntries, + ) -> Vec<(OpaqueTypeKey<'db>, Ty<'db>)> { + self.inner + .borrow_mut() + .opaque_types() + .opaque_types_added_since(prev_entries) + .map(|(k, h)| (k, h.ty)) + .collect() + } + + fn register_hidden_type_in_storage( + &self, + opaque_type_key: OpaqueTypeKey<'db>, + hidden_ty: Ty<'db>, + _span: Span, + ) -> Option> { + self.register_hidden_type_in_storage(opaque_type_key, OpaqueHiddenType { ty: hidden_ty }) + } + fn add_duplicate_opaque_type( + &self, + opaque_type_key: OpaqueTypeKey<'db>, + hidden_ty: Ty<'db>, + _span: Span, + ) { + self.inner + .borrow_mut() + .opaque_types() + .add_duplicate(opaque_type_key, OpaqueHiddenType { ty: hidden_ty }) + } + + fn reset_opaque_types(&self) { + let _ = self.take_opaque_types(); + } + + fn sub_unification_table_root_var(&self, var: rustc_type_ir::TyVid) -> rustc_type_ir::TyVid { + self.sub_unification_table_root_var(var) + } + + fn sub_unify_ty_vids_raw(&self, a: rustc_type_ir::TyVid, b: rustc_type_ir::TyVid) { + self.sub_unify_ty_vids_raw(a, b); + } + + fn opaques_with_sub_unified_hidden_type( + &self, + _ty: TyVid, + ) -> Vec> { + // FIXME: I guess we are okay without this for now since currently r-a lacks of + // detailed checks over opaque types. Might need to implement this in future. + vec![] + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/mod.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/mod.rs new file mode 100644 index 0000000000000..8e922abacb206 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/mod.rs @@ -0,0 +1,1130 @@ +//! Infer context the next-trait-solver. 
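+//!
+//! Rough usage sketch (illustrative only; `typing_mode` is assumed to be
+//! supplied by the caller):
+//!
+//! ```ignore (illustrative)
+//! let infcx = interner.infer_ctxt().build(typing_mode);
+//! let ty = infcx.next_ty_var();          // fresh `?T` in the current universe
+//! let same = infcx.shallow_resolve(ty);  // still `?T`: nothing has been unified yet
+//! ```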
+ +use std::cell::{Cell, RefCell}; +use std::fmt; +use std::ops::Range; +use std::sync::Arc; + +pub use BoundRegionConversionTime::*; +pub use at::DefineOpaqueTypes; +use ena::undo_log::UndoLogs; +use ena::unify as ut; +use hir_def::GenericParamId; +use intern::Symbol; +use opaque_types::{OpaqueHiddenType, OpaqueTypeStorage}; +use region_constraints::{ + GenericKind, RegionConstraintCollector, RegionConstraintStorage, UndoLog, VarInfos, VerifyBound, +}; +pub use relate::StructurallyRelateAliases; +pub use relate::combine::PredicateEmittingRelation; +use rustc_hash::{FxHashMap, FxHashSet}; +use rustc_pattern_analysis::Captures; +use rustc_type_ir::error::{ExpectedFound, TypeError}; +use rustc_type_ir::inherent::{ + Const as _, GenericArg as _, GenericArgs as _, IntoKind, ParamEnv as _, SliceLike, Term as _, + Ty as _, +}; +use rustc_type_ir::{ + BoundVar, ClosureKind, ConstVid, FloatTy, FloatVarValue, FloatVid, GenericArgKind, InferConst, + InferTy, IntTy, IntVarValue, IntVid, OutlivesPredicate, RegionVid, TyVid, UniverseIndex, +}; +use rustc_type_ir::{TermKind, TypeVisitableExt}; +use rustc_type_ir::{TypeFoldable, TypeFolder, TypeSuperFoldable}; +use snapshot::undo_log::InferCtxtUndoLogs; +use tracing::{debug, instrument}; +use traits::{ObligationCause, PredicateObligations}; +use type_variable::TypeVariableOrigin; +use unify_key::{ConstVariableOrigin, ConstVariableValue, ConstVidKey}; + +use crate::next_solver::fold::BoundVarReplacerDelegate; +use crate::next_solver::infer::opaque_types::table::OpaqueTypeStorageEntries; +use crate::next_solver::{BoundConst, BoundRegion, BoundTy, BoundVarKind}; + +use super::generics::GenericParamDef; +use super::{ + AliasTerm, Binder, BoundRegionKind, CanonicalQueryInput, CanonicalVarValues, Const, ConstKind, + DbInterner, ErrorGuaranteed, FxIndexMap, GenericArg, GenericArgs, OpaqueTypeKey, ParamEnv, + PlaceholderRegion, PolyCoercePredicate, PolyExistentialProjection, PolyExistentialTraitRef, + PolyFnSig, PolyRegionOutlivesPredicate, PolySubtypePredicate, Predicate, Region, SolverDefId, + SubtypePredicate, Term, TraitPredicate, TraitRef, Ty, TyKind, TypingMode, +}; + +pub mod at; +pub mod canonical; +mod context; +mod opaque_types; +pub mod region_constraints; +pub mod relate; +pub mod resolve; +pub(crate) mod select; +pub(crate) mod snapshot; +pub(crate) mod traits; +mod type_variable; +mod unify_key; + +/// `InferOk<'tcx, ()>` is used a lot. It may seem like a useless wrapper +/// around `PredicateObligations`, but it has one important property: +/// because `InferOk` is marked with `#[must_use]`, if you have a method +/// `InferCtxt::f` that returns `InferResult<()>` and you call it with +/// `infcx.f()?;` you'll get a warning about the obligations being discarded +/// without use, which is probably unintentional and has been a source of bugs +/// in the past. +#[must_use] +#[derive(Debug)] +pub struct InferOk<'db, T> { + pub value: T, + pub obligations: PredicateObligations<'db>, +} +pub type InferResult<'db, T> = Result, TypeError>>; + +pub(crate) type FixupResult = Result; // "fixup result" + +pub(crate) type UnificationTable<'a, 'db, T> = ut::UnificationTable< + ut::InPlace, &'a mut InferCtxtUndoLogs<'db>>, +>; + +fn iter_idx_range + Into>(range: Range) -> impl Iterator { + (range.start.into()..range.end.into()).map(Into::into) +} + +/// This type contains all the things within `InferCtxt` that sit within a +/// `RefCell` and are involved with taking/rolling back snapshots. 
Snapshot +/// operations are hot enough that we want only one call to `borrow_mut` per +/// call to `start_snapshot` and `rollback_to`. +#[derive(Clone)] +pub struct InferCtxtInner<'db> { + pub(crate) undo_log: InferCtxtUndoLogs<'db>, + + /// We instantiate `UnificationTable` with `bounds` because the types + /// that might instantiate a general type variable have an order, + /// represented by its upper and lower bounds. + pub(crate) type_variable_storage: type_variable::TypeVariableStorage<'db>, + + /// Map from const parameter variable to the kind of const it represents. + pub(crate) const_unification_storage: ut::UnificationTableStorage>, + + /// Map from integral variable to the kind of integer it represents. + pub(crate) int_unification_storage: ut::UnificationTableStorage, + + /// Map from floating variable to the kind of float it represents. + pub(crate) float_unification_storage: ut::UnificationTableStorage, + + /// Tracks the set of region variables and the constraints between them. + /// + /// This is initially `Some(_)` but when + /// `resolve_regions_and_report_errors` is invoked, this gets set to `None` + /// -- further attempts to perform unification, etc., may fail if new + /// region constraints would've been added. + pub(crate) region_constraint_storage: Option>, + + /// A set of constraints that regionck must validate. + /// + /// Each constraint has the form `T:'a`, meaning "some type `T` must + /// outlive the lifetime 'a". These constraints derive from + /// instantiated type parameters. So if you had a struct defined + /// like the following: + /// ```ignore (illustrative) + /// struct Foo { ... } + /// ``` + /// In some expression `let x = Foo { ... }`, it will + /// instantiate the type parameter `T` with a fresh type `$0`. At + /// the same time, it will record a region obligation of + /// `$0: 'static`. This will get checked later by regionck. (We + /// can't generally check these things right away because we have + /// to wait until types are resolved.) + /// + /// These are stored in a map keyed to the id of the innermost + /// enclosing fn body / static initializer expression. This is + /// because the location where the obligation was incurred can be + /// relevant with respect to which sublifetime assumptions are in + /// place. The reason that we store under the fn-id, and not + /// something more fine-grained, is so that it is easier for + /// regionck to be sure that it has found *all* the region + /// obligations (otherwise, it's easy to fail to walk to a + /// particular node-id). + /// + /// Before running `resolve_regions_and_report_errors`, the creator + /// of the inference context is expected to invoke + /// [`InferCtxt::process_registered_region_obligations`] + /// for each body-id in this map, which will process the + /// obligations within. This is expected to be done 'late enough' + /// that all type inference variables have been bound and so forth. + pub(crate) region_obligations: Vec>, + + /// Caches for opaque type inference. 
+ pub(crate) opaque_type_storage: OpaqueTypeStorage<'db>, +} + +impl<'db> InferCtxtInner<'db> { + fn new() -> InferCtxtInner<'db> { + InferCtxtInner { + undo_log: InferCtxtUndoLogs::default(), + + type_variable_storage: Default::default(), + const_unification_storage: Default::default(), + int_unification_storage: Default::default(), + float_unification_storage: Default::default(), + region_constraint_storage: Some(Default::default()), + region_obligations: vec![], + opaque_type_storage: Default::default(), + } + } + + #[inline] + pub fn region_obligations(&self) -> &[RegionObligation<'db>] { + &self.region_obligations + } + + #[inline] + fn try_type_variables_probe_ref( + &self, + vid: TyVid, + ) -> Option<&type_variable::TypeVariableValue<'db>> { + // Uses a read-only view of the unification table, this way we don't + // need an undo log. + self.type_variable_storage.eq_relations_ref().try_probe_value(vid) + } + + #[inline] + fn type_variables(&mut self) -> type_variable::TypeVariableTable<'_, 'db> { + self.type_variable_storage.with_log(&mut self.undo_log) + } + + #[inline] + pub(crate) fn opaque_types(&mut self) -> opaque_types::OpaqueTypeTable<'_, 'db> { + self.opaque_type_storage.with_log(&mut self.undo_log) + } + + #[inline] + pub(crate) fn int_unification_table(&mut self) -> UnificationTable<'_, 'db, IntVid> { + tracing::debug!(?self.int_unification_storage); + self.int_unification_storage.with_log(&mut self.undo_log) + } + + #[inline] + pub(crate) fn float_unification_table(&mut self) -> UnificationTable<'_, 'db, FloatVid> { + self.float_unification_storage.with_log(&mut self.undo_log) + } + + #[inline] + fn const_unification_table(&mut self) -> UnificationTable<'_, 'db, ConstVidKey<'db>> { + self.const_unification_storage.with_log(&mut self.undo_log) + } + + #[inline] + pub fn unwrap_region_constraints(&mut self) -> RegionConstraintCollector<'db, '_> { + self.region_constraint_storage + .as_mut() + .expect("region constraints already solved") + .with_log(&mut self.undo_log) + } +} + +#[derive(Clone)] +pub struct InferCtxt<'db> { + pub interner: DbInterner<'db>, + + /// The mode of this inference context, see the struct documentation + /// for more details. + typing_mode: TypingMode<'db>, + + pub inner: RefCell>, + + /// When an error occurs, we want to avoid reporting "derived" + /// errors that are due to this original failure. We have this + /// flag that one can set whenever one creates a type-error that + /// is due to an error in a prior pass. + /// + /// Don't read this flag directly, call `is_tainted_by_errors()` + /// and `set_tainted_by_errors()`. + tainted_by_errors: Cell>, + + /// What is the innermost universe we have created? Starts out as + /// `UniverseIndex::root()` but grows from there as we enter + /// universal quantifiers. + /// + /// N.B., at present, we exclude the universal quantifiers on the + /// item we are type-checking, and just consider those names as + /// part of the root universe. So this would only get incremented + /// when we enter into a higher-ranked (`for<..>`) type or trait + /// bound. + universe: Cell, +} + +/// See the `error_reporting` module for more details. 
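+///
+/// Each variant wraps an `ExpectedFound` pair; a plain type mismatch, for
+/// example, is recorded as `ValuePairs::Terms(ExpectedFound { expected, found })`
+/// and can be turned back into a `(Ty, Ty)` pair via [`ValuePairs::ty`].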
+#[derive(Clone, Debug, PartialEq, Eq)] +pub enum ValuePairs<'db> { + Regions(ExpectedFound>), + Terms(ExpectedFound>), + Aliases(ExpectedFound>), + TraitRefs(ExpectedFound>), + PolySigs(ExpectedFound>), + ExistentialTraitRef(ExpectedFound>), + ExistentialProjection(ExpectedFound>), +} + +impl<'db> ValuePairs<'db> { + pub fn ty(&self) -> Option<(Ty<'db>, Ty<'db>)> { + if let ValuePairs::Terms(ExpectedFound { expected, found }) = self + && let Some(expected) = expected.as_type() + && let Some(found) = found.as_type() + { + return Some((expected, found)); + } + None + } +} + +/// The trace designates the path through inference that we took to +/// encounter an error or subtyping constraint. +/// +/// See the `error_reporting` module for more details. +#[derive(Clone, Debug)] +pub struct TypeTrace<'db> { + pub cause: ObligationCause, + pub values: ValuePairs<'db>, +} + +/// Times when we replace bound regions with existentials: +#[derive(Clone, Copy, Debug)] +pub enum BoundRegionConversionTime { + /// when a fn is called + FnCall, + + /// when two higher-ranked types are compared + HigherRankedType, + + /// when projecting an associated type + AssocTypeProjection(SolverDefId), +} + +#[derive(Copy, Clone, Debug)] +pub struct FixupError { + unresolved: TyOrConstInferVar, +} + +impl fmt::Display for FixupError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + use TyOrConstInferVar::*; + + match self.unresolved { + TyInt(_) => write!( + f, + "cannot determine the type of this integer; \ + add a suffix to specify the type explicitly" + ), + TyFloat(_) => write!( + f, + "cannot determine the type of this number; \ + add a suffix to specify the type explicitly" + ), + Ty(_) => write!(f, "unconstrained type"), + Const(_) => write!(f, "unconstrained const value"), + } + } +} + +/// See the `region_obligations` field for more information. +#[derive(Clone, Debug)] +pub struct RegionObligation<'db> { + pub sub_region: Region<'db>, + pub sup_type: Ty<'db>, +} + +/// Used to configure inference contexts before their creation. +pub struct InferCtxtBuilder<'db> { + interner: DbInterner<'db>, +} + +pub trait DbInternerInferExt<'db> { + fn infer_ctxt(self) -> InferCtxtBuilder<'db>; +} + +impl<'db> DbInternerInferExt<'db> for DbInterner<'db> { + fn infer_ctxt(self) -> InferCtxtBuilder<'db> { + InferCtxtBuilder { interner: self } + } +} + +impl<'db> InferCtxtBuilder<'db> { + /// Given a canonical value `C` as a starting point, create an + /// inference context that contains each of the bound values + /// within instantiated as a fresh variable. The `f` closure is + /// invoked with the new infcx, along with the instantiated value + /// `V` and a instantiation `S`. This instantiation `S` maps from + /// the bound values in `C` to their instantiated values in `V` + /// (in other words, `S(C) = V`). 
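+    ///
+    /// Illustrative shape of a call site (hedged; `input` is assumed to be a
+    /// `CanonicalQueryInput` produced elsewhere by canonicalization):
+    ///
+    /// ```ignore (illustrative)
+    /// let (infcx, value, var_values) = interner.infer_ctxt().build_with_canonical(&input);
+    /// // `var_values` maps each canonical bound variable of `input` to a fresh
+    /// // inference variable, i.e. `S(C) = V` in the notation above.
+    /// ```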
+ pub fn build_with_canonical( + mut self, + input: &CanonicalQueryInput<'db, T>, + ) -> (InferCtxt<'db>, T, CanonicalVarValues<'db>) + where + T: TypeFoldable>, + { + let infcx = self.build(input.typing_mode); + let (value, args) = infcx.instantiate_canonical(&input.canonical); + (infcx, value, args) + } + + pub fn build(&mut self, typing_mode: TypingMode<'db>) -> InferCtxt<'db> { + let InferCtxtBuilder { interner } = *self; + InferCtxt { + interner, + typing_mode, + inner: RefCell::new(InferCtxtInner::new()), + tainted_by_errors: Cell::new(None), + universe: Cell::new(UniverseIndex::ROOT), + } + } +} + +impl<'db> InferOk<'db, ()> { + pub fn into_obligations(self) -> PredicateObligations<'db> { + self.obligations + } +} + +impl<'db> InferCtxt<'db> { + #[inline(always)] + pub fn typing_mode(&self) -> TypingMode<'db> { + self.typing_mode + } + + #[inline(always)] + pub fn typing_mode_unchecked(&self) -> TypingMode<'db> { + self.typing_mode + } + + pub fn unresolved_variables(&self) -> Vec> { + let mut inner = self.inner.borrow_mut(); + let mut vars: Vec> = inner + .type_variables() + .unresolved_variables() + .into_iter() + .map(|t| Ty::new_var(self.interner, t)) + .collect(); + vars.extend( + (0..inner.int_unification_table().len()) + .map(IntVid::from_usize) + .filter(|&vid| inner.int_unification_table().probe_value(vid).is_unknown()) + .map(|v| Ty::new_int_var(self.interner, v)), + ); + vars.extend( + (0..inner.float_unification_table().len()) + .map(FloatVid::from_usize) + .filter(|&vid| inner.float_unification_table().probe_value(vid).is_unknown()) + .map(|v| Ty::new_float_var(self.interner, v)), + ); + vars + } + + #[instrument(skip(self), level = "debug")] + pub fn sub_regions(&self, a: Region<'db>, b: Region<'db>) { + self.inner.borrow_mut().unwrap_region_constraints().make_subregion(a, b); + } + + /// Processes a `Coerce` predicate from the fulfillment context. + /// This is NOT the preferred way to handle coercion, which is to + /// invoke `FnCtxt::coerce` or a similar method (see `coercion.rs`). + /// + /// This method here is actually a fallback that winds up being + /// invoked when `FnCtxt::coerce` encounters unresolved type variables + /// and records a coercion predicate. Presently, this method is equivalent + /// to `subtype_predicate` -- that is, "coercing" `a` to `b` winds up + /// actually requiring `a <: b`. This is of course a valid coercion, + /// but it's not as flexible as `FnCtxt::coerce` would be. + /// + /// (We may refactor this in the future, but there are a number of + /// practical obstacles. Among other things, `FnCtxt::coerce` presently + /// records adjustments that are required on the HIR in order to perform + /// the coercion, and we don't currently have a way to manage that.) + pub fn coerce_predicate( + &self, + cause: &ObligationCause, + param_env: ParamEnv<'db>, + predicate: PolyCoercePredicate<'db>, + ) -> Result, (TyVid, TyVid)> { + let subtype_predicate = predicate.map_bound(|p| SubtypePredicate { + a_is_expected: false, // when coercing from `a` to `b`, `b` is expected + a: p.a, + b: p.b, + }); + self.subtype_predicate(cause, param_env, subtype_predicate) + } + + pub fn subtype_predicate( + &self, + cause: &ObligationCause, + param_env: ParamEnv<'db>, + predicate: PolySubtypePredicate<'db>, + ) -> Result, (TyVid, TyVid)> { + // Check for two unresolved inference variables, in which case we can + // make no progress. This is partly a micro-optimization, but it's + // also an opportunity to "sub-unify" the variables. 
This isn't + // *necessary* to prevent cycles, because they would eventually be sub-unified + // anyhow during generalization, but it helps with diagnostics (we can detect + // earlier that they are sub-unified). + // + // Note that we can just skip the binders here because + // type variables can't (at present, at + // least) capture any of the things bound by this binder. + // + // Note that this sub here is not just for diagnostics - it has semantic + // effects as well. + let r_a = self.shallow_resolve(predicate.skip_binder().a); + let r_b = self.shallow_resolve(predicate.skip_binder().b); + match (r_a.kind(), r_b.kind()) { + (TyKind::Infer(InferTy::TyVar(a_vid)), TyKind::Infer(InferTy::TyVar(b_vid))) => { + return Err((a_vid, b_vid)); + } + _ => {} + } + + self.enter_forall(predicate, |SubtypePredicate { a_is_expected, a, b }| { + if a_is_expected { + Ok(self.at(cause, param_env).sub(DefineOpaqueTypes::Yes, a, b)) + } else { + Ok(self.at(cause, param_env).sup(DefineOpaqueTypes::Yes, b, a)) + } + }) + } + + pub fn region_outlives_predicate( + &self, + cause: &traits::ObligationCause, + predicate: PolyRegionOutlivesPredicate<'db>, + ) { + self.enter_forall(predicate, |OutlivesPredicate(r_a, r_b)| { + self.sub_regions(r_b, r_a); // `b : a` ==> `a <= b` + }) + } + + /// Number of type variables created so far. + pub fn num_ty_vars(&self) -> usize { + self.inner.borrow_mut().type_variables().num_vars() + } + + pub fn next_ty_var(&self) -> Ty<'db> { + self.next_ty_var_with_origin(TypeVariableOrigin { param_def_id: None }) + } + + pub fn next_ty_vid(&self) -> TyVid { + self.inner + .borrow_mut() + .type_variables() + .new_var(self.universe(), TypeVariableOrigin { param_def_id: None }) + } + + pub fn next_ty_var_with_origin(&self, origin: TypeVariableOrigin) -> Ty<'db> { + let vid = self.inner.borrow_mut().type_variables().new_var(self.universe(), origin); + Ty::new_var(self.interner, vid) + } + + pub fn next_ty_var_id_in_universe(&self, universe: UniverseIndex) -> TyVid { + let origin = TypeVariableOrigin { param_def_id: None }; + self.inner.borrow_mut().type_variables().new_var(universe, origin) + } + + pub fn next_ty_var_in_universe(&self, universe: UniverseIndex) -> Ty<'db> { + let vid = self.next_ty_var_id_in_universe(universe); + Ty::new_var(self.interner, vid) + } + + pub fn next_const_var(&self) -> Const<'db> { + self.next_const_var_with_origin(ConstVariableOrigin { param_def_id: None }) + } + + pub fn next_const_vid(&self) -> ConstVid { + self.inner + .borrow_mut() + .const_unification_table() + .new_key(ConstVariableValue::Unknown { + origin: ConstVariableOrigin { param_def_id: None }, + universe: self.universe(), + }) + .vid + } + + pub fn next_const_var_with_origin(&self, origin: ConstVariableOrigin) -> Const<'db> { + let vid = self + .inner + .borrow_mut() + .const_unification_table() + .new_key(ConstVariableValue::Unknown { origin, universe: self.universe() }) + .vid; + Const::new_var(self.interner, vid) + } + + pub fn next_const_var_in_universe(&self, universe: UniverseIndex) -> Const<'db> { + let origin = ConstVariableOrigin { param_def_id: None }; + let vid = self + .inner + .borrow_mut() + .const_unification_table() + .new_key(ConstVariableValue::Unknown { origin, universe }) + .vid; + Const::new_var(self.interner, vid) + } + + pub fn next_int_var(&self) -> Ty<'db> { + let next_int_var_id = + self.inner.borrow_mut().int_unification_table().new_key(IntVarValue::Unknown); + Ty::new_int_var(self.interner, next_int_var_id) + } + + pub fn next_int_vid(&self) -> IntVid { + 
self.inner.borrow_mut().int_unification_table().new_key(IntVarValue::Unknown) + } + + pub fn next_float_var(&self) -> Ty<'db> { + Ty::new_float_var(self.interner, self.next_float_vid()) + } + + pub fn next_float_vid(&self) -> FloatVid { + self.inner.borrow_mut().float_unification_table().new_key(FloatVarValue::Unknown) + } + + /// Creates a fresh region variable with the next available index. + /// The variable will be created in the maximum universe created + /// thus far, allowing it to name any region created thus far. + pub fn next_region_var(&self) -> Region<'db> { + self.next_region_var_in_universe(self.universe()) + } + + pub fn next_region_vid(&self) -> RegionVid { + self.inner.borrow_mut().unwrap_region_constraints().new_region_var(self.universe()) + } + + /// Creates a fresh region variable with the next available index + /// in the given universe; typically, you can use + /// `next_region_var` and just use the maximal universe. + pub fn next_region_var_in_universe(&self, universe: UniverseIndex) -> Region<'db> { + let region_var = + self.inner.borrow_mut().unwrap_region_constraints().new_region_var(universe); + Region::new_var(self.interner, region_var) + } + + pub fn next_term_var_of_kind(&self, term: Term<'db>) -> Term<'db> { + match term.kind() { + TermKind::Ty(_) => self.next_ty_var().into(), + TermKind::Const(_) => self.next_const_var().into(), + } + } + + /// Return the universe that the region `r` was created in. For + /// most regions (e.g., `'static`, named regions from the user, + /// etc) this is the root universe U0. For inference variables or + /// placeholders, however, it will return the universe which they + /// are associated. + pub fn universe_of_region(&self, r: Region<'db>) -> UniverseIndex { + self.inner.borrow_mut().unwrap_region_constraints().universe(r) + } + + /// Number of region variables created so far. + pub fn num_region_vars(&self) -> usize { + self.inner.borrow_mut().unwrap_region_constraints().num_region_vars() + } + + /// Just a convenient wrapper of `next_region_var` for using during NLL. + #[instrument(skip(self), level = "debug")] + pub fn next_nll_region_var(&self) -> Region<'db> { + self.next_region_var() + } + + /// Just a convenient wrapper of `next_region_var` for using during NLL. + #[instrument(skip(self), level = "debug")] + pub fn next_nll_region_var_in_universe(&self, universe: UniverseIndex) -> Region<'db> { + self.next_region_var_in_universe(universe) + } + + fn var_for_def(&self, id: GenericParamId, name: &Symbol) -> GenericArg<'db> { + match id { + GenericParamId::LifetimeParamId(_) => { + // Create a region inference variable for the given + // region parameter definition. + self.next_region_var().into() + } + GenericParamId::TypeParamId(_) => { + // Create a type inference variable for the given + // type parameter definition. The generic parameters are + // for actual parameters that may be referred to by + // the default of this type parameter, if it exists. + // e.g., `struct Foo(...);` when + // used in a path such as `Foo::::new()` will + // use an inference variable for `C` with `[T, U]` + // as the generic parameters for the default, `(T, U)`. 
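+                // (Illustrative reading of the example above, with the generics
+                // spelled out: for `struct Foo<A, B, C = (A, B)>(...)` used as
+                // `Foo::<T, U>::new()`, the defaulted `C` gets an inference
+                // variable whose default is `(T, U)`.)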
+ let ty_var_id = self + .inner + .borrow_mut() + .type_variables() + .new_var(self.universe(), TypeVariableOrigin { param_def_id: None }); + + Ty::new_var(self.interner, ty_var_id).into() + } + GenericParamId::ConstParamId(_) => { + let origin = ConstVariableOrigin { param_def_id: None }; + let const_var_id = self + .inner + .borrow_mut() + .const_unification_table() + .new_key(ConstVariableValue::Unknown { origin, universe: self.universe() }) + .vid; + Const::new_var(self.interner, const_var_id).into() + } + } + } + + /// Given a set of generics defined on a type or impl, returns the generic parameters mapping + /// each type/region parameter to a fresh inference variable. + pub fn fresh_args_for_item(&self, def_id: SolverDefId) -> GenericArgs<'db> { + GenericArgs::for_item(self.interner, def_id, |name, index, kind, _| { + self.var_for_def(kind, name) + }) + } + + /// Returns `true` if errors have been reported since this infcx was + /// created. This is sometimes used as a heuristic to skip + /// reporting errors that often occur as a result of earlier + /// errors, but where it's hard to be 100% sure (e.g., unresolved + /// inference variables, regionck errors). + #[must_use = "this method does not have any side effects"] + pub fn tainted_by_errors(&self) -> Option { + self.tainted_by_errors.get() + } + + /// Set the "tainted by errors" flag to true. We call this when we + /// observe an error from a prior pass. + pub fn set_tainted_by_errors(&self, e: ErrorGuaranteed) { + debug!("set_tainted_by_errors(ErrorGuaranteed)"); + self.tainted_by_errors.set(Some(e)); + } + + #[instrument(level = "debug", skip(self), ret)] + pub fn take_opaque_types(&self) -> Vec<(OpaqueTypeKey<'db>, OpaqueHiddenType<'db>)> { + self.inner.borrow_mut().opaque_type_storage.take_opaque_types().collect() + } + + #[instrument(level = "debug", skip(self), ret)] + pub fn clone_opaque_types(&self) -> Vec<(OpaqueTypeKey<'db>, OpaqueHiddenType<'db>)> { + self.inner.borrow_mut().opaque_type_storage.iter_opaque_types().collect() + } + + #[inline(always)] + pub fn can_define_opaque_ty(&self, id: impl Into) -> bool { + match self.typing_mode_unchecked() { + TypingMode::Analysis { defining_opaque_types_and_generators } => { + defining_opaque_types_and_generators.contains(&id.into()) + } + TypingMode::Coherence | TypingMode::PostAnalysis => false, + TypingMode::Borrowck { defining_opaque_types } => unimplemented!(), + TypingMode::PostBorrowckAnalysis { defined_opaque_types } => unimplemented!(), + } + } + + /// If `TyVar(vid)` resolves to a type, return that type. Else, return the + /// universe index of `TyVar(vid)`. + pub fn probe_ty_var(&self, vid: TyVid) -> Result, UniverseIndex> { + use self::type_variable::TypeVariableValue; + + match self.inner.borrow_mut().type_variables().probe(vid) { + TypeVariableValue::Known { value } => Ok(value), + TypeVariableValue::Unknown { universe } => Err(universe), + } + } + + pub fn shallow_resolve(&self, ty: Ty<'db>) -> Ty<'db> { + if let TyKind::Infer(v) = ty.kind() { + match v { + InferTy::TyVar(v) => { + // Not entirely obvious: if `typ` is a type variable, + // it can be resolved to an int/float variable, which + // can then be recursively resolved, hence the + // recursion. Note though that we prevent type + // variables from unifying to other type variables + // directly (though they may be embedded + // structurally), and we prevent cycles in any case, + // so this recursion should always be of very limited + // depth. 
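+                    //
+                    // Illustrative chain (assumed example): if `?t` was unified
+                    // with the int variable `?i`, and `?i` later resolved to
+                    // `i32`, then `shallow_resolve(?t)` probes `?t` to `?i` and
+                    // recurses once more to return `i32`.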
+ // + // Note: if these two lines are combined into one we get + // dynamic borrow errors on `self.inner`. + let known = self.inner.borrow_mut().type_variables().probe(v).known(); + known.map_or(ty, |t| self.shallow_resolve(t)) + } + + InferTy::IntVar(v) => { + match self.inner.borrow_mut().int_unification_table().probe_value(v) { + IntVarValue::IntType(ty) => Ty::new_int(self.interner, ty), + IntVarValue::UintType(ty) => Ty::new_uint(self.interner, ty), + IntVarValue::Unknown => ty, + } + } + + InferTy::FloatVar(v) => { + match self.inner.borrow_mut().float_unification_table().probe_value(v) { + FloatVarValue::Known(ty) => Ty::new_float(self.interner, ty), + FloatVarValue::Unknown => ty, + } + } + + InferTy::FreshTy(_) | InferTy::FreshIntTy(_) | InferTy::FreshFloatTy(_) => ty, + } + } else { + ty + } + } + + pub fn shallow_resolve_const(&self, ct: Const<'db>) -> Const<'db> { + match ct.kind() { + ConstKind::Infer(infer_ct) => match infer_ct { + InferConst::Var(vid) => self + .inner + .borrow_mut() + .const_unification_table() + .probe_value(vid) + .known() + .unwrap_or(ct), + InferConst::Fresh(_) => ct, + }, + ConstKind::Param(_) + | ConstKind::Bound(_, _) + | ConstKind::Placeholder(_) + | ConstKind::Unevaluated(_) + | ConstKind::Value(_) + | ConstKind::Error(_) + | ConstKind::Expr(_) => ct, + } + } + + pub fn root_var(&self, var: TyVid) -> TyVid { + self.inner.borrow_mut().type_variables().root_var(var) + } + + pub fn root_const_var(&self, var: ConstVid) -> ConstVid { + self.inner.borrow_mut().const_unification_table().find(var).vid + } + + /// Resolves an int var to a rigid int type, if it was constrained to one, + /// or else the root int var in the unification table. + pub fn opportunistic_resolve_int_var(&self, vid: IntVid) -> Ty<'db> { + let mut inner = self.inner.borrow_mut(); + let value = inner.int_unification_table().probe_value(vid); + match value { + IntVarValue::IntType(ty) => Ty::new_int(self.interner, ty), + IntVarValue::UintType(ty) => Ty::new_uint(self.interner, ty), + IntVarValue::Unknown => { + Ty::new_int_var(self.interner, inner.int_unification_table().find(vid)) + } + } + } + + pub fn resolve_int_var(&self, vid: IntVid) -> Option> { + let mut inner = self.inner.borrow_mut(); + let value = inner.int_unification_table().probe_value(vid); + match value { + IntVarValue::IntType(ty) => Some(Ty::new_int(self.interner, ty)), + IntVarValue::UintType(ty) => Some(Ty::new_uint(self.interner, ty)), + IntVarValue::Unknown => None, + } + } + + /// Resolves a float var to a rigid int type, if it was constrained to one, + /// or else the root float var in the unification table. + pub fn opportunistic_resolve_float_var(&self, vid: FloatVid) -> Ty<'db> { + let mut inner = self.inner.borrow_mut(); + let value = inner.float_unification_table().probe_value(vid); + match value { + FloatVarValue::Known(ty) => Ty::new_float(self.interner, ty), + FloatVarValue::Unknown => { + Ty::new_float_var(self.interner, inner.float_unification_table().find(vid)) + } + } + } + + pub fn resolve_float_var(&self, vid: FloatVid) -> Option> { + let mut inner = self.inner.borrow_mut(); + let value = inner.float_unification_table().probe_value(vid); + match value { + FloatVarValue::Known(ty) => Some(Ty::new_float(self.interner, ty)), + FloatVarValue::Unknown => None, + } + } + + /// Where possible, replaces type/const variables in + /// `value` with their final value. Note that region variables + /// are unaffected. If a type/const variable has not been unified, it + /// is left as is. 
This is an idempotent operation that does + /// not affect inference state in any way and so you can do it + /// at will. + pub fn resolve_vars_if_possible(&self, value: T) -> T + where + T: TypeFoldable>, + { + if let Err(guar) = value.error_reported() { + self.set_tainted_by_errors(guar); + } + if !value.has_non_region_infer() { + return value; + } + let mut r = resolve::OpportunisticVarResolver::new(self); + value.fold_with(&mut r) + } + + pub fn probe_const_var(&self, vid: ConstVid) -> Result, UniverseIndex> { + match self.inner.borrow_mut().const_unification_table().probe_value(vid) { + ConstVariableValue::Known { value } => Ok(value), + ConstVariableValue::Unknown { origin: _, universe } => Err(universe), + } + } + + // Instantiates the bound variables in a given binder with fresh inference + // variables in the current universe. + // + // Use this method if you'd like to find some generic parameters of the binder's + // variables (e.g. during a method call). If there isn't a [`BoundRegionConversionTime`] + // that corresponds to your use case, consider whether or not you should + // use [`InferCtxt::enter_forall`] instead. + pub fn instantiate_binder_with_fresh_vars( + &self, + lbrct: BoundRegionConversionTime, + value: Binder<'db, T>, + ) -> T + where + T: TypeFoldable> + Clone, + { + if let Some(inner) = value.clone().no_bound_vars() { + return inner; + } + + let bound_vars = value.clone().bound_vars(); + let mut args = Vec::with_capacity(bound_vars.len()); + + for bound_var_kind in bound_vars { + let arg: GenericArg<'db> = match bound_var_kind { + BoundVarKind::Ty(_) => self.next_ty_var().into(), + BoundVarKind::Region(br) => self.next_region_var().into(), + BoundVarKind::Const => self.next_const_var().into(), + }; + args.push(arg); + } + + struct ToFreshVars<'db> { + args: Vec>, + } + + impl<'db> BoundVarReplacerDelegate<'db> for ToFreshVars<'db> { + fn replace_region(&mut self, br: BoundRegion) -> Region<'db> { + self.args[br.var.index()].expect_region() + } + fn replace_ty(&mut self, bt: BoundTy) -> Ty<'db> { + self.args[bt.var.index()].expect_ty() + } + fn replace_const(&mut self, bv: BoundConst) -> Const<'db> { + self.args[bv.var.index()].expect_const() + } + } + let delegate = ToFreshVars { args }; + self.interner.replace_bound_vars_uncached(value, delegate) + } + + /// Obtains the latest type of the given closure; this may be a + /// closure in the current function, in which case its + /// `ClosureKind` may not yet be known. + pub fn closure_kind(&self, closure_ty: Ty<'db>) -> Option { + let unresolved_kind_ty = match closure_ty.kind() { + TyKind::Closure(_, args) => args.as_closure().kind_ty(), + TyKind::CoroutineClosure(_, args) => args.as_coroutine_closure().kind_ty(), + _ => panic!("unexpected type {closure_ty:?}"), + }; + let closure_kind_ty = self.shallow_resolve(unresolved_kind_ty); + closure_kind_ty.to_opt_closure_kind() + } + + pub fn universe(&self) -> UniverseIndex { + self.universe.get() + } + + /// Creates and return a fresh universe that extends all previous + /// universes. Updates `self.universe` to that new universe. + pub fn create_next_universe(&self) -> UniverseIndex { + let u = self.universe.get().next_universe(); + debug!("create_next_universe {u:?}"); + self.universe.set(u); + u + } + + /// The returned function is used in a fast path. If it returns `true` the variable is + /// unchanged, `false` indicates that the status is unknown. 
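+    ///
+    /// Hedged usage sketch (mirroring the fulfillment-loop use described below;
+    /// `stalled_on` is an assumed local):
+    ///
+    /// ```ignore (illustrative)
+    /// let definitely_unchanged = infcx.is_ty_infer_var_definitely_unchanged();
+    /// let may_skip_goal = stalled_on.iter().all(|&var| definitely_unchanged(var));
+    /// ```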
+ #[inline] + pub fn is_ty_infer_var_definitely_unchanged<'a>( + &'a self, + ) -> (impl Fn(TyOrConstInferVar) -> bool + Captures<'db> + 'a) { + // This hoists the borrow/release out of the loop body. + let inner = self.inner.try_borrow(); + + move |infer_var: TyOrConstInferVar| match (infer_var, &inner) { + (TyOrConstInferVar::Ty(ty_var), Ok(inner)) => { + use self::type_variable::TypeVariableValue; + + matches!( + inner.try_type_variables_probe_ref(ty_var), + Some(TypeVariableValue::Unknown { .. }) + ) + } + _ => false, + } + } + + /// `ty_or_const_infer_var_changed` is equivalent to one of these two: + /// * `shallow_resolve(ty) != ty` (where `ty.kind = Infer(_)`) + /// * `shallow_resolve(ct) != ct` (where `ct.kind = ConstKind::Infer(_)`) + /// + /// However, `ty_or_const_infer_var_changed` is more efficient. It's always + /// inlined, despite being large, because it has only two call sites that + /// are extremely hot (both in `traits::fulfill`'s checking of `stalled_on` + /// inference variables), and it handles both `Ty` and `Const` without + /// having to resort to storing full `GenericArg`s in `stalled_on`. + #[inline(always)] + pub fn ty_or_const_infer_var_changed(&self, infer_var: TyOrConstInferVar) -> bool { + match infer_var { + TyOrConstInferVar::Ty(v) => { + use self::type_variable::TypeVariableValue; + + // If `inlined_probe` returns a `Known` value, it never equals + // `Infer(TyVar(v))`. + match self.inner.borrow_mut().type_variables().inlined_probe(v) { + TypeVariableValue::Unknown { .. } => false, + TypeVariableValue::Known { .. } => true, + } + } + + TyOrConstInferVar::TyInt(v) => { + // If `inlined_probe_value` returns a value it's always a + // `Int(_)` or `UInt(_)`, which never matches a + // `Infer(_)`. + self.inner.borrow_mut().int_unification_table().inlined_probe_value(v).is_known() + } + + TyOrConstInferVar::TyFloat(v) => { + // If `probe_value` returns a value it's always a + // `Float(_)`, which never matches a `Infer(_)`. + // + // Not `inlined_probe_value(v)` because this call site is colder. + self.inner.borrow_mut().float_unification_table().probe_value(v).is_known() + } + + TyOrConstInferVar::Const(v) => { + // If `probe_value` returns a `Known` value, it never equals + // `ConstKind::Infer(InferConst::Var(v))`. + // + // Not `inlined_probe_value(v)` because this call site is colder. + match self.inner.borrow_mut().const_unification_table().probe_value(v) { + ConstVariableValue::Unknown { .. } => false, + ConstVariableValue::Known { .. } => true, + } + } + } + } + + fn sub_unification_table_root_var(&self, var: rustc_type_ir::TyVid) -> rustc_type_ir::TyVid { + self.inner.borrow_mut().type_variables().sub_unification_table_root_var(var) + } + + fn sub_unify_ty_vids_raw(&self, a: rustc_type_ir::TyVid, b: rustc_type_ir::TyVid) { + self.inner.borrow_mut().type_variables().sub_unify(a, b); + } +} + +/// Helper for [InferCtxt::ty_or_const_infer_var_changed] (see comment on that), currently +/// used only for `traits::fulfill`'s list of `stalled_on` inference variables. +#[derive(Copy, Clone, Debug)] +pub enum TyOrConstInferVar { + /// Equivalent to `Infer(TyVar(_))`. + Ty(TyVid), + /// Equivalent to `Infer(IntVar(_))`. + TyInt(IntVid), + /// Equivalent to `Infer(FloatVar(_))`. + TyFloat(FloatVid), + + /// Equivalent to `ConstKind::Infer(InferConst::Var(_))`. 
+ Const(ConstVid), +} + +impl TyOrConstInferVar { + /// Tries to extract an inference variable from a type or a constant, returns `None` + /// for types other than `Infer(_)` (or `InferTy::Fresh*`) and + /// for constants other than `ConstKind::Infer(_)` (or `InferConst::Fresh`). + pub fn maybe_from_generic_arg<'db>(arg: GenericArg<'db>) -> Option { + match arg.kind() { + GenericArgKind::Type(ty) => Self::maybe_from_ty(ty), + GenericArgKind::Const(ct) => Self::maybe_from_const(ct), + GenericArgKind::Lifetime(_) => None, + } + } + + /// Tries to extract an inference variable from a type, returns `None` + /// for types other than `Infer(_)` (or `InferTy::Fresh*`). + fn maybe_from_ty<'db>(ty: Ty<'db>) -> Option { + match ty.kind() { + TyKind::Infer(InferTy::TyVar(v)) => Some(TyOrConstInferVar::Ty(v)), + TyKind::Infer(InferTy::IntVar(v)) => Some(TyOrConstInferVar::TyInt(v)), + TyKind::Infer(InferTy::FloatVar(v)) => Some(TyOrConstInferVar::TyFloat(v)), + _ => None, + } + } + + /// Tries to extract an inference variable from a constant, returns `None` + /// for constants other than `ConstKind::Infer(_)` (or `InferConst::Fresh`). + fn maybe_from_const<'db>(ct: Const<'db>) -> Option { + match ct.kind() { + ConstKind::Infer(InferConst::Var(v)) => Some(TyOrConstInferVar::Const(v)), + _ => None, + } + } +} + +impl<'db> TypeTrace<'db> { + pub fn types(cause: &ObligationCause, a: Ty<'db>, b: Ty<'db>) -> TypeTrace<'db> { + TypeTrace { + cause: cause.clone(), + values: ValuePairs::Terms(ExpectedFound::new(a.into(), b.into())), + } + } + + pub fn trait_refs( + cause: &ObligationCause, + a: TraitRef<'db>, + b: TraitRef<'db>, + ) -> TypeTrace<'db> { + TypeTrace { cause: cause.clone(), values: ValuePairs::TraitRefs(ExpectedFound::new(a, b)) } + } + + pub fn consts(cause: &ObligationCause, a: Const<'db>, b: Const<'db>) -> TypeTrace<'db> { + TypeTrace { + cause: cause.clone(), + values: ValuePairs::Terms(ExpectedFound::new(a.into(), b.into())), + } + } +} + +/// Requires that `region` must be equal to one of the regions in `choice_regions`. +/// We often denote this using the syntax: +/// +/// ```text +/// R0 member of [O1..On] +/// ``` +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct MemberConstraint<'db> { + /// The `DefId` and args of the opaque type causing this constraint. + /// Used for error reporting. + pub key: OpaqueTypeKey<'db>, + + /// The hidden type in which `member_region` appears: used for error reporting. + pub hidden_ty: Ty<'db>, + + /// The region `R0`. + pub member_region: Region<'db>, + + /// The options `O1..On`. + pub choice_regions: Arc>>, +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/opaque_types/mod.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/opaque_types/mod.rs new file mode 100644 index 0000000000000..0f68ec8cdb5b4 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/opaque_types/mod.rs @@ -0,0 +1,56 @@ +//! Things related to the infer context of the next-trait-solver. 
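+//!
+//! [`InferCtxt::register_hidden_type_in_storage`] below records a hidden type
+//! for an opaque; an illustrative (assumed) flow:
+//!
+//! ```ignore (illustrative)
+//! // First registration for `key` stores the entry and returns `None`.
+//! let prev = infcx.register_hidden_type_in_storage(key, OpaqueHiddenType { ty: hidden });
+//! // Registering the same `key` again returns the previously stored hidden type,
+//! // so the caller can equate the two itself.
+//! ```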
+ +use std::sync::Arc; + +use tracing::{debug, instrument}; + +use crate::next_solver::{ + Clause, ClauseKind, FxIndexMap, GenericArgs, OpaqueTypeKey, ProjectionPredicate, SolverDefId, + TypingMode, util::BottomUpFolder, +}; + +pub(crate) mod table; + +pub(crate) use table::{OpaqueTypeStorage, OpaqueTypeTable}; + +use crate::next_solver::{ + AliasTy, Binder, BoundRegion, BoundTy, Canonical, CanonicalVarValues, Const, DbInterner, Goal, + ParamEnv, Predicate, PredicateKind, Region, Ty, TyKind, + fold::FnMutDelegate, + infer::{ + DefineOpaqueTypes, InferCtxt, TypeTrace, + traits::{Obligation, PredicateObligations}, + }, +}; +use rustc_type_ir::{ + AliasRelationDirection, AliasTyKind, BoundConstness, BoundVar, Flags, GenericArgKind, InferTy, + Interner, RegionKind, TypeFlags, TypeFoldable, TypeSuperVisitable, TypeVisitable, + TypeVisitableExt, TypeVisitor, Upcast, Variance, + error::{ExpectedFound, TypeError}, + inherent::{DefId, GenericArgs as _, IntoKind, SliceLike}, + relate::{ + Relate, TypeRelation, VarianceDiagInfo, + combine::{super_combine_consts, super_combine_tys}, + }, +}; + +use super::{InferOk, traits::ObligationCause}; + +#[derive(Copy, Clone, Debug)] +pub struct OpaqueHiddenType<'db> { + pub ty: Ty<'db>, +} + +impl<'db> InferCtxt<'db> { + /// Insert a hidden type into the opaque type storage, making sure + /// it hasn't previously been defined. This does not emit any + /// constraints and it's the responsibility of the caller to make + /// sure that the item bounds of the opaque are checked. + pub fn register_hidden_type_in_storage( + &self, + opaque_type_key: OpaqueTypeKey<'db>, + hidden_ty: OpaqueHiddenType<'db>, + ) -> Option> { + self.inner.borrow_mut().opaque_types().register(opaque_type_key, hidden_ty) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/opaque_types/table.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/opaque_types/table.rs new file mode 100644 index 0000000000000..8ab409d782813 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/opaque_types/table.rs @@ -0,0 +1,166 @@ +//! Things related to storage opaques in the infer context of the next-trait-solver. + +use std::ops::Deref; + +use ena::undo_log::UndoLogs; +use tracing::instrument; + +use super::OpaqueHiddenType; +use crate::next_solver::{ + FxIndexMap, OpaqueTypeKey, Ty, + infer::snapshot::undo_log::{InferCtxtUndoLogs, UndoLog}, +}; + +#[derive(Default, Debug, Clone)] +pub(crate) struct OpaqueTypeStorage<'db> { + opaque_types: FxIndexMap, OpaqueHiddenType<'db>>, + duplicate_entries: Vec<(OpaqueTypeKey<'db>, OpaqueHiddenType<'db>)>, +} + +/// The number of entries in the opaque type storage at a given point. +/// +/// Used to check that we haven't added any new opaque types after checking +/// the opaque types currently in the storage. +#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)] +pub struct OpaqueTypeStorageEntries { + opaque_types: usize, + duplicate_entries: usize, +} + +impl rustc_type_ir::inherent::OpaqueTypeStorageEntries for OpaqueTypeStorageEntries { + fn needs_reevaluation(self, canonicalized: usize) -> bool { + self.opaque_types != canonicalized + } +} + +impl<'db> OpaqueTypeStorage<'db> { + #[instrument(level = "debug")] + pub(crate) fn remove(&mut self, key: OpaqueTypeKey<'db>, prev: Option>) { + if let Some(prev) = prev { + *self.opaque_types.get_mut(&key).unwrap() = prev; + } else { + // FIXME(#120456) - is `swap_remove` correct? 
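+            // (Aside, assumed context: `swap_remove` is O(1) but perturbs the
+            // map's insertion order, which `opaque_types_added_since` relies on
+            // when skipping already-seen entries.)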
+            match self.opaque_types.swap_remove(&key) {
+                None => {
+                    panic!("reverted opaque type inference that was never registered: {key:?}")
+                }
+                Some(_) => {}
+            }
+        }
+    }
+
+    pub(crate) fn pop_duplicate_entry(&mut self) {
+        let entry = self.duplicate_entries.pop();
+        assert!(entry.is_some());
+    }
+
+    pub fn is_empty(&self) -> bool {
+        let OpaqueTypeStorage { opaque_types, duplicate_entries } = self;
+        opaque_types.is_empty() && duplicate_entries.is_empty()
+    }
+
+    pub(crate) fn take_opaque_types(
+        &mut self,
+    ) -> impl Iterator<Item = (OpaqueTypeKey<'db>, OpaqueHiddenType<'db>)> {
+        let OpaqueTypeStorage { opaque_types, duplicate_entries } = self;
+        std::mem::take(opaque_types).into_iter().chain(std::mem::take(duplicate_entries))
+    }
+
+    pub fn num_entries(&self) -> OpaqueTypeStorageEntries {
+        OpaqueTypeStorageEntries {
+            opaque_types: self.opaque_types.len(),
+            duplicate_entries: self.duplicate_entries.len(),
+        }
+    }
+
+    pub fn opaque_types_added_since(
+        &self,
+        prev_entries: OpaqueTypeStorageEntries,
+    ) -> impl Iterator<Item = (OpaqueTypeKey<'db>, OpaqueHiddenType<'db>)> {
+        self.opaque_types
+            .iter()
+            .skip(prev_entries.opaque_types)
+            .map(|(k, v)| (*k, *v))
+            .chain(self.duplicate_entries.iter().skip(prev_entries.duplicate_entries).copied())
+    }
+
+    /// Only returns the opaque types from the lookup table. These are used
+    /// when normalizing opaque types and have a unique key.
+    ///
+    /// Outside of canonicalization one should generally use `iter_opaque_types`
+    /// to also consider duplicate entries.
+    pub fn iter_lookup_table(
+        &self,
+    ) -> impl Iterator<Item = (OpaqueTypeKey<'db>, OpaqueHiddenType<'db>)> {
+        self.opaque_types.iter().map(|(k, v)| (*k, *v))
+    }
+
+    /// Only returns the opaque types which are stored in `duplicate_entries`.
+    ///
+    /// These have to be considered when checking all opaque type uses but are e.g.
+    /// irrelevant for canonical inputs as nested queries never meaningfully
+    /// access them.
+ pub fn iter_duplicate_entries( + &self, + ) -> impl Iterator, OpaqueHiddenType<'db>)> { + self.duplicate_entries.iter().copied() + } + + pub fn iter_opaque_types( + &self, + ) -> impl Iterator, OpaqueHiddenType<'db>)> { + let OpaqueTypeStorage { opaque_types, duplicate_entries } = self; + opaque_types.iter().map(|(k, v)| (*k, *v)).chain(duplicate_entries.iter().copied()) + } + + #[inline] + pub(crate) fn with_log<'a>( + &'a mut self, + undo_log: &'a mut InferCtxtUndoLogs<'db>, + ) -> OpaqueTypeTable<'a, 'db> { + OpaqueTypeTable { storage: self, undo_log } + } +} + +impl<'db> Drop for OpaqueTypeStorage<'db> { + fn drop(&mut self) { + if !self.opaque_types.is_empty() { + panic!("{:?}", self.opaque_types) + } + } +} + +pub(crate) struct OpaqueTypeTable<'a, 'db> { + storage: &'a mut OpaqueTypeStorage<'db>, + + undo_log: &'a mut InferCtxtUndoLogs<'db>, +} +impl<'db> Deref for OpaqueTypeTable<'_, 'db> { + type Target = OpaqueTypeStorage<'db>; + fn deref(&self) -> &Self::Target { + self.storage + } +} + +impl<'a, 'db> OpaqueTypeTable<'a, 'db> { + #[instrument(skip(self), level = "debug")] + pub fn register( + &mut self, + key: OpaqueTypeKey<'db>, + hidden_type: OpaqueHiddenType<'db>, + ) -> Option> { + if let Some(entry) = self.storage.opaque_types.get_mut(&key) { + let prev = std::mem::replace(entry, hidden_type); + self.undo_log.push(UndoLog::OpaqueTypes(key, Some(prev))); + return Some(prev.ty); + } + self.storage.opaque_types.insert(key, hidden_type); + self.undo_log.push(UndoLog::OpaqueTypes(key, None)); + None + } + + pub fn add_duplicate(&mut self, key: OpaqueTypeKey<'db>, hidden_type: OpaqueHiddenType<'db>) { + self.storage.duplicate_entries.push((key, hidden_type)); + self.undo_log.push(UndoLog::DuplicateOpaqueType); + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/region_constraints/mod.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/region_constraints/mod.rs new file mode 100644 index 0000000000000..7f15a467b3e87 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/region_constraints/mod.rs @@ -0,0 +1,689 @@ +//! See `README.md`. + +use std::ops::Range; +use std::sync::Arc; +use std::{cmp, fmt, mem}; + +use ena::undo_log::{Rollback, UndoLogs}; +use ena::unify as ut; +use rustc_hash::FxHashMap; +use rustc_index::IndexVec; +use rustc_type_ir::inherent::IntoKind; +use rustc_type_ir::{RegionKind, RegionVid, UniverseIndex}; +use tracing::{debug, instrument}; + +use self::CombineMapType::*; +use self::UndoLog::*; +use super::MemberConstraint; +use super::unify_key::RegionVidKey; +use crate::next_solver::infer::snapshot::undo_log::{InferCtxtUndoLogs, Snapshot}; +use crate::next_solver::infer::unify_key::RegionVariableValue; +use crate::next_solver::{ + AliasTy, Binder, DbInterner, OpaqueTypeKey, ParamTy, PlaceholderTy, Region, Ty, +}; + +#[derive(Debug, Clone, Default)] +pub struct RegionConstraintStorage<'db> { + /// For each `RegionVid`, the corresponding `RegionVariableOrigin`. + pub(super) var_infos: IndexVec, + + pub(super) data: RegionConstraintData<'db>, + + /// For a given pair of regions (R1, R2), maps to a region R3 that + /// is designated as their LUB (edges R1 <= R3 and R2 <= R3 + /// exist). This prevents us from making many such regions. + lubs: CombineMap<'db>, + + /// For a given pair of regions (R1, R2), maps to a region R3 that + /// is designated as their GLB (edges R3 <= R1 and R3 <= R2 + /// exist). This prevents us from making many such regions. 
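+    ///
+    /// (Illustrative: `glb('a, 'b)` is some fresh region `'g` with `'g <= 'a`
+    /// and `'g <= 'b`; asking for the same pair again reuses the cached `'g`.)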
+ glbs: CombineMap<'db>, + + /// When we add a R1 == R2 constraint, we currently add (a) edges + /// R1 <= R2 and R2 <= R1 and (b) we unify the two regions in this + /// table. You can then call `opportunistic_resolve_var` early + /// which will map R1 and R2 to some common region (i.e., either + /// R1 or R2). This is important when fulfillment, dropck and other such + /// code is iterating to a fixed point, because otherwise we sometimes + /// would wind up with a fresh stream of region variables that have been + /// equated but appear distinct. + pub(super) unification_table: ut::UnificationTableStorage>, + + /// a flag set to true when we perform any unifications; this is used + /// to micro-optimize `take_and_reset_data` + any_unifications: bool, +} + +pub struct RegionConstraintCollector<'db, 'a> { + storage: &'a mut RegionConstraintStorage<'db>, + undo_log: &'a mut InferCtxtUndoLogs<'db>, +} + +pub type VarInfos = IndexVec; + +/// The full set of region constraints gathered up by the collector. +/// Describes constraints between the region variables and other +/// regions, as well as other conditions that must be verified, or +/// assumptions that can be made. +#[derive(Debug, Default, Clone)] +pub struct RegionConstraintData<'db> { + /// Constraints of the form `A <= B`, where either `A` or `B` can + /// be a region variable (or neither, as it happens). + pub constraints: Vec>, + + /// Constraints of the form `R0 member of [R1, ..., Rn]`, meaning that + /// `R0` must be equal to one of the regions `R1..Rn`. These occur + /// with `impl Trait` quite frequently. + pub member_constraints: Vec>, + + /// A "verify" is something that we need to verify after inference + /// is done, but which does not directly affect inference in any + /// way. + /// + /// An example is a `A <= B` where neither `A` nor `B` are + /// inference variables. + pub verifys: Vec>, +} + +/// Represents a constraint that influences the inference process. +#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub enum Constraint<'db> { + /// A region variable is a subregion of another. + VarSubVar(RegionVid, RegionVid), + + /// A concrete region is a subregion of region variable. + RegSubVar(Region<'db>, RegionVid), + + /// A region variable is a subregion of a concrete region. This does not + /// directly affect inference, but instead is checked after + /// inference is complete. + VarSubReg(RegionVid, Region<'db>), + + /// A constraint where neither side is a variable. This does not + /// directly affect inference, but instead is checked after + /// inference is complete. + RegSubReg(Region<'db>, Region<'db>), +} + +impl<'db> Constraint<'db> { + pub fn involves_placeholders(&self) -> bool { + match self { + Constraint::VarSubVar(_, _) => false, + Constraint::VarSubReg(_, r) | Constraint::RegSubVar(r, _) => r.is_placeholder(), + Constraint::RegSubReg(r, s) => r.is_placeholder() || s.is_placeholder(), + } + } +} + +#[derive(Debug, Clone)] +pub struct Verify<'db> { + pub kind: GenericKind<'db>, + pub region: Region<'db>, + pub bound: VerifyBound<'db>, +} + +#[derive(Clone, PartialEq, Eq, Hash)] +pub enum GenericKind<'db> { + Param(ParamTy), + Placeholder(PlaceholderTy), + Alias(AliasTy<'db>), +} + +/// Describes the things that some `GenericKind` value `G` is known to +/// outlive. Each variant of `VerifyBound` can be thought of as a +/// function: +/// ```ignore (pseudo-rust) +/// fn(min: Region) -> bool { .. } +/// ``` +/// where `true` means that the region `min` meets that `G: min`. +/// (False means nothing.) 
+/// +/// So, for example, if we have the type `T` and we have in scope that +/// `T: 'a` and `T: 'b`, then the verify bound might be: +/// ```ignore (pseudo-rust) +/// fn(min: Region) -> bool { +/// ('a: min) || ('b: min) +/// } +/// ``` +/// This is described with an `AnyRegion('a, 'b)` node. +#[derive(Debug, Clone)] +pub enum VerifyBound<'db> { + /// See [`VerifyIfEq`] docs + IfEq(Binder<'db, VerifyIfEq<'db>>), + + /// Given a region `R`, expands to the function: + /// + /// ```ignore (pseudo-rust) + /// fn(min) -> bool { + /// R: min + /// } + /// ``` + /// + /// This is used when we can establish that `G: R` -- therefore, + /// if `R: min`, then by transitivity `G: min`. + OutlivedBy(Region<'db>), + + /// Given a region `R`, true if it is `'empty`. + IsEmpty, + + /// Given a set of bounds `B`, expands to the function: + /// + /// ```ignore (pseudo-rust) + /// fn(min) -> bool { + /// exists (b in B) { b(min) } + /// } + /// ``` + /// + /// In other words, if we meet some bound in `B`, that suffices. + /// This is used when all the bounds in `B` are known to apply to `G`. + AnyBound(Vec>), + + /// Given a set of bounds `B`, expands to the function: + /// + /// ```ignore (pseudo-rust) + /// fn(min) -> bool { + /// forall (b in B) { b(min) } + /// } + /// ``` + /// + /// In other words, if we meet *all* bounds in `B`, that suffices. + /// This is used when *some* bound in `B` is known to suffice, but + /// we don't know which. + AllBounds(Vec>), +} + +/// This is a "conditional bound" that checks the result of inference +/// and supplies a bound if it ended up being relevant. It's used in situations +/// like this: +/// +/// ```rust,ignore (pseudo-Rust) +/// fn foo<'a, 'b, T: SomeTrait<'a>> +/// where +/// >::Item: 'b +/// ``` +/// +/// If we have an obligation like `>::Item: 'c`, then +/// we don't know yet whether it suffices to show that `'b: 'c`. If `'?x` winds +/// up being equal to `'a`, then the where-clauses on function applies, and +/// in that case we can show `'b: 'c`. But if `'?x` winds up being something +/// else, the bound isn't relevant. +/// +/// In the [`VerifyBound`], this struct is enclosed in `Binder` to account +/// for cases like +/// +/// ```rust,ignore (pseudo-Rust) +/// where for<'a> ::Item: 'a +/// ``` +/// +/// The idea is that we have to find some instantiation of `'a` that can +/// make `>::Item` equal to the final value of `G`, +/// the generic we are checking. +/// +/// ```ignore (pseudo-rust) +/// fn(min) -> bool { +/// exists<'a> { +/// if G == K { +/// B(min) +/// } else { +/// false +/// } +/// } +/// } +/// ``` +#[derive(Debug, Clone)] +pub struct VerifyIfEq<'db> { + /// Type which must match the generic `G` + pub ty: Ty<'db>, + + /// Bound that applies if `ty` is equal. + pub bound: Region<'db>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub(crate) struct TwoRegions<'db> { + a: Region<'db>, + b: Region<'db>, +} + +#[derive(Clone, PartialEq)] +pub(crate) enum UndoLog<'db> { + /// We added `RegionVid`. + AddVar(RegionVid), + + /// We added the given `constraint`. + AddConstraint(usize), + + /// We added the given `verify`. + AddVerify(usize), + + /// We added a GLB/LUB "combination variable". + AddCombination(CombineMapType, TwoRegions<'db>), +} + +#[derive(Clone, PartialEq)] +pub(crate) enum CombineMapType { + Lub, + Glb, +} + +type CombineMap<'db> = FxHashMap, RegionVid>; + +#[derive(Debug, Clone)] +pub struct RegionVariableInfo { + // FIXME: This is only necessary for `fn take_and_reset_data` and + // `lexical_region_resolve`. 
We should rework `lexical_region_resolve` + // in the near/medium future anyways and could move the universe info + // for `fn take_and_reset_data` into a separate table which is + // only populated when needed. + // + // For both of these cases it is fine that this can diverge from the + // actual universe of the variable, which is directly stored in the + // unification table for unknown region variables. At some point we could + // stop emitting bidirectional outlives constraints if equate succeeds. + // This would be currently unsound as it would cause us to drop the universe + // changes in `lexical_region_resolve`. + pub universe: UniverseIndex, +} + +pub(crate) struct RegionSnapshot { + any_unifications: bool, +} + +impl<'db> RegionConstraintStorage<'db> { + #[inline] + pub(crate) fn with_log<'a>( + &'a mut self, + undo_log: &'a mut InferCtxtUndoLogs<'db>, + ) -> RegionConstraintCollector<'db, 'a> { + RegionConstraintCollector { storage: self, undo_log } + } +} + +impl<'db> RegionConstraintCollector<'db, '_> { + pub fn num_region_vars(&self) -> usize { + self.storage.var_infos.len() + } + + pub fn region_constraint_data(&self) -> &RegionConstraintData<'db> { + &self.storage.data + } + + /// Takes (and clears) the current set of constraints. Note that + /// the set of variables remains intact, but all relationships + /// between them are reset. This is used during NLL checking to + /// grab the set of constraints that arose from a particular + /// operation. + /// + /// We don't want to leak relationships between variables between + /// points because just because (say) `r1 == r2` was true at some + /// point P in the graph doesn't imply that it will be true at + /// some other point Q, in NLL. + /// + /// Not legal during a snapshot. + pub fn take_and_reset_data(&mut self) -> RegionConstraintData<'db> { + assert!(!UndoLogs::<UndoLog<'_>>::in_snapshot(&self.undo_log)); + + // If you add a new field to `RegionConstraintCollector`, you + // should think carefully about whether it needs to be cleared + // or updated in some way. + let RegionConstraintStorage { + var_infos: _, + data, + lubs, + glbs, + unification_table: _, + any_unifications, + } = self.storage; + + // Clear the tables of (lubs, glbs), so that we will create + // fresh regions if we do a LUB operation. As it happens, + // LUB/GLB are not performed by the MIR type-checker, which is + // the one that uses this method, but it's good to be correct. + lubs.clear(); + glbs.clear(); + + let data = mem::take(data); + + // Clear all unifications and recreate the variables in a "now + // un-unified" state. Note that when we unify `a` and `b`, we + // also insert `a <= b` and `b <= a` edges, so the + // `RegionConstraintData` contains the relationship here. + if *any_unifications { + *any_unifications = false; + // Manually inlined `self.unification_table_mut()` as `self` is used in the closure.
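+ // `reset_unifications` keeps every region variable but resets its value to `RegionVariableValue::Unknown`, reusing the universe recorded in `var_infos`.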
+ ut::UnificationTable::with_log(&mut self.storage.unification_table, &mut self.undo_log) + .reset_unifications(|key| RegionVariableValue::Unknown { + universe: self.storage.var_infos[key.vid].universe, + }); + } + + data + } + + pub fn data(&self) -> &RegionConstraintData<'db> { + &self.storage.data + } + + pub(super) fn start_snapshot(&self) -> RegionSnapshot { + debug!("RegionConstraintCollector: start_snapshot"); + RegionSnapshot { any_unifications: self.storage.any_unifications } + } + + pub(super) fn rollback_to(&mut self, snapshot: RegionSnapshot) { + debug!("RegionConstraintCollector: rollback_to({:?})", snapshot); + self.storage.any_unifications = snapshot.any_unifications; + } + + pub(super) fn new_region_var(&mut self, universe: UniverseIndex) -> RegionVid { + let vid = self.storage.var_infos.push(RegionVariableInfo { universe }); + + let u_vid = self.unification_table_mut().new_key(RegionVariableValue::Unknown { universe }); + assert_eq!(vid, u_vid.vid); + self.undo_log.push(AddVar(vid)); + debug!("created new region variable {:?} in {:?}", vid, universe); + vid + } + + fn add_constraint(&mut self, constraint: Constraint<'db>) { + // cannot add constraints once regions are resolved + debug!("RegionConstraintCollector: add_constraint({:?})", constraint); + + let index = self.storage.data.constraints.len(); + self.storage.data.constraints.push(constraint); + self.undo_log.push(AddConstraint(index)); + } + + pub(super) fn make_eqregion(&mut self, a: Region<'db>, b: Region<'db>) { + if a != b { + // Eventually, it would be nice to add direct support for + // equating regions. + self.make_subregion(a, b); + self.make_subregion(b, a); + + match (a.kind(), b.kind()) { + (RegionKind::ReVar(a), RegionKind::ReVar(b)) => { + debug!("make_eqregion: unifying {:?} with {:?}", a, b); + if self.unification_table_mut().unify_var_var(a, b).is_ok() { + self.storage.any_unifications = true; + } + } + (RegionKind::ReVar(vid), _) => { + debug!("make_eqregion: unifying {:?} with {:?}", vid, b); + if self + .unification_table_mut() + .unify_var_value(vid, RegionVariableValue::Known { value: b }) + .is_ok() + { + self.storage.any_unifications = true; + }; + } + (_, RegionKind::ReVar(vid)) => { + debug!("make_eqregion: unifying {:?} with {:?}", a, vid); + if self + .unification_table_mut() + .unify_var_value(vid, RegionVariableValue::Known { value: a }) + .is_ok() + { + self.storage.any_unifications = true; + }; + } + (_, _) => {} + } + } + } + + #[instrument(skip(self), level = "debug")] + pub(super) fn make_subregion(&mut self, sub: Region<'db>, sup: Region<'db>) { + // cannot add constraints once regions are resolved + + match (sub.kind(), sup.kind()) { + (RegionKind::ReBound(..), _) | (_, RegionKind::ReBound(..)) => { + panic!("cannot relate bound region: {sub:?} <= {sup:?}"); + } + (_, RegionKind::ReStatic) => { + // all regions are subregions of static, so we can ignore this + } + (RegionKind::ReVar(sub_id), RegionKind::ReVar(sup_id)) => { + self.add_constraint(Constraint::VarSubVar(sub_id, sup_id)); + } + (_, RegionKind::ReVar(sup_id)) => { + self.add_constraint(Constraint::RegSubVar(sub, sup_id)); + } + (RegionKind::ReVar(sub_id), _) => { + self.add_constraint(Constraint::VarSubReg(sub_id, sup)); + } + _ => { + self.add_constraint(Constraint::RegSubReg(sub, sup)); + } + } + } + + pub(super) fn lub_regions( + &mut self, + db: DbInterner<'db>, + a: Region<'db>, + b: Region<'db>, + ) -> Region<'db> { + // cannot add constraints once regions are resolved + debug!("RegionConstraintCollector: 
lub_regions({:?}, {:?})", a, b); + #[expect(clippy::if_same_then_else)] + if a.is_static() || b.is_static() { + a // nothing lives longer than static + } else if a == b { + a // LUB(a,a) = a + } else { + self.combine_vars(db, Lub, a, b) + } + } + + pub(super) fn glb_regions( + &mut self, + db: DbInterner<'db>, + a: Region<'db>, + b: Region<'db>, + ) -> Region<'db> { + // cannot add constraints once regions are resolved + debug!("RegionConstraintCollector: glb_regions({:?}, {:?})", a, b); + #[expect(clippy::if_same_then_else)] + if a.is_static() { + b // static lives longer than everything else + } else if b.is_static() { + a // static lives longer than everything else + } else if a == b { + a // GLB(a,a) = a + } else { + self.combine_vars(db, Glb, a, b) + } + } + + /// Resolves a region var to its value in the unification table, if it exists. + /// Otherwise, it is resolved to the root `ReVar` in the table. + pub fn opportunistic_resolve_var( + &mut self, + cx: DbInterner<'db>, + vid: RegionVid, + ) -> Region<'db> { + let mut ut = self.unification_table_mut(); + let root_vid = ut.find(vid).vid; + match ut.probe_value(root_vid) { + RegionVariableValue::Known { value } => value, + RegionVariableValue::Unknown { .. } => Region::new_var(cx, root_vid), + } + } + + pub fn probe_value(&mut self, vid: RegionVid) -> Result, UniverseIndex> { + match self.unification_table_mut().probe_value(vid) { + RegionVariableValue::Known { value } => Ok(value), + RegionVariableValue::Unknown { universe } => Err(universe), + } + } + + fn combine_map(&mut self, t: CombineMapType) -> &mut CombineMap<'db> { + match t { + Glb => &mut self.storage.glbs, + Lub => &mut self.storage.lubs, + } + } + + fn combine_vars( + &mut self, + cx: DbInterner<'db>, + t: CombineMapType, + a: Region<'db>, + b: Region<'db>, + ) -> Region<'db> { + let vars = TwoRegions { a, b }; + if let Some(c) = self.combine_map(t.clone()).get(&vars) { + return Region::new_var(cx, *c); + } + let a_universe = self.universe(a); + let b_universe = self.universe(b); + let c_universe = cmp::max(a_universe, b_universe); + let c = self.new_region_var(c_universe); + self.combine_map(t.clone()).insert(vars.clone(), c); + self.undo_log.push(AddCombination(t.clone(), vars)); + let new_r = Region::new_var(cx, c); + for old_r in [a, b] { + match t { + Glb => self.make_subregion(new_r, old_r), + Lub => self.make_subregion(old_r, new_r), + } + } + debug!("combine_vars() c={:?}", c); + new_r + } + + pub fn universe(&mut self, region: Region<'db>) -> UniverseIndex { + match region.kind() { + RegionKind::ReStatic + | RegionKind::ReErased + | RegionKind::ReLateParam(..) + | RegionKind::ReEarlyParam(..) + | RegionKind::ReError(_) => UniverseIndex::ROOT, + RegionKind::RePlaceholder(placeholder) => placeholder.universe, + RegionKind::ReVar(vid) => match self.probe_value(vid) { + Ok(value) => self.universe(value), + Err(universe) => universe, + }, + RegionKind::ReBound(..) => panic!("universe(): encountered bound region {region:?}"), + } + } + + pub fn vars_since_snapshot(&self, value_count: usize) -> Range { + RegionVid::from(value_count)..RegionVid::from(self.storage.unification_table.len()) + } + + /// See `InferCtxt::region_constraints_added_in_snapshot`. 
+ pub fn region_constraints_added_in_snapshot(&self, mark: &Snapshot) -> bool { + self.undo_log + .region_constraints_in_snapshot(mark) + .any(|elt| matches!(elt, AddConstraint(_))) + } + + #[inline] + fn unification_table_mut(&mut self) -> super::UnificationTable<'_, 'db, RegionVidKey<'db>> { + ut::UnificationTable::with_log(&mut self.storage.unification_table, self.undo_log) + } +} + +impl fmt::Debug for RegionSnapshot { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "RegionSnapshot") + } +} + +impl<'db> fmt::Debug for GenericKind<'db> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + GenericKind::Param(ref p) => write!(f, "{p:?}"), + GenericKind::Placeholder(ref p) => write!(f, "{p:?}"), + GenericKind::Alias(ref p) => write!(f, "{p:?}"), + } + } +} + +impl<'db> fmt::Display for GenericKind<'db> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + GenericKind::Param(ref p) => write!(f, "{p:?}"), + GenericKind::Placeholder(ref p) => write!(f, "{p:?}"), + GenericKind::Alias(ref p) => write!(f, "{p}"), + } + } +} + +impl<'db> GenericKind<'db> { + pub fn to_ty(&self, interner: DbInterner<'db>) -> Ty<'db> { + match *self { + GenericKind::Param(ref p) => (*p).to_ty(interner), + GenericKind::Placeholder(ref p) => Ty::new_placeholder(interner, *p), + GenericKind::Alias(ref p) => (*p).to_ty(interner), + } + } +} + +impl<'db> VerifyBound<'db> { + pub fn must_hold(&self) -> bool { + match self { + VerifyBound::IfEq(..) => false, + VerifyBound::OutlivedBy(re) => re.is_static(), + VerifyBound::IsEmpty => false, + VerifyBound::AnyBound(bs) => bs.iter().any(|b| b.must_hold()), + VerifyBound::AllBounds(bs) => bs.iter().all(|b| b.must_hold()), + } + } + + pub fn cannot_hold(&self) -> bool { + match self { + VerifyBound::IfEq(..) => false, + VerifyBound::IsEmpty => false, + VerifyBound::OutlivedBy(_) => false, + VerifyBound::AnyBound(bs) => bs.iter().all(|b| b.cannot_hold()), + VerifyBound::AllBounds(bs) => bs.iter().any(|b| b.cannot_hold()), + } + } + + pub fn or(self, vb: VerifyBound<'db>) -> VerifyBound<'db> { + if self.must_hold() || vb.cannot_hold() { + self + } else if self.cannot_hold() || vb.must_hold() { + vb + } else { + VerifyBound::AnyBound(vec![self, vb]) + } + } +} + +impl<'db> RegionConstraintData<'db> { + /// Returns `true` if this region constraint data contains no constraints, and `false` + /// otherwise. 
+ pub fn is_empty(&self) -> bool { + let RegionConstraintData { constraints, member_constraints, verifys } = self; + constraints.is_empty() && member_constraints.is_empty() && verifys.is_empty() + } +} + +impl<'db> Rollback> for RegionConstraintStorage<'db> { + fn reverse(&mut self, undo: UndoLog<'db>) { + match undo { + AddVar(vid) => { + self.var_infos.pop().unwrap(); + assert_eq!(self.var_infos.len(), vid.index()); + } + AddConstraint(index) => { + self.data.constraints.pop().unwrap(); + assert_eq!(self.data.constraints.len(), index); + } + AddVerify(index) => { + self.data.verifys.pop(); + assert_eq!(self.data.verifys.len(), index); + } + AddCombination(Glb, ref regions) => { + self.glbs.remove(regions); + } + AddCombination(Lub, ref regions) => { + self.lubs.remove(regions); + } + } + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/generalize.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/generalize.rs new file mode 100644 index 0000000000000..7e2735db3b77a --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/generalize.rs @@ -0,0 +1,720 @@ +//! Type generation code. + +use std::mem; + +use rustc_hash::FxHashMap; +use rustc_type_ir::error::TypeError; +use rustc_type_ir::inherent::{Const as _, IntoKind, Ty as _}; +use rustc_type_ir::relate::VarianceDiagInfo; +use rustc_type_ir::{ + AliasRelationDirection, AliasTyKind, ConstVid, InferConst, InferCtxtLike, InferTy, RegionKind, + TermKind, TyVid, UniverseIndex, Variance, +}; +use rustc_type_ir::{Interner, TypeVisitable, TypeVisitableExt}; +use tracing::{debug, instrument, warn}; + +use super::{ + PredicateEmittingRelation, Relate, RelateResult, StructurallyRelateAliases, TypeRelation, +}; +use crate::next_solver::infer::type_variable::TypeVariableValue; +use crate::next_solver::infer::unify_key::ConstVariableValue; +use crate::next_solver::infer::{InferCtxt, relate}; +use crate::next_solver::util::MaxUniverse; +use crate::next_solver::{ + AliasTy, Binder, ClauseKind, Const, ConstKind, DbInterner, GenericArgs, PredicateKind, + ProjectionPredicate, Region, SolverDefId, Term, TermVid, Ty, TyKind, TypingMode, + UnevaluatedConst, +}; + +impl<'db> InferCtxt<'db> { + /// The idea is that we should ensure that the type variable `target_vid` + /// is equal to, a subtype of, or a supertype of `source_ty`. + /// + /// For this, we will instantiate `target_vid` with a *generalized* version + /// of `source_ty`. Generalization introduces other inference variables wherever + /// subtyping could occur. This also does the occurs checks, detecting whether + /// instantiating `target_vid` would result in a cyclic type. We eagerly error + /// in this case. + /// + /// This is *not* expected to be used anywhere except for an implementation of + /// `TypeRelation`. Do not use this, and instead please use `At::eq`, for all + /// other usecases (i.e. setting the value of a type var). + #[instrument(level = "debug", skip(self, relation))] + pub fn instantiate_ty_var>>( + &self, + relation: &mut R, + target_is_expected: bool, + target_vid: TyVid, + instantiation_variance: Variance, + source_ty: Ty<'db>, + ) -> RelateResult<'db, ()> { + debug_assert!(self.inner.borrow_mut().type_variables().probe(target_vid).is_unknown()); + + // Generalize `source_ty` depending on the current variance. As an example, assume + // `?target <: &'x ?1`, where `'x` is some free region and `?1` is an inference + // variable. 
+ // + // Then the `generalized_ty` would be `&'?2 ?3`, where `'?2` and `?3` are fresh + // region/type inference variables. + // + // We then relate `generalized_ty <: source_ty`, adding constraints like `'x: '?2` and + // `?1 <: ?3`. + let Generalization { value_may_be_infer: generalized_ty, has_unconstrained_ty_var } = self + .generalize( + relation.structurally_relate_aliases(), + target_vid, + instantiation_variance, + source_ty, + )?; + + // Constrain `b_vid` to the generalized type `generalized_ty`. + if let TyKind::Infer(InferTy::TyVar(generalized_vid)) = generalized_ty.kind() { + self.inner.borrow_mut().type_variables().equate(target_vid, generalized_vid); + } else { + self.inner.borrow_mut().type_variables().instantiate(target_vid, generalized_ty); + } + + // See the comment on `Generalization::has_unconstrained_ty_var`. + if has_unconstrained_ty_var { + relation.register_predicates([ClauseKind::WellFormed(generalized_ty.into())]); + } + + // Finally, relate `generalized_ty` to `source_ty`, as described in previous comment. + // + // FIXME(#16847): This code is non-ideal because all these subtype + // relations wind up attributed to the same spans. We need + // to associate causes/spans with each of the relations in + // the stack to get this right. + if generalized_ty.is_ty_var() { + // This happens for cases like `::Assoc == ?0`. + // We can't instantiate `?0` here as that would result in a + // cyclic type. We instead delay the unification in case + // the alias can be normalized to something which does not + // mention `?0`. + let (lhs, rhs, direction) = match instantiation_variance { + Variance::Invariant => { + (generalized_ty.into(), source_ty.into(), AliasRelationDirection::Equate) + } + Variance::Covariant => { + (generalized_ty.into(), source_ty.into(), AliasRelationDirection::Subtype) + } + Variance::Contravariant => { + (source_ty.into(), generalized_ty.into(), AliasRelationDirection::Subtype) + } + Variance::Bivariant => unreachable!("bivariant generalization"), + }; + + relation.register_predicates([PredicateKind::AliasRelate(lhs, rhs, direction)]); + } else { + // NOTE: The `instantiation_variance` is not the same variance as + // used by the relation. When instantiating `b`, `target_is_expected` + // is flipped and the `instantiation_variance` is also flipped. To + // constrain the `generalized_ty` while using the original relation, + // we therefore only have to flip the arguments. + // + // ```ignore (not code) + // ?a rel B + // instantiate_ty_var(?a, B) # expected and variance not flipped + // B' rel B + // ``` + // or + // ```ignore (not code) + // A rel ?b + // instantiate_ty_var(?b, A) # expected and variance flipped + // A rel A' + // ``` + if target_is_expected { + relation.relate(generalized_ty, source_ty)?; + } else { + debug!("flip relation"); + relation.relate(source_ty, generalized_ty)?; + } + } + + Ok(()) + } + + /// Instantiates the const variable `target_vid` with the given constant. + /// + /// This also tests if the given const `ct` contains an inference variable which was previously + /// unioned with `target_vid`. If this is the case, inferring `target_vid` to `ct` + /// would result in an infinite type as we continuously replace an inference variable + /// in `ct` with `ct` itself. + /// + /// This is especially important as unevaluated consts use their parents generics. + /// They therefore often contain unused args, making these errors far more likely. 
+ /// + /// A good example of this is the following: + /// + /// ```compile_fail,E0308 + /// #![feature(generic_const_exprs)] + /// + /// fn bind<const N: usize>(value: [u8; N]) -> [u8; 3 + 4] { + /// todo!() + /// } + /// + /// fn main() { + /// let mut arr = Default::default(); + /// arr = bind(arr); + /// } + /// ``` + /// + /// Here `3 + 4` ends up as `ConstKind::Unevaluated` which uses the generics + /// of `fn bind` (meaning that its args contain `N`). + /// + /// `bind(arr)` now infers that the type of `arr` must be `[u8; N]`. + /// The assignment `arr = bind(arr)` now tries to equate `N` with `3 + 4`. + /// + /// As `3 + 4` contains `N` in its args, this must not succeed. + /// + /// See `tests/ui/const-generics/occurs-check/` for more examples where this is relevant. + #[instrument(level = "debug", skip(self, relation))] + pub(crate) fn instantiate_const_var<R: PredicateEmittingRelation<InferCtxt<'db>>>( + &self, + relation: &mut R, + target_is_expected: bool, + target_vid: ConstVid, + source_ct: Const<'db>, + ) -> RelateResult<'db, ()> { + // FIXME(generic_const_exprs): Occurs check failures for unevaluated + // constants and generic expressions are not yet handled correctly. + let Generalization { value_may_be_infer: generalized_ct, has_unconstrained_ty_var } = self + .generalize( + relation.structurally_relate_aliases(), + target_vid, + Variance::Invariant, + source_ct, + )?; + + debug_assert!(!generalized_ct.is_ct_infer()); + if has_unconstrained_ty_var { + panic!("unconstrained ty var when generalizing `{source_ct:?}`"); + } + + self.inner + .borrow_mut() + .const_unification_table() + .union_value(target_vid, ConstVariableValue::Known { value: generalized_ct }); + + // Make sure that the order is correct when relating the + // generalized const and the source. + if target_is_expected { + relation.relate_with_variance( + Variance::Invariant, + VarianceDiagInfo::default(), + generalized_ct, + source_ct, + )?; + } else { + relation.relate_with_variance( + Variance::Invariant, + VarianceDiagInfo::default(), + source_ct, + generalized_ct, + )?; + } + + Ok(()) + } + + /// Attempts to generalize `source_term` for the type variable `target_vid`. + /// This checks for cycles -- that is, whether `source_term` references `target_vid`. + fn generalize<T: Into<Term<'db>> + Relate<DbInterner<'db>>>( + &self, + structurally_relate_aliases: StructurallyRelateAliases, + target_vid: impl Into<TermVid>, + ambient_variance: Variance, + source_term: T, + ) -> RelateResult<'db, Generalization<T>> { + assert!(!source_term.clone().has_escaping_bound_vars()); + let (for_universe, root_vid) = match target_vid.into() { + TermVid::Ty(ty_vid) => { + (self.probe_ty_var(ty_vid).unwrap_err(), TermVid::Ty(self.root_var(ty_vid))) + } + TermVid::Const(ct_vid) => ( + self.probe_const_var(ct_vid).unwrap_err(), + TermVid::Const(self.inner.borrow_mut().const_unification_table().find(ct_vid).vid), + ), + }; + + let mut generalizer = Generalizer { + infcx: self, + structurally_relate_aliases, + root_vid, + for_universe, + root_term: source_term.into(), + ambient_variance, + in_alias: false, + cache: Default::default(), + has_unconstrained_ty_var: false, + }; + + let value_may_be_infer = generalizer.relate(source_term, source_term)?; + let has_unconstrained_ty_var = generalizer.has_unconstrained_ty_var; + Ok(Generalization { value_may_be_infer, has_unconstrained_ty_var }) + } +} + +/// The "generalizer" is used when handling inference variables.
+/// +/// The basic strategy for handling a constraint like `?A <: B` is to +/// apply a "generalization strategy" to the term `B` -- this replaces +/// all the lifetimes in the term `B` with fresh inference variables. +/// (You can read more about the strategy in this [blog post].) +/// +/// As an example, if we had `?A <: &'x u32`, we would generalize `&'x +/// u32` to `&'0 u32` where `'0` is a fresh variable. This becomes the +/// value of `A`. Finally, we relate `&'0 u32 <: &'x u32`, which +/// establishes `'0: 'x` as a constraint. +/// +/// [blog post]: https://is.gd/0hKvIr +struct Generalizer<'me, 'db> { + infcx: &'me InferCtxt<'db>, + + /// Whether aliases should be related structurally. If not, we have to + /// be careful when generalizing aliases. + structurally_relate_aliases: StructurallyRelateAliases, + + /// The vid of the type variable that is in the process of being + /// instantiated. If we find this within the value we are folding, + /// that means we would have created a cyclic value. + root_vid: TermVid, + + /// The universe of the type variable that is in the process of being + /// instantiated. If we find anything that this universe cannot name, + /// we reject the relation. + for_universe: UniverseIndex, + + /// The root term (const or type) we're generalizing. Used for cycle errors. + root_term: Term<'db>, + + /// After we generalize this type, we are going to relate it to + /// some other type. What will be the variance at this point? + ambient_variance: Variance, + + /// This is set once we're generalizing the arguments of an alias. + /// + /// This is necessary to correctly handle + /// `::Assoc>::Assoc == ?0`. This equality can + /// hold by either normalizing the outer or the inner associated type. + in_alias: bool, + + cache: FxHashMap<(Ty<'db>, Variance, bool), Ty<'db>>, + + /// See the field `has_unconstrained_ty_var` in `Generalization`. + has_unconstrained_ty_var: bool, +} + +impl<'db> Generalizer<'_, 'db> { + /// Create an error that corresponds to the term kind in `root_term` + fn cyclic_term_error(&self) -> TypeError> { + match self.root_term.kind() { + TermKind::Ty(ty) => TypeError::CyclicTy(ty), + TermKind::Const(ct) => TypeError::CyclicConst(ct), + } + } + + /// Create a new type variable in the universe of the target when + /// generalizing an alias. This has to set `has_unconstrained_ty_var` + /// if we're currently in a bivariant context. + fn next_ty_var_for_alias(&mut self) -> Ty<'db> { + self.has_unconstrained_ty_var |= self.ambient_variance == Variance::Bivariant; + self.infcx.next_ty_var_in_universe(self.for_universe) + } + + /// An occurs check failure inside of an alias does not mean + /// that the types definitely don't unify. We may be able + /// to normalize the alias after all. + /// + /// We handle this by lazily equating the alias and generalizing + /// it to an inference variable. In the new solver, we always + /// generalize to an infer var unless the alias contains escaping + /// bound variables. + /// + /// Correctly handling aliases with escaping bound variables is + /// difficult and currently incomplete in two opposite ways: + /// - if we get an occurs check failure in the alias, replace it with a new infer var. + /// This causes us to later emit an alias-relate goal and is incomplete in case the + /// alias normalizes to type containing one of the bound variables. + /// - if the alias contains an inference variable not nameable by `for_universe`, we + /// continue generalizing the alias. 
This ends up pulling down the universe of the + /// inference variable and is incomplete in case the alias would normalize to a type + /// which does not mention that inference variable. + fn generalize_alias_ty( + &mut self, + alias: AliasTy<'db>, + ) -> Result, TypeError>> { + // We do not eagerly replace aliases with inference variables if they have + // escaping bound vars, see the method comment for details. However, when we + // are inside of an alias with escaping bound vars replacing nested aliases + // with inference variables can cause incorrect ambiguity. + // + // cc trait-system-refactor-initiative#110 + if !alias.has_escaping_bound_vars() && !self.in_alias { + return Ok(self.next_ty_var_for_alias()); + } + + let is_nested_alias = mem::replace(&mut self.in_alias, true); + let result = match self.relate(alias, alias) { + Ok(alias) => Ok(alias.to_ty(self.cx())), + Err(e) => { + if is_nested_alias { + return Err(e); + } else { + let mut visitor = MaxUniverse::new(); + alias.visit_with(&mut visitor); + let infer_replacement_is_complete = + self.for_universe.can_name(visitor.max_universe()) + && !alias.has_escaping_bound_vars(); + if !infer_replacement_is_complete { + warn!("may incompletely handle alias type: {alias:?}"); + } + + debug!("generalization failure in alias"); + Ok(self.next_ty_var_for_alias()) + } + } + }; + self.in_alias = is_nested_alias; + result + } +} + +impl<'db> TypeRelation> for Generalizer<'_, 'db> { + fn cx(&self) -> DbInterner<'db> { + self.infcx.interner + } + + fn relate_item_args( + &mut self, + item_def_id: SolverDefId, + a_arg: GenericArgs<'db>, + b_arg: GenericArgs<'db>, + ) -> RelateResult<'db, GenericArgs<'db>> { + if self.ambient_variance == Variance::Invariant { + // Avoid fetching the variance if we are in an invariant + // context; no need, and it can induce dependency cycles + // (e.g., #41849). + relate::relate_args_invariantly(self, a_arg, b_arg) + } else { + let tcx = self.cx(); + let opt_variances = tcx.variances_of(item_def_id); + relate::relate_args_with_variances( + self, + item_def_id, + opt_variances, + a_arg, + b_arg, + false, + ) + } + } + + #[instrument(level = "debug", skip(self, variance, b), ret)] + fn relate_with_variance>>( + &mut self, + variance: Variance, + _info: VarianceDiagInfo>, + a: T, + b: T, + ) -> RelateResult<'db, T> { + let old_ambient_variance = self.ambient_variance; + self.ambient_variance = self.ambient_variance.xform(variance); + debug!(?self.ambient_variance, "new ambient variance"); + // Recursive calls to `relate` can overflow the stack. For example a deeper version of + // `ui/associated-consts/issue-93775.rs`. + let r = self.relate(a, b); + self.ambient_variance = old_ambient_variance; + r + } + + #[instrument(level = "debug", skip(self, t2), ret)] + fn tys(&mut self, t: Ty<'db>, t2: Ty<'db>) -> RelateResult<'db, Ty<'db>> { + assert_eq!(t, t2); // we are misusing TypeRelation here; both LHS and RHS ought to be == + + if let Some(result) = self.cache.get(&(t, self.ambient_variance, self.in_alias)) { + return Ok(*result); + } + + // Check to see whether the type we are generalizing references + // any other type variable related to `vid` via + // subtyping. This is basically our "occurs check", preventing + // us from creating infinitely sized types. 
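+ // For example, given `?target <: &'x ?1`, the match below generalizes `&'x ?1` to `&'?2 ?3` with fresh region/type inference variables, while encountering `?target` itself (the `root_vid`) in the value being generalized is reported as a cyclic term error.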
+ let g = match t.kind() { + TyKind::Infer( + InferTy::FreshTy(_) | InferTy::FreshIntTy(_) | InferTy::FreshFloatTy(_), + ) => { + panic!("unexpected infer type: {t:?}") + } + + TyKind::Infer(InferTy::TyVar(vid)) => { + let mut inner = self.infcx.inner.borrow_mut(); + let vid = inner.type_variables().root_var(vid); + if TermVid::Ty(vid) == self.root_vid { + // If sub-roots are equal, then `root_vid` and + // `vid` are related via subtyping. + Err(self.cyclic_term_error()) + } else { + let probe = inner.type_variables().probe(vid); + match probe { + TypeVariableValue::Known { value: u } => { + drop(inner); + self.relate(u, u) + } + TypeVariableValue::Unknown { universe } => { + match self.ambient_variance { + // Invariant: no need to make a fresh type variable + // if we can name the universe. + Variance::Invariant => { + if self.for_universe.can_name(universe) { + return Ok(t); + } + } + + // Bivariant: make a fresh var, but remember that + // it is unconstrained. See the comment in + // `Generalization`. + Variance::Bivariant => self.has_unconstrained_ty_var = true, + + // Co/contravariant: this will be + // sufficiently constrained later on. + Variance::Covariant | Variance::Contravariant => (), + } + + let origin = inner.type_variables().var_origin(vid); + let new_var_id = + inner.type_variables().new_var(self.for_universe, origin); + // If we're in the new solver and create a new inference + // variable inside of an alias we eagerly constrain that + // inference variable to prevent unexpected ambiguity errors. + // + // This is incomplete as it pulls down the universe of the + // original inference variable, even though the alias could + // normalize to a type which does not refer to that type at + // all. I don't expect this to cause unexpected errors in + // practice. + // + // We only need to do so for type and const variables, as + // region variables do not impact normalization, and will get + // correctly constrained by `AliasRelate` later on. + // + // cc trait-system-refactor-initiative#108 + if self.infcx.next_trait_solver() + && !matches!( + self.infcx.typing_mode_unchecked(), + TypingMode::Coherence + ) + && self.in_alias + { + inner.type_variables().equate(vid, new_var_id); + } + + debug!("replacing original vid={:?} with new={:?}", vid, new_var_id); + Ok(Ty::new_var(self.infcx.interner, new_var_id)) + } + } + } + } + + TyKind::Infer(InferTy::IntVar(_) | InferTy::FloatVar(_)) => { + // No matter what mode we are in, + // integer/floating-point types must be equal to be + // relatable. + Ok(t) + } + + TyKind::Placeholder(placeholder) => { + if self.for_universe.can_name(placeholder.universe) { + Ok(t) + } else { + debug!( + "root universe {:?} cannot name placeholder in universe {:?}", + self.for_universe, placeholder.universe + ); + Err(TypeError::Mismatch) + } + } + + TyKind::Alias(_, data) => match self.structurally_relate_aliases { + StructurallyRelateAliases::No => self.generalize_alias_ty(data), + StructurallyRelateAliases::Yes => relate::structurally_relate_tys(self, t, t), + }, + + _ => relate::structurally_relate_tys(self, t, t), + }?; + + self.cache.insert((t, self.ambient_variance, self.in_alias), g); + Ok(g) + } + + #[instrument(level = "debug", skip(self, r2), ret)] + fn regions(&mut self, r: Region<'db>, r2: Region<'db>) -> RelateResult<'db, Region<'db>> { + assert_eq!(r, r2); // we are misusing TypeRelation here; both LHS and RHS ought to be == + + match r.kind() { + // Never make variables for regions bound within the type itself, + // nor for erased regions. 
+ RegionKind::ReBound(..) | RegionKind::ReErased => { + return Ok(r); + } + + // It doesn't really matter for correctness if we generalize ReError, + // since we're already on a doomed compilation path. + RegionKind::ReError(_) => { + return Ok(r); + } + + RegionKind::RePlaceholder(..) + | RegionKind::ReVar(..) + | RegionKind::ReStatic + | RegionKind::ReEarlyParam(..) + | RegionKind::ReLateParam(..) => { + // see common code below + } + } + + // If we are in an invariant context, we can re-use the region + // as is, unless it happens to be in some universe that we + // can't name. + if let Variance::Invariant = self.ambient_variance { + let r_universe = self.infcx.universe_of_region(r); + if self.for_universe.can_name(r_universe) { + return Ok(r); + } + } + + Ok(self.infcx.next_region_var_in_universe(self.for_universe)) + } + + #[instrument(level = "debug", skip(self, c2), ret)] + fn consts(&mut self, c: Const<'db>, c2: Const<'db>) -> RelateResult<'db, Const<'db>> { + assert_eq!(c, c2); // we are misusing TypeRelation here; both LHS and RHS ought to be == + + match c.kind() { + ConstKind::Infer(InferConst::Var(vid)) => { + // If root const vids are equal, then `root_vid` and + // `vid` are related and we'd be inferring an infinitely + // deep const. + if TermVid::Const( + self.infcx.inner.borrow_mut().const_unification_table().find(vid).vid, + ) == self.root_vid + { + return Err(self.cyclic_term_error()); + } + + let mut inner = self.infcx.inner.borrow_mut(); + let variable_table = &mut inner.const_unification_table(); + match variable_table.probe_value(vid) { + ConstVariableValue::Known { value: u } => { + drop(inner); + self.relate(u, u) + } + ConstVariableValue::Unknown { origin, universe } => { + if self.for_universe.can_name(universe) { + Ok(c) + } else { + let new_var_id = variable_table + .new_key(ConstVariableValue::Unknown { + origin, + universe: self.for_universe, + }) + .vid; + + // See the comment for type inference variables + // for more details. + if self.infcx.next_trait_solver() + && !matches!( + self.infcx.typing_mode_unchecked(), + TypingMode::Coherence + ) + && self.in_alias + { + variable_table.union(vid, new_var_id); + } + Ok(Const::new_var(self.infcx.interner, new_var_id)) + } + } + } + } + // FIXME: Unevaluated constants are also not rigid, so the current + // approach of always relating them structurally is incomplete. + // + // FIXME: remove this branch once `structurally_relate_consts` is fully + // structural. + ConstKind::Unevaluated(UnevaluatedConst { def, args }) => { + let args = self.relate_with_variance( + Variance::Invariant, + VarianceDiagInfo::default(), + args, + args, + )?; + Ok(Const::new_unevaluated(self.infcx.interner, UnevaluatedConst { def, args })) + } + ConstKind::Placeholder(placeholder) => { + if self.for_universe.can_name(placeholder.universe) { + Ok(c) + } else { + debug!( + "root universe {:?} cannot name placeholder in universe {:?}", + self.for_universe, placeholder.universe + ); + Err(TypeError::Mismatch) + } + } + _ => relate::structurally_relate_consts(self, c, c), + } + } + + #[instrument(level = "debug", skip(self), ret)] + fn binders( + &mut self, + a: Binder<'db, T>, + _: Binder<'db, T>, + ) -> RelateResult<'db, Binder<'db, T>> + where + T: Relate>, + { + let result = self.relate(a.skip_binder(), a.skip_binder())?; + Ok(a.rebind(result)) + } +} + +/// Result from a generalization operation. This includes +/// not only the generalized type, but also a bool flag +/// indicating whether further WF checks are needed. 
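+// Returned by `InferCtxt::generalize` above: `value_may_be_infer` is the generalized term that `instantiate_ty_var`/`instantiate_const_var` then relate back to the source, and `has_unconstrained_ty_var` is what makes `instantiate_ty_var` register the extra `WellFormed` obligation.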
+#[derive(Debug)] +struct Generalization<T> { + /// When generalizing `<?0 as Trait>::Assoc` or + /// `<T as Bar<<?0 as Foo>::Assoc>>::Assoc` + /// for `?0` generalization returns an inference + /// variable. + /// + /// This has to be handled with care as it can + /// otherwise very easily result in infinite + /// recursion. + pub value_may_be_infer: T, + + /// In general, we do not check whether all types which occur during + /// type checking are well-formed. We only check wf of user-provided types + /// and when actually using a type, e.g. for method calls. + /// + /// This means that when subtyping, we may end up with unconstrained + /// inference variables if a generalized type has bivariant parameters. + /// A parameter may only be bivariant if it is constrained by a projection + /// bound in a where-clause. As an example, imagine a type: + /// + /// struct Foo<A, B> where A: Iterator<Item = B> { + /// data: A + /// } + /// + /// here, `A` will be covariant, but `B` is unconstrained. + /// + /// However, whatever it is, for `Foo` to be WF, it must be equal to `A::Item`. + /// If we have an input `Foo<?A, ?B>`, then after generalization we will wind + /// up with a type like `Foo<?C, ?D>`. When we enforce `Foo<?A, ?B> <: Foo<?C, ?D>`, + /// we will wind up with the requirement that `?A <: ?C`, but no particular + /// relationship between `?B` and `?D` (after all, these types may be completely + /// different). If we do nothing else, this may mean that `?D` goes unconstrained + /// (as in #41677). To avoid this we emit a `WellFormed` obligation in these cases. + pub has_unconstrained_ty_var: bool, +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/higher_ranked.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/higher_ranked.rs new file mode 100644 index 0000000000000..62028e0e70399 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/higher_ranked.rs @@ -0,0 +1,89 @@ +//! Helper routines for higher-ranked things. See the `doc` module at +//! the end of the file for details. + +use rustc_type_ir::TypeFoldable; +use rustc_type_ir::{BoundVar, UniverseIndex}; +use tracing::{debug, instrument}; + +use super::RelateResult; +use crate::next_solver::fold::FnMutDelegate; +use crate::next_solver::infer::InferCtxt; +use crate::next_solver::infer::snapshot::CombinedSnapshot; +use crate::next_solver::{ + Binder, BoundConst, BoundRegion, BoundTy, Const, DbInterner, PlaceholderConst, + PlaceholderRegion, PlaceholderTy, Region, Ty, +}; + +impl<'db> InferCtxt<'db> { + /// Replaces all bound variables (lifetimes, types, and constants) bound by + /// `binder` with placeholder variables in a new universe. This means that the + /// new placeholders can only be named by inference variables created after + /// this method has been called. + /// + /// This is the first step of checking subtyping when higher-ranked things are involved. + /// For more details visit the relevant sections of the [rustc dev guide]. + /// + /// `fn enter_forall` should be preferred over this method.
+ /// + /// [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/traits/hrtb.html + #[instrument(level = "debug", skip(self), ret)] + pub fn enter_forall_and_leak_universe(&self, binder: Binder<'db, T>) -> T + where + T: TypeFoldable> + Clone, + { + if let Some(inner) = binder.clone().no_bound_vars() { + return inner; + } + + let next_universe = self.create_next_universe(); + + let delegate = FnMutDelegate { + regions: &mut |br: BoundRegion| { + Region::new_placeholder( + self.interner, + PlaceholderRegion { universe: next_universe, bound: br }, + ) + }, + types: &mut |bound_ty: BoundTy| { + Ty::new_placeholder( + self.interner, + PlaceholderTy { universe: next_universe, bound: bound_ty }, + ) + }, + consts: &mut |bound: BoundConst| { + Const::new_placeholder( + self.interner, + PlaceholderConst { universe: next_universe, bound }, + ) + }, + }; + + debug!(?next_universe); + self.interner.replace_bound_vars_uncached(binder, delegate) + } + + /// Replaces all bound variables (lifetimes, types, and constants) bound by + /// `binder` with placeholder variables in a new universe and then calls the + /// closure `f` with the instantiated value. The new placeholders can only be + /// named by inference variables created inside of the closure `f` or afterwards. + /// + /// This is the first step of checking subtyping when higher-ranked things are involved. + /// For more details visit the relevant sections of the [rustc dev guide]. + /// + /// This method should be preferred over `fn enter_forall_and_leak_universe`. + /// + /// [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/traits/hrtb.html + #[instrument(level = "debug", skip(self, f))] + pub fn enter_forall(&self, forall: Binder<'db, T>, f: impl FnOnce(T) -> U) -> U + where + T: TypeFoldable> + Clone, + { + // FIXME: currently we do nothing to prevent placeholders with the new universe being + // used after exiting `f`. For example region subtyping can result in outlives constraints + // that name placeholders created in this function. Nested goals from type relations can + // also contain placeholders created by this function. + let value = self.enter_forall_and_leak_universe(forall); + debug!(?value); + f(value) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/lattice.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/lattice.rs new file mode 100644 index 0000000000000..c7f771ffe37f7 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/lattice.rs @@ -0,0 +1,269 @@ +//! # Lattice variables +//! +//! Generic code for operating on [lattices] of inference variables +//! that are characterized by an upper- and lower-bound. +//! +//! The code is defined quite generically so that it can be +//! applied both to type variables, which represent types being inferred, +//! and fn variables, which represent function types being inferred. +//! (It may eventually be applied to their types as well.) +//! In some cases, the functions are also generic with respect to the +//! operation on the lattice (GLB vs LUB). +//! +//! ## Note +//! +//! Although all the functions are generic, for simplicity, comments in the source code +//! generally refer to type variables and the LUB operation. +//! +//! 
[lattices]: https://en.wikipedia.org/wiki/Lattice_(order) + +use rustc_type_ir::{ + AliasRelationDirection, TypeVisitableExt, Upcast, Variance, + inherent::{IntoKind, Span as _}, + relate::{ + Relate, StructurallyRelateAliases, TypeRelation, VarianceDiagInfo, + combine::{PredicateEmittingRelation, super_combine_consts, super_combine_tys}, + }, +}; + +use crate::next_solver::{ + AliasTy, Binder, Const, DbInterner, Goal, ParamEnv, Predicate, PredicateKind, Region, Span, Ty, + TyKind, + infer::{ + DefineOpaqueTypes, InferCtxt, TypeTrace, + relate::RelateResult, + traits::{Obligation, PredicateObligations}, + }, +}; + +#[derive(Clone, Copy)] +pub(crate) enum LatticeOpKind { + Glb, + Lub, +} + +impl LatticeOpKind { + fn invert(self) -> Self { + match self { + LatticeOpKind::Glb => LatticeOpKind::Lub, + LatticeOpKind::Lub => LatticeOpKind::Glb, + } + } +} + +/// A "greatest lower bound" (common subtype) or least upper bound (common supertype). +pub(crate) struct LatticeOp<'infcx, 'db> { + infcx: &'infcx InferCtxt<'db>, + // Immutable fields + trace: TypeTrace<'db>, + param_env: ParamEnv<'db>, + // Mutable fields + kind: LatticeOpKind, + obligations: PredicateObligations<'db>, +} + +impl<'infcx, 'db> LatticeOp<'infcx, 'db> { + pub(crate) fn new( + infcx: &'infcx InferCtxt<'db>, + trace: TypeTrace<'db>, + param_env: ParamEnv<'db>, + kind: LatticeOpKind, + ) -> LatticeOp<'infcx, 'db> { + LatticeOp { infcx, trace, param_env, kind, obligations: PredicateObligations::new() } + } + + pub(crate) fn into_obligations(self) -> PredicateObligations<'db> { + self.obligations + } +} + +impl<'db> TypeRelation<DbInterner<'db>> for LatticeOp<'_, 'db> { + fn cx(&self) -> DbInterner<'db> { + self.infcx.interner + } + + fn relate_with_variance<T: Relate<DbInterner<'db>>>( + &mut self, + variance: Variance, + _info: VarianceDiagInfo<DbInterner<'db>>, + a: T, + b: T, + ) -> RelateResult<'db, T> { + match variance { + Variance::Invariant => { + self.obligations.extend( + self.infcx + .at(&self.trace.cause, self.param_env) + .eq_trace(DefineOpaqueTypes::Yes, self.trace.clone(), a, b)? + .into_obligations(), + ); + Ok(a) + } + Variance::Covariant => self.relate(a, b), + // FIXME(#41044) -- not correct, need test + Variance::Bivariant => Ok(a), + Variance::Contravariant => { + self.kind = self.kind.invert(); + let res = self.relate(a, b); + self.kind = self.kind.invert(); + res + } + } + } + + /// Relates two types using a given lattice. + fn tys(&mut self, a: Ty<'db>, b: Ty<'db>) -> RelateResult<'db, Ty<'db>> { + if a == b { + return Ok(a); + } + + let infcx = self.infcx; + + let a = infcx.shallow_resolve(a); + let b = infcx.shallow_resolve(b); + + match (a.kind(), b.kind()) { + // If one side is known to be a variable and one is not, + // create a variable (`v`) to represent the LUB. Make sure to + // relate `v` to the non-type-variable first (by passing it + // first to `relate_bound`). Otherwise, we would produce a + // subtype obligation that must then be processed. + // + // Example: if the LHS is a type variable, and RHS is + // `Box<i32>`, then we currently compare `v` to the RHS first, + // which will instantiate `v` with `Box<i32>`. Then when `v` + // is compared to the LHS, we instantiate LHS with `Box<i32>`. + // But if we did in reverse order, we would create a `v <: + // LHS` (or vice versa) constraint and then instantiate + // `v`. This would require further processing to achieve the same + // end-result; in particular, this screws up some of the logic + // in coercion, which expects LUB to figure out that the LHS + // is (e.g.) `Box<i32>`.
A more obvious solution might be to + // iterate on the subtype obligations that are returned, but I + // think this suffices. -nmatsakis + (TyKind::Infer(rustc_type_ir::TyVar(..)), _) => { + let v = infcx.next_ty_var(); + self.relate_bound(v, b, a)?; + Ok(v) + } + (_, TyKind::Infer(rustc_type_ir::TyVar(..))) => { + let v = infcx.next_ty_var(); + self.relate_bound(v, a, b)?; + Ok(v) + } + + ( + TyKind::Alias(rustc_type_ir::Opaque, AliasTy { def_id: a_def_id, .. }), + TyKind::Alias(rustc_type_ir::Opaque, AliasTy { def_id: b_def_id, .. }), + ) if a_def_id == b_def_id => super_combine_tys(infcx, self, a, b), + + _ => super_combine_tys(infcx, self, a, b), + } + } + + fn regions(&mut self, a: Region<'db>, b: Region<'db>) -> RelateResult<'db, Region<'db>> { + let mut inner = self.infcx.inner.borrow_mut(); + let mut constraints = inner.unwrap_region_constraints(); + Ok(match self.kind { + // GLB(&'static u8, &'a u8) == &RegionLUB('static, 'a) u8 == &'static u8 + LatticeOpKind::Glb => constraints.lub_regions(self.cx(), a, b), + + // LUB(&'static u8, &'a u8) == &RegionGLB('static, 'a) u8 == &'a u8 + LatticeOpKind::Lub => constraints.glb_regions(self.cx(), a, b), + }) + } + + fn consts(&mut self, a: Const<'db>, b: Const<'db>) -> RelateResult<'db, Const<'db>> { + super_combine_consts(self.infcx, self, a, b) + } + + fn binders( + &mut self, + a: Binder<'db, T>, + b: Binder<'db, T>, + ) -> RelateResult<'db, Binder<'db, T>> + where + T: Relate>, + { + // GLB/LUB of a binder and itself is just itself + if a == b { + return Ok(a); + } + + if a.skip_binder().has_escaping_bound_vars() || b.skip_binder().has_escaping_bound_vars() { + // When higher-ranked types are involved, computing the GLB/LUB is + // very challenging, switch to invariance. This is obviously + // overly conservative but works ok in practice. + self.relate_with_variance(Variance::Invariant, VarianceDiagInfo::default(), a, b)?; + Ok(a) + } else { + Ok(Binder::dummy(self.relate(a.skip_binder(), b.skip_binder())?)) + } + } +} + +impl<'infcx, 'db> LatticeOp<'infcx, 'db> { + // Relates the type `v` to `a` and `b` such that `v` represents + // the LUB/GLB of `a` and `b` as appropriate. + // + // Subtle hack: ordering *may* be significant here. This method + // relates `v` to `a` first, which may help us to avoid unnecessary + // type variable obligations. See caller for details. 
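+ // For example, when computing the LUB of a type variable and a concrete type, relating `v` to the concrete side (e.g. `Box<i32>`) first lets `v` be instantiated with it directly instead of leaving a deferred subtype obligation against the type variable.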
+ fn relate_bound(&mut self, v: Ty<'db>, a: Ty<'db>, b: Ty<'db>) -> RelateResult<'db, ()> { + let at = self.infcx.at(&self.trace.cause, self.param_env); + match self.kind { + LatticeOpKind::Glb => { + self.obligations.extend(at.sub(DefineOpaqueTypes::Yes, v, a)?.into_obligations()); + self.obligations.extend(at.sub(DefineOpaqueTypes::Yes, v, b)?.into_obligations()); + } + LatticeOpKind::Lub => { + self.obligations.extend(at.sub(DefineOpaqueTypes::Yes, a, v)?.into_obligations()); + self.obligations.extend(at.sub(DefineOpaqueTypes::Yes, b, v)?.into_obligations()); + } + } + Ok(()) + } +} + +impl<'db> PredicateEmittingRelation> for LatticeOp<'_, 'db> { + fn span(&self) -> Span { + Span::dummy() + } + + fn structurally_relate_aliases(&self) -> StructurallyRelateAliases { + StructurallyRelateAliases::No + } + + fn param_env(&self) -> ParamEnv<'db> { + self.param_env + } + + fn register_predicates( + &mut self, + preds: impl IntoIterator, Predicate<'db>>>, + ) { + self.obligations.extend(preds.into_iter().map(|pred| { + Obligation::new(self.infcx.interner, self.trace.cause.clone(), self.param_env, pred) + })) + } + + fn register_goals(&mut self, goals: impl IntoIterator>>) { + self.obligations.extend(goals.into_iter().map(|goal| { + Obligation::new( + self.infcx.interner, + self.trace.cause.clone(), + goal.param_env, + goal.predicate, + ) + })) + } + + fn register_alias_relate_predicate(&mut self, a: Ty<'db>, b: Ty<'db>) { + self.register_predicates([Binder::dummy(PredicateKind::AliasRelate( + a.into(), + b.into(), + // FIXME(deferred_projection_equality): This isn't right, I think? + AliasRelationDirection::Equate, + ))]); + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/mod.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/mod.rs new file mode 100644 index 0000000000000..0cc1cf756a9c2 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/mod.rs @@ -0,0 +1,14 @@ +//! This module contains the definitions of most `TypeRelation`s in the type system +//! (except for some relations used for diagnostics and heuristics in the compiler). +//! As well as the implementation of `Relate` for interned things (`Ty`/`Const`/etc). + +pub use rustc_type_ir::relate::combine::PredicateEmittingRelation; +pub use rustc_type_ir::relate::*; + +use crate::next_solver::DbInterner; + +mod generalize; +mod higher_ranked; +pub(crate) mod lattice; + +pub type RelateResult<'db, T> = rustc_type_ir::relate::RelateResult, T>; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/resolve.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/resolve.rs new file mode 100644 index 0000000000000..84338ade6e354 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/resolve.rs @@ -0,0 +1,62 @@ +//! Things for resolving vars in the infer context of the next-trait-solver. + +use rustc_type_ir::{ + ConstKind, FallibleTypeFolder, InferConst, InferTy, RegionKind, TyKind, TypeFoldable, + TypeFolder, TypeSuperFoldable, TypeVisitableExt, data_structures::DelayedMap, + inherent::IntoKind, +}; + +use crate::next_solver::{Const, DbInterner, Region, Ty}; + +use super::{FixupError, FixupResult, InferCtxt}; + +/////////////////////////////////////////////////////////////////////////// +// OPPORTUNISTIC VAR RESOLVER + +/// The opportunistic resolver can be used at any time. 
It simply replaces +/// type/const variables that have been unified with the things they have +/// been unified with (similar to `shallow_resolve`, but deep). This is +/// useful for printing messages etc but also required at various +/// points for correctness. +pub struct OpportunisticVarResolver<'a, 'db> { + infcx: &'a InferCtxt<'db>, + /// We're able to use a cache here as the folder does + /// not have any mutable state. + cache: DelayedMap, Ty<'db>>, +} + +impl<'a, 'db> OpportunisticVarResolver<'a, 'db> { + #[inline] + pub fn new(infcx: &'a InferCtxt<'db>) -> Self { + OpportunisticVarResolver { infcx, cache: Default::default() } + } +} + +impl<'a, 'db> TypeFolder> for OpportunisticVarResolver<'a, 'db> { + fn cx(&self) -> DbInterner<'db> { + self.infcx.interner + } + + #[inline] + fn fold_ty(&mut self, t: Ty<'db>) -> Ty<'db> { + if !t.has_non_region_infer() { + t // micro-optimize -- if there is nothing in this type that this fold affects... + } else if let Some(ty) = self.cache.get(&t) { + *ty + } else { + let shallow = self.infcx.shallow_resolve(t); + let res = shallow.super_fold_with(self); + assert!(self.cache.insert(t, res)); + res + } + } + + fn fold_const(&mut self, ct: Const<'db>) -> Const<'db> { + if !ct.has_non_region_infer() { + ct // micro-optimize -- if there is nothing in this const that this fold affects... + } else { + let ct = self.infcx.shallow_resolve_const(ct); + ct.super_fold_with(self) + } + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/select.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/select.rs new file mode 100644 index 0000000000000..4f111fa662668 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/select.rs @@ -0,0 +1,334 @@ +use std::ops::ControlFlow; + +use hir_def::{ImplId, TraitId}; +use rustc_type_ir::{ + Interner, + solve::{BuiltinImplSource, CandidateSource, Certainty, inspect::ProbeKind}, +}; + +use crate::{ + db::InternedOpaqueTyId, + next_solver::{ + Const, ErrorGuaranteed, GenericArgs, Goal, TraitRef, Ty, TypeError, + infer::{ + InferCtxt, + traits::{Obligation, ObligationCause, PredicateObligation, TraitObligation}, + }, + inspect::{InspectCandidate, InspectGoal, ProofTreeVisitor}, + }, +}; + +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum SelectionError<'db> { + /// The trait is not implemented. + Unimplemented, + /// After a closure impl has selected, its "outputs" were evaluated + /// (which for closures includes the "input" type params) and they + /// didn't resolve. See `confirm_poly_trait_refs` for more. + SignatureMismatch(Box>), + /// The trait pointed by `DefId` is dyn-incompatible. + TraitDynIncompatible(TraitId), + /// A given constant couldn't be evaluated. + NotConstEvaluatable(NotConstEvaluatable), + /// Exceeded the recursion depth during type projection. + Overflow(OverflowError), + /// Computing an opaque type's hidden type caused an error (e.g. a cycle error). + /// We can thus not know whether the hidden type implements an auto trait, so + /// we should not presume anything about it. + OpaqueTypeAutoTraitLeakageUnknown(InternedOpaqueTyId), + /// Error for a `ConstArgHasType` goal + ConstArgHasWrongType { ct: Const<'db>, ct_ty: Ty<'db>, expected_ty: Ty<'db> }, +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum NotConstEvaluatable { + Error(ErrorGuaranteed), + MentionsInfer, + MentionsParam, +} + +/// Indicates that trait evaluation caused overflow and in which pass. 
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub enum OverflowError { + Error(ErrorGuaranteed), + Canonical, +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct SignatureMismatchData<'db> { + pub found_trait_ref: TraitRef<'db>, + pub expected_trait_ref: TraitRef<'db>, + pub terr: TypeError<'db>, +} + +/// When performing resolution, it is typically the case that there +/// can be one of three outcomes: +/// +/// - `Ok(Some(r))`: success occurred with result `r` +/// - `Ok(None)`: could not definitely determine anything, usually due +/// to inconclusive type inference. +/// - `Err(e)`: error `e` occurred +pub type SelectionResult<'db, T> = Result, SelectionError<'db>>; + +/// Given the successful resolution of an obligation, the `ImplSource` +/// indicates where the impl comes from. +/// +/// For example, the obligation may be satisfied by a specific impl (case A), +/// or it may be relative to some bound that is in scope (case B). +/// +/// ```ignore (illustrative) +/// impl Clone for Option { ... } // Impl_1 +/// impl Clone for Box { ... } // Impl_2 +/// impl Clone for i32 { ... } // Impl_3 +/// +/// fn foo(concrete: Option>, param: T, mixed: Option) { +/// // Case A: ImplSource points at a specific impl. Only possible when +/// // type is concretely known. If the impl itself has bounded +/// // type parameters, ImplSource will carry resolutions for those as well: +/// concrete.clone(); // ImplSource(Impl_1, [ImplSource(Impl_2, [ImplSource(Impl_3)])]) +/// +/// // Case B: ImplSource must be provided by caller. This applies when +/// // type is a type parameter. +/// param.clone(); // ImplSource::Param +/// +/// // Case C: A mix of cases A and B. +/// mixed.clone(); // ImplSource(Impl_1, [ImplSource::Param]) +/// } +/// ``` +/// +/// ### The type parameter `N` +/// +/// See explanation on `ImplSourceUserDefinedData`. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum ImplSource<'db, N> { + /// ImplSource identifying a particular impl. + UserDefined(ImplSourceUserDefinedData<'db, N>), + + /// Successful resolution to an obligation provided by the caller + /// for some type parameter. The `Vec` represents the + /// obligations incurred from normalizing the where-clause (if + /// any). + Param(Vec), + + /// Successful resolution for a builtin impl. + Builtin(BuiltinImplSource, Vec), +} + +impl<'db, N> ImplSource<'db, N> { + pub fn nested_obligations(self) -> Vec { + match self { + ImplSource::UserDefined(i) => i.nested, + ImplSource::Param(n) | ImplSource::Builtin(_, n) => n, + } + } + + pub fn borrow_nested_obligations(&self) -> &[N] { + match self { + ImplSource::UserDefined(i) => &i.nested, + ImplSource::Param(n) | ImplSource::Builtin(_, n) => n, + } + } + + pub fn borrow_nested_obligations_mut(&mut self) -> &mut [N] { + match self { + ImplSource::UserDefined(i) => &mut i.nested, + ImplSource::Param(n) | ImplSource::Builtin(_, n) => n, + } + } + + pub fn map(self, f: F) -> ImplSource<'db, M> + where + F: FnMut(N) -> M, + { + match self { + ImplSource::UserDefined(i) => ImplSource::UserDefined(ImplSourceUserDefinedData { + impl_def_id: i.impl_def_id, + args: i.args, + nested: i.nested.into_iter().map(f).collect(), + }), + ImplSource::Param(n) => ImplSource::Param(n.into_iter().map(f).collect()), + ImplSource::Builtin(source, n) => { + ImplSource::Builtin(source, n.into_iter().map(f).collect()) + } + } + } +} + +/// Identifies a particular impl in the source, along with a set of +/// generic parameters from the impl's type/lifetime parameters. 
The +/// `nested` vector corresponds to the nested obligations attached to +/// the impl's type parameters. +/// +/// The type parameter `N` indicates the type used for "nested +/// obligations" that are required by the impl. During type-check, this +/// is `Obligation`, as one might expect. During codegen, however, this +/// is `()`, because codegen only requires a shallow resolution of an +/// impl, and nested obligations are satisfied later. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ImplSourceUserDefinedData<'db, N> { + pub impl_def_id: ImplId, + pub args: GenericArgs<'db>, + pub nested: Vec, +} + +pub type Selection<'db> = ImplSource<'db, PredicateObligation<'db>>; + +impl<'db> InferCtxt<'db> { + pub(crate) fn select( + &self, + obligation: &TraitObligation<'db>, + ) -> SelectionResult<'db, Selection<'db>> { + self.visit_proof_tree( + Goal::new(self.interner, obligation.param_env, obligation.predicate), + &mut Select {}, + ) + .break_value() + .unwrap() + } +} + +struct Select {} + +impl<'db> ProofTreeVisitor<'db> for Select { + type Result = ControlFlow>>; + + fn visit_goal(&mut self, goal: &InspectGoal<'_, 'db>) -> Self::Result { + let mut candidates = goal.candidates(); + candidates.retain(|cand| cand.result().is_ok()); + + // No candidates -- not implemented. + if candidates.is_empty() { + return ControlFlow::Break(Err(SelectionError::Unimplemented)); + } + + // One candidate, no need to winnow. + if candidates.len() == 1 { + return ControlFlow::Break(Ok(to_selection(candidates.into_iter().next().unwrap()))); + } + + // Don't winnow until `Certainty::Yes` -- we don't need to winnow until + // codegen, and only on the good path. + if matches!(goal.result().unwrap(), Certainty::Maybe { .. }) { + return ControlFlow::Break(Ok(None)); + } + + // We need to winnow. See comments on `candidate_should_be_dropped_in_favor_of`. + let mut i = 0; + while i < candidates.len() { + let should_drop_i = (0..candidates.len()) + .filter(|&j| i != j) + .any(|j| candidate_should_be_dropped_in_favor_of(&candidates[i], &candidates[j])); + if should_drop_i { + candidates.swap_remove(i); + } else { + i += 1; + if i > 1 { + return ControlFlow::Break(Ok(None)); + } + } + } + + ControlFlow::Break(Ok(to_selection(candidates.into_iter().next().unwrap()))) + } +} + +/// This is a lot more limited than the old solver's equivalent method. This may lead to more `Ok(None)` +/// results when selecting traits in polymorphic contexts, but we should never rely on the lack of ambiguity, +/// and should always just gracefully fail here. We shouldn't rely on this incompleteness. +fn candidate_should_be_dropped_in_favor_of<'db>( + victim: &InspectCandidate<'_, 'db>, + other: &InspectCandidate<'_, 'db>, +) -> bool { + // Don't winnow until `Certainty::Yes` -- we don't need to winnow until + // codegen, and only on the good path. + if matches!(other.result().unwrap(), Certainty::Maybe { .. }) { + return false; + } + + let ProbeKind::TraitCandidate { source: victim_source, result: _ } = victim.kind() else { + return false; + }; + let ProbeKind::TraitCandidate { source: other_source, result: _ } = other.kind() else { + return false; + }; + + match (victim_source, other_source) { + (_, CandidateSource::CoherenceUnknowable) | (CandidateSource::CoherenceUnknowable, _) => { + panic!("should not have assembled a CoherenceUnknowable candidate") + } + + // In the old trait solver, we arbitrarily choose lower vtable candidates + // over higher ones. 
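+ // We keep that behavior here: the victim is dropped whenever its vtable index is
+ // greater than or equal to the other candidate's, so the lowest-index candidate
+ // survives winnowing.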
+ ( + CandidateSource::BuiltinImpl(BuiltinImplSource::Object(a)), + CandidateSource::BuiltinImpl(BuiltinImplSource::Object(b)), + ) => a >= b, + ( + CandidateSource::BuiltinImpl(BuiltinImplSource::TraitUpcasting(a)), + CandidateSource::BuiltinImpl(BuiltinImplSource::TraitUpcasting(b)), + ) => a >= b, + // Prefer dyn candidates over non-dyn candidates. This is necessary to + // handle the unsoundness between `impl Any for T` and `dyn Any: Any`. + ( + CandidateSource::Impl(_) | CandidateSource::ParamEnv(_) | CandidateSource::AliasBound, + CandidateSource::BuiltinImpl(BuiltinImplSource::Object { .. }), + ) => true, + + // Prefer specializing candidates over specialized candidates. + (CandidateSource::Impl(victim_def_id), CandidateSource::Impl(other_def_id)) => { + victim.goal().infcx().interner.impl_specializes(other_def_id, victim_def_id) + } + + _ => false, + } +} + +fn to_selection<'db>(cand: InspectCandidate<'_, 'db>) -> Option> { + if let Certainty::Maybe { .. } = cand.shallow_certainty() { + return None; + } + + let nested = match cand.result().expect("expected positive result") { + Certainty::Yes => Vec::new(), + Certainty::Maybe { .. } => cand + .instantiate_nested_goals() + .into_iter() + .map(|nested| { + Obligation::new( + nested.infcx().interner, + ObligationCause::dummy(), + nested.goal().param_env, + nested.goal().predicate, + ) + }) + .collect(), + }; + + Some(match cand.kind() { + ProbeKind::TraitCandidate { source, result: _ } => match source { + CandidateSource::Impl(impl_def_id) => { + // FIXME: Remove this in favor of storing this in the tree + // For impl candidates, we do the rematch manually to compute the args. + ImplSource::UserDefined(ImplSourceUserDefinedData { + impl_def_id: impl_def_id.0, + args: cand.instantiate_impl_args(), + nested, + }) + } + CandidateSource::BuiltinImpl(builtin) => ImplSource::Builtin(builtin, nested), + CandidateSource::ParamEnv(_) | CandidateSource::AliasBound => ImplSource::Param(nested), + CandidateSource::CoherenceUnknowable => { + panic!("didn't expect to select an unknowable candidate") + } + }, + ProbeKind::NormalizedSelfTyAssembly + | ProbeKind::UnsizeAssembly + | ProbeKind::ProjectionCompatibility + | ProbeKind::OpaqueTypeStorageLookup { result: _ } + | ProbeKind::Root { result: _ } + | ProbeKind::ShadowedEnvProbing + | ProbeKind::RigidAlias { result: _ } => { + panic!("didn't expect to assemble trait candidate from {:#?}", cand.kind()) + } + }) +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/snapshot/fudge.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/snapshot/fudge.rs new file mode 100644 index 0000000000000..74353574e3298 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/snapshot/fudge.rs @@ -0,0 +1,263 @@ +use std::ops::Range; + +use ena::{ + snapshot_vec as sv, + unify::{self as ut, UnifyKey}, +}; +use rustc_type_ir::{ + ConstVid, FloatVid, IntVid, RegionKind, RegionVid, TyVid, TypeFoldable, TypeFolder, + TypeSuperFoldable, TypeVisitableExt, inherent::IntoKind, +}; + +use crate::next_solver::{ + Const, ConstKind, DbInterner, Region, Ty, TyKind, + infer::{ + InferCtxt, UnificationTable, iter_idx_range, + snapshot::VariableLengths, + type_variable::TypeVariableOrigin, + unify_key::{ConstVariableOrigin, ConstVariableValue, ConstVidKey}, + }, +}; + +fn vars_since_snapshot<'db, T>( + table: &UnificationTable<'_, 'db, T>, + snapshot_var_len: usize, +) -> Range +where + T: UnifyKey, + super::UndoLog<'db>: From>>, +{ + T::from_index(snapshot_var_len as 
u32)..T::from_index(table.len() as u32) +} + +fn const_vars_since_snapshot<'db>( + table: &mut UnificationTable<'_, 'db, ConstVidKey<'db>>, + snapshot_var_len: usize, +) -> (Range, Vec) { + let range = vars_since_snapshot(table, snapshot_var_len); + let range = range.start.vid..range.end.vid; + + ( + range.clone(), + iter_idx_range(range) + .map(|index| match table.probe_value(index) { + ConstVariableValue::Known { value: _ } => { + ConstVariableOrigin { param_def_id: None } + } + ConstVariableValue::Unknown { origin, universe: _ } => origin, + }) + .collect(), + ) +} + +impl<'db> InferCtxt<'db> { + /// This rather funky routine is used while processing expected + /// types. What happens here is that we want to propagate a + /// coercion through the return type of a fn to its + /// argument. Consider the type of `Option::Some`, which is + /// basically `for fn(T) -> Option`. So if we have an + /// expression `Some(&[1, 2, 3])`, and that has the expected type + /// `Option<&[u32]>`, we would like to type check `&[1, 2, 3]` + /// with the expectation of `&[u32]`. This will cause us to coerce + /// from `&[u32; 3]` to `&[u32]` and make the users life more + /// pleasant. + /// + /// The way we do this is using `fudge_inference_if_ok`. What the + /// routine actually does is to start a snapshot and execute the + /// closure `f`. In our example above, what this closure will do + /// is to unify the expectation (`Option<&[u32]>`) with the actual + /// return type (`Option`, where `?T` represents the variable + /// instantiated for `T`). This will cause `?T` to be unified + /// with `&?a [u32]`, where `?a` is a fresh lifetime variable. The + /// input type (`?T`) is then returned by `f()`. + /// + /// At this point, `fudge_inference_if_ok` will normalize all type + /// variables, converting `?T` to `&?a [u32]` and end the + /// snapshot. The problem is that we can't just return this type + /// out, because it references the region variable `?a`, and that + /// region variable was popped when we popped the snapshot. + /// + /// So what we do is to keep a list (`region_vars`, in the code below) + /// of region variables created during the snapshot (here, `?a`). We + /// fold the return value and replace any such regions with a *new* + /// region variable (e.g., `?b`) and return the result (`&?b [u32]`). + /// This can then be used as the expectation for the fn argument. + /// + /// The important point here is that, for soundness purposes, the + /// regions in question are not particularly important. We will + /// use the expected types to guide coercions, but we will still + /// type-check the resulting types from those coercions against + /// the actual types (`?T`, `Option`) -- and remember that + /// after the snapshot is popped, the variable `?T` is no longer + /// unified. + pub fn fudge_inference_if_ok(&self, f: F) -> Result + where + F: FnOnce() -> Result, + T: TypeFoldable>, + { + let variable_lengths = self.variable_lengths(); + let (snapshot_vars, value) = self.probe(|_| { + let value = f()?; + // At this point, `value` could in principle refer + // to inference variables that have been created during + // the snapshot. Once we exit `probe()`, those are + // going to be popped, so we will have to + // eliminate any references to them. 
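+ // `SnapshotVarData::new` records which type/int/float/const/region variables were
+ // created inside this snapshot; `fudge_inference` below then replaces any of them
+ // that leak into `value` with fresh variables.
+ // (Illustrative call shape, with `check_expectation` as a placeholder closure:
+ // `infcx.fudge_inference_if_ok(|| check_expectation(expected, actual))`.)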
+ let snapshot_vars = SnapshotVarData::new(self, variable_lengths); + Ok((snapshot_vars, self.resolve_vars_if_possible(value))) + })?; + + // At this point, we need to replace any of the now-popped + // type/region variables that appear in `value` with a fresh + // variable of the appropriate kind. We can't do this during + // the probe because they would just get popped then too. =) + Ok(self.fudge_inference(snapshot_vars, value)) + } + + fn fudge_inference>>( + &self, + snapshot_vars: SnapshotVarData, + value: T, + ) -> T { + // Micro-optimization: if no variables have been created, then + // `value` can't refer to any of them. =) So we can just return it. + if snapshot_vars.is_empty() { + value + } else { + value.fold_with(&mut InferenceFudger { infcx: self, snapshot_vars }) + } + } +} + +struct SnapshotVarData { + region_vars: Range, + type_vars: (Range, Vec), + int_vars: Range, + float_vars: Range, + const_vars: (Range, Vec), +} + +impl SnapshotVarData { + fn new(infcx: &InferCtxt<'_>, vars_pre_snapshot: VariableLengths) -> SnapshotVarData { + let mut inner = infcx.inner.borrow_mut(); + let region_vars = inner + .unwrap_region_constraints() + .vars_since_snapshot(vars_pre_snapshot.region_constraints_len); + let type_vars = inner.type_variables().vars_since_snapshot(vars_pre_snapshot.type_var_len); + let int_vars = + vars_since_snapshot(&inner.int_unification_table(), vars_pre_snapshot.int_var_len); + let float_vars = + vars_since_snapshot(&inner.float_unification_table(), vars_pre_snapshot.float_var_len); + + let const_vars = const_vars_since_snapshot( + &mut inner.const_unification_table(), + vars_pre_snapshot.const_var_len, + ); + SnapshotVarData { region_vars, type_vars, int_vars, float_vars, const_vars } + } + + fn is_empty(&self) -> bool { + let SnapshotVarData { region_vars, type_vars, int_vars, float_vars, const_vars } = self; + region_vars.is_empty() + && type_vars.0.is_empty() + && int_vars.is_empty() + && float_vars.is_empty() + && const_vars.0.is_empty() + } +} + +struct InferenceFudger<'a, 'db> { + infcx: &'a InferCtxt<'db>, + snapshot_vars: SnapshotVarData, +} + +impl<'a, 'db> TypeFolder> for InferenceFudger<'a, 'db> { + fn cx(&self) -> DbInterner<'db> { + self.infcx.interner + } + + fn fold_ty(&mut self, ty: Ty<'db>) -> Ty<'db> { + if let TyKind::Infer(infer_ty) = ty.kind() { + match infer_ty { + rustc_type_ir::TyVar(vid) => { + if self.snapshot_vars.type_vars.0.contains(&vid) { + // This variable was created during the fudging. + // Recreate it with a fresh variable here. + let idx = vid.as_usize() - self.snapshot_vars.type_vars.0.start.as_usize(); + let origin = self.snapshot_vars.type_vars.1[idx]; + self.infcx.next_ty_var_with_origin(origin) + } else { + // This variable was created before the + // "fudging". Since we refresh all type + // variables to their binding anyhow, we know + // that it is unbound, so we can just return + // it. 
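+ // The debug assertion below checks exactly that: a variable created before the
+ // fudge must still be unresolved when we reach this branch.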
+ debug_assert!( + self.infcx.inner.borrow_mut().type_variables().probe(vid).is_unknown() + ); + ty + } + } + rustc_type_ir::IntVar(vid) => { + if self.snapshot_vars.int_vars.contains(&vid) { + self.infcx.next_int_var() + } else { + ty + } + } + rustc_type_ir::FloatVar(vid) => { + if self.snapshot_vars.float_vars.contains(&vid) { + self.infcx.next_float_var() + } else { + ty + } + } + rustc_type_ir::FreshTy(_) + | rustc_type_ir::FreshIntTy(_) + | rustc_type_ir::FreshFloatTy(_) => { + unreachable!("unexpected fresh infcx var") + } + } + } else if ty.has_infer() { + ty.super_fold_with(self) + } else { + ty + } + } + + fn fold_region(&mut self, r: Region<'db>) -> Region<'db> { + if let RegionKind::ReVar(vid) = r.kind() { + if self.snapshot_vars.region_vars.contains(&vid) { + let idx = vid.index() - self.snapshot_vars.region_vars.start.index(); + self.infcx.next_region_var() + } else { + r + } + } else { + r + } + } + + fn fold_const(&mut self, ct: Const<'db>) -> Const<'db> { + if let ConstKind::Infer(infer_ct) = ct.kind() { + match infer_ct { + rustc_type_ir::InferConst::Var(vid) => { + if self.snapshot_vars.const_vars.0.contains(&vid) { + let idx = vid.index() - self.snapshot_vars.const_vars.0.start.index(); + let origin = self.snapshot_vars.const_vars.1[idx]; + self.infcx.next_const_var_with_origin(origin) + } else { + ct + } + } + rustc_type_ir::InferConst::Fresh(_) => { + unreachable!("unexpected fresh infcx var") + } + } + } else if ct.has_infer() { + ct.super_fold_with(self) + } else { + ct + } + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/snapshot/mod.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/snapshot/mod.rs new file mode 100644 index 0000000000000..7b9ca96c51406 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/snapshot/mod.rs @@ -0,0 +1,112 @@ +//! Snapshotting in the infer ctxt of the next-trait-solver. 
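+//!
+//! A rough usage sketch (`try_peek`/`try_commit` are placeholder closures, not APIs of
+//! this module):
+//!
+//! ```ignore (illustrative)
+//! // `probe` runs the closure and always rolls back its inference side effects.
+//! let peeked = infcx.probe(|_snapshot| try_peek(&infcx));
+//! // `commit_if_ok` keeps the side effects only if the closure returns `Ok(_)`.
+//! let committed = infcx.commit_if_ok(|_snapshot| try_commit(&infcx))?;
+//! ```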
+ +use ena::undo_log::UndoLogs; +use rustc_type_ir::UniverseIndex; +use tracing::{debug, instrument}; + +use super::InferCtxt; +use super::region_constraints::RegionSnapshot; + +mod fudge; +pub(crate) mod undo_log; + +use undo_log::{Snapshot, UndoLog}; + +#[must_use = "once you start a snapshot, you should always consume it"] +pub struct CombinedSnapshot { + pub(super) undo_snapshot: Snapshot, + region_constraints_snapshot: RegionSnapshot, + universe: UniverseIndex, +} + +struct VariableLengths { + region_constraints_len: usize, + type_var_len: usize, + int_var_len: usize, + float_var_len: usize, + const_var_len: usize, +} + +impl<'db> InferCtxt<'db> { + fn variable_lengths(&self) -> VariableLengths { + let mut inner = self.inner.borrow_mut(); + VariableLengths { + region_constraints_len: inner.unwrap_region_constraints().num_region_vars(), + type_var_len: inner.type_variables().num_vars(), + int_var_len: inner.int_unification_table().len(), + float_var_len: inner.float_unification_table().len(), + const_var_len: inner.const_unification_table().len(), + } + } + + pub fn in_snapshot(&self) -> bool { + UndoLogs::>::in_snapshot(&self.inner.borrow_mut().undo_log) + } + + pub fn num_open_snapshots(&self) -> usize { + UndoLogs::>::num_open_snapshots(&self.inner.borrow_mut().undo_log) + } + + pub(crate) fn start_snapshot(&self) -> CombinedSnapshot { + debug!("start_snapshot()"); + + let mut inner = self.inner.borrow_mut(); + + CombinedSnapshot { + undo_snapshot: inner.undo_log.start_snapshot(), + region_constraints_snapshot: inner.unwrap_region_constraints().start_snapshot(), + universe: self.universe(), + } + } + + #[instrument(skip(self, snapshot), level = "debug")] + pub(crate) fn rollback_to(&self, snapshot: CombinedSnapshot) { + let CombinedSnapshot { undo_snapshot, region_constraints_snapshot, universe } = snapshot; + + self.universe.set(universe); + + let mut inner = self.inner.borrow_mut(); + inner.rollback_to(undo_snapshot); + inner.unwrap_region_constraints().rollback_to(region_constraints_snapshot); + } + + #[instrument(skip(self, snapshot), level = "debug")] + fn commit_from(&self, snapshot: CombinedSnapshot) { + let CombinedSnapshot { undo_snapshot, region_constraints_snapshot: _, universe: _ } = + snapshot; + + self.inner.borrow_mut().commit(undo_snapshot); + } + + /// Execute `f` and commit the bindings if closure `f` returns `Ok(_)`. + #[instrument(skip(self, f), level = "debug")] + pub fn commit_if_ok(&self, f: F) -> Result + where + F: FnOnce(&CombinedSnapshot) -> Result, + { + let snapshot = self.start_snapshot(); + let r = f(&snapshot); + debug!("commit_if_ok() -- r.is_ok() = {}", r.is_ok()); + match r { + Ok(_) => { + self.commit_from(snapshot); + } + Err(_) => { + self.rollback_to(snapshot); + } + } + r + } + + /// Execute `f` then unroll any bindings it creates. + #[instrument(skip(self, f), level = "debug")] + pub fn probe(&self, f: F) -> R + where + F: FnOnce(&CombinedSnapshot) -> R, + { + let snapshot = self.start_snapshot(); + let r = f(&snapshot); + self.rollback_to(snapshot); + r + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/snapshot/undo_log.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/snapshot/undo_log.rs new file mode 100644 index 0000000000000..05a1013b3fbd5 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/snapshot/undo_log.rs @@ -0,0 +1,204 @@ +//! Snapshotting in the infer ctxt of the next-trait-solver. 
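+//!
+//! Every mutation of an inference table performed while a snapshot is open pushes an
+//! `UndoLog` entry; rolling a snapshot back pops and reverses entries until the log
+//! shrinks to the length recorded when the snapshot was started.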
+ +use std::marker::PhantomData; + +use ena::snapshot_vec as sv; +use ena::undo_log::{Rollback, UndoLogs}; +use ena::unify as ut; +use rustc_type_ir::FloatVid; +use rustc_type_ir::IntVid; +use tracing::debug; + +use crate::next_solver::OpaqueTypeKey; +use crate::next_solver::infer::opaque_types::OpaqueHiddenType; +use crate::next_solver::infer::unify_key::ConstVidKey; +use crate::next_solver::infer::unify_key::RegionVidKey; +use crate::next_solver::infer::{InferCtxtInner, region_constraints, type_variable}; +use crate::traits; + +pub struct Snapshot { + pub(crate) undo_len: usize, +} + +/// Records the "undo" data for a single operation that affects some form of inference variable. +#[derive(Clone)] +pub(crate) enum UndoLog<'db> { + DuplicateOpaqueType, + OpaqueTypes(OpaqueTypeKey<'db>, Option>), + TypeVariables(type_variable::UndoLog<'db>), + ConstUnificationTable(sv::UndoLog>>), + IntUnificationTable(sv::UndoLog>), + FloatUnificationTable(sv::UndoLog>), + RegionConstraintCollector(region_constraints::UndoLog<'db>), + RegionUnificationTable(sv::UndoLog>>), + PushRegionObligation, +} + +macro_rules! impl_from { + ($($ctor:ident ($ty:ty),)*) => { + $( + impl<'db> From<$ty> for UndoLog<'db> { + fn from(x: $ty) -> Self { + UndoLog::$ctor(x.into()) + } + } + )* + } +} + +// Upcast from a single kind of "undoable action" to the general enum +impl_from! { + RegionConstraintCollector(region_constraints::UndoLog<'db>), + + TypeVariables(sv::UndoLog>>), + TypeVariables(sv::UndoLog>), + TypeVariables(type_variable::UndoLog<'db>), + IntUnificationTable(sv::UndoLog>), + FloatUnificationTable(sv::UndoLog>), + + ConstUnificationTable(sv::UndoLog>>), + + RegionUnificationTable(sv::UndoLog>>), +} + +/// The Rollback trait defines how to rollback a particular action. +impl<'db> Rollback> for InferCtxtInner<'db> { + fn reverse(&mut self, undo: UndoLog<'db>) { + match undo { + UndoLog::DuplicateOpaqueType => self.opaque_type_storage.pop_duplicate_entry(), + UndoLog::OpaqueTypes(key, idx) => self.opaque_type_storage.remove(key, idx), + UndoLog::TypeVariables(undo) => self.type_variable_storage.reverse(undo), + UndoLog::ConstUnificationTable(undo) => self.const_unification_storage.reverse(undo), + UndoLog::IntUnificationTable(undo) => self.int_unification_storage.reverse(undo), + UndoLog::FloatUnificationTable(undo) => self.float_unification_storage.reverse(undo), + UndoLog::RegionConstraintCollector(undo) => { + self.region_constraint_storage.as_mut().unwrap().reverse(undo) + } + UndoLog::RegionUnificationTable(undo) => { + self.region_constraint_storage.as_mut().unwrap().unification_table.reverse(undo) + } + UndoLog::PushRegionObligation => { + self.region_obligations.pop(); + } + } + } +} + +/// The combined undo log for all the various unification tables. For each change to the storage +/// for any kind of inference variable, we record an UndoLog entry in the vector here. +#[derive(Clone, Default)] +pub(crate) struct InferCtxtUndoLogs<'db> { + logs: Vec>, + num_open_snapshots: usize, +} + +/// The UndoLogs trait defines how we undo a particular kind of action (of type T). We can undo any +/// action that is convertible into an UndoLog (per the From impls above). 
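+/// Entries are only recorded while at least one snapshot is open; outside of a
+/// snapshot, `push` and `extend` are no-ops.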
+impl<'db, T> UndoLogs for InferCtxtUndoLogs<'db> +where + UndoLog<'db>: From, +{ + #[inline] + fn num_open_snapshots(&self) -> usize { + self.num_open_snapshots + } + + #[inline] + fn push(&mut self, undo: T) { + if self.in_snapshot() { + self.logs.push(undo.into()) + } + } + + fn clear(&mut self) { + self.logs.clear(); + self.num_open_snapshots = 0; + } + + fn extend(&mut self, undos: J) + where + Self: Sized, + J: IntoIterator, + { + if self.in_snapshot() { + self.logs.extend(undos.into_iter().map(UndoLog::from)) + } + } +} + +impl<'db> InferCtxtInner<'db> { + pub fn rollback_to(&mut self, snapshot: Snapshot) { + debug!("rollback_to({})", snapshot.undo_len); + self.undo_log.assert_open_snapshot(&snapshot); + + while self.undo_log.logs.len() > snapshot.undo_len { + let undo = self.undo_log.logs.pop().unwrap(); + self.reverse(undo); + } + + self.type_variable_storage.finalize_rollback(); + + if self.undo_log.num_open_snapshots == 1 { + // After the root snapshot the undo log should be empty. + assert!(snapshot.undo_len == 0); + assert!(self.undo_log.logs.is_empty()); + } + + self.undo_log.num_open_snapshots -= 1; + } + + pub fn commit(&mut self, snapshot: Snapshot) { + debug!("commit({})", snapshot.undo_len); + + if self.undo_log.num_open_snapshots == 1 { + // The root snapshot. It's safe to clear the undo log because + // there's no snapshot further out that we might need to roll back + // to. + assert!(snapshot.undo_len == 0); + self.undo_log.logs.clear(); + } + + self.undo_log.num_open_snapshots -= 1; + } +} + +impl<'db> InferCtxtUndoLogs<'db> { + pub(crate) fn start_snapshot(&mut self) -> Snapshot { + self.num_open_snapshots += 1; + Snapshot { undo_len: self.logs.len() } + } + + pub(crate) fn region_constraints_in_snapshot( + &self, + s: &Snapshot, + ) -> impl Iterator> + Clone { + self.logs[s.undo_len..].iter().filter_map(|log| match log { + UndoLog::RegionConstraintCollector(log) => Some(log), + _ => None, + }) + } + + pub(crate) fn opaque_types_in_snapshot(&self, s: &Snapshot) -> bool { + self.logs[s.undo_len..].iter().any(|log| matches!(log, UndoLog::OpaqueTypes(..))) + } + + fn assert_open_snapshot(&self, snapshot: &Snapshot) { + // Failures here may indicate a failure to follow a stack discipline. + assert!(self.logs.len() >= snapshot.undo_len); + assert!(self.num_open_snapshots > 0); + } +} + +impl<'db> std::ops::Index for InferCtxtUndoLogs<'db> { + type Output = UndoLog<'db>; + + fn index(&self, key: usize) -> &Self::Output { + &self.logs[key] + } +} + +impl<'db> std::ops::IndexMut for InferCtxtUndoLogs<'db> { + fn index_mut(&mut self, key: usize) -> &mut Self::Output { + &mut self.logs[key] + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/traits.rs new file mode 100644 index 0000000000000..68aa12d7bb0cb --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/traits.rs @@ -0,0 +1,239 @@ +//! Trait Resolution. See the [rustc-dev-guide] for more information on how this works. +//! +//! 
[rustc-dev-guide]: https://rustc-dev-guide.rust-lang.org/traits/resolution.html + +use std::{ + cmp, + hash::{Hash, Hasher}, +}; + +use rustc_type_ir::elaborate::Elaboratable; +use rustc_type_ir::{ + PredicatePolarity, Upcast, + solve::{Certainty, NoSolution}, +}; +use rustc_type_ir::{TypeFoldable, TypeVisitable}; + +use crate::next_solver::{ + Binder, Clause, DbInterner, Goal, ParamEnv, PolyTraitPredicate, Predicate, SolverDefId, Span, + TraitPredicate, Ty, +}; + +use super::InferCtxt; + +/// The reason why we incurred this obligation; used for error reporting. +/// +/// Non-misc `ObligationCauseCode`s are stored on the heap. This gives the +/// best trade-off between keeping the type small (which makes copies cheaper) +/// while not doing too many heap allocations. +/// +/// We do not want to intern this as there are a lot of obligation causes which +/// only live for a short period of time. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct ObligationCause { + // FIXME: This should contain an `ExprId`/`PatId` etc., and a cause code. But for now we + // don't report trait solving diagnostics, so this is irrelevant. + _private: (), +} + +impl ObligationCause { + #[expect( + clippy::new_without_default, + reason = "`new` is temporary, eventually we will provide span etc. here" + )] + #[inline] + pub fn new() -> ObligationCause { + ObligationCause { _private: () } + } + + #[inline] + pub fn dummy() -> ObligationCause { + ObligationCause::new() + } + + #[inline] + pub fn misc() -> ObligationCause { + ObligationCause::new() + } +} + +/// An `Obligation` represents some trait reference (e.g., `i32: Eq`) for +/// which the "impl_source" must be found. The process of finding an "impl_source" is +/// called "resolving" the `Obligation`. This process consists of +/// either identifying an `impl` (e.g., `impl Eq for i32`) that +/// satisfies the obligation, or else finding a bound that is in +/// scope. The eventual result is usually a `Selection` (defined below). +#[derive(Clone, Debug)] +pub struct Obligation<'db, T> { + /// The reason we have to prove this thing. + pub cause: ObligationCause, + + /// The environment in which we should prove this thing. + pub param_env: ParamEnv<'db>, + + /// The thing we are trying to prove. + pub predicate: T, + + /// If we started proving this as a result of trying to prove + /// something else, track the total depth to ensure termination. + /// If this goes over a certain threshold, we abort compilation -- + /// in such cases, we can not say whether or not the predicate + /// holds for certain. Stupid halting problem; such a drag. + pub recursion_depth: usize, +} + +/// For [`Obligation`], a sub-obligation is combined with the current obligation's +/// param-env and cause code. 
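+/// `ObligationCause` currently carries no data in this port, so the impl below simply
+/// wraps the elaborated clause in a new obligation that reuses the parent's
+/// `param_env` and resets the recursion depth to zero.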
+impl<'db> Elaboratable> for PredicateObligation<'db> { + fn predicate(&self) -> Predicate<'db> { + self.predicate + } + + fn child(&self, clause: Clause<'db>) -> Self { + Obligation { + cause: self.cause.clone(), + param_env: self.param_env, + recursion_depth: 0, + predicate: clause.as_predicate(), + } + } + + fn child_with_derived_cause( + &self, + clause: Clause<'db>, + span: Span, + parent_trait_pred: PolyTraitPredicate<'db>, + index: usize, + ) -> Self { + let cause = ObligationCause::new(); + Obligation { + cause, + param_env: self.param_env, + recursion_depth: 0, + predicate: clause.as_predicate(), + } + } +} + +impl<'db, T: TypeVisitable>> TypeVisitable> for Obligation<'db, T> { + fn visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + rustc_ast_ir::try_visit!(self.param_env.visit_with(visitor)); + self.predicate.visit_with(visitor) + } +} + +impl<'db, T: TypeFoldable>> TypeFoldable> for Obligation<'db, T> { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + Ok(Obligation { + cause: self.cause.clone(), + param_env: self.param_env.try_fold_with(folder)?, + predicate: self.predicate.try_fold_with(folder)?, + recursion_depth: self.recursion_depth, + }) + } + + fn fold_with>>(self, folder: &mut F) -> Self { + Obligation { + cause: self.cause.clone(), + param_env: self.param_env.fold_with(folder), + predicate: self.predicate.fold_with(folder), + recursion_depth: self.recursion_depth, + } + } +} + +impl<'db, T: Copy> Obligation<'db, T> { + pub fn as_goal(&self) -> Goal<'db, T> { + Goal { param_env: self.param_env, predicate: self.predicate } + } +} + +impl<'db, T: PartialEq> PartialEq> for Obligation<'db, T> { + #[inline] + fn eq(&self, other: &Obligation<'db, T>) -> bool { + // Ignore `cause` and `recursion_depth`. This is a small performance + // win for a few crates, and a huge performance win for the crate in + // https://github.com/rust-lang/rustc-perf/pull/1680, which greatly + // stresses the trait system. + self.param_env == other.param_env && self.predicate == other.predicate + } +} + +impl<'db, T: Eq> Eq for Obligation<'db, T> {} + +impl<'db, T: Hash> Hash for Obligation<'db, T> { + fn hash(&self, state: &mut H) { + // See the comment on `Obligation::eq`. + self.param_env.hash(state); + self.predicate.hash(state); + } +} + +impl<'db, P> From> for Goal<'db, P> { + fn from(value: Obligation<'db, P>) -> Self { + Goal { param_env: value.param_env, predicate: value.predicate } + } +} + +pub type PredicateObligation<'db> = Obligation<'db, Predicate<'db>>; +pub type TraitObligation<'db> = Obligation<'db, TraitPredicate<'db>>; + +pub type PredicateObligations<'db> = Vec>; + +impl<'db> PredicateObligation<'db> { + /// Flips the polarity of the inner predicate. + /// + /// Given `T: Trait` predicate it returns `T: !Trait` and given `T: !Trait` returns `T: Trait`. + pub fn flip_polarity(&self, tcx: DbInterner<'db>) -> Option> { + Some(PredicateObligation { + cause: self.cause.clone(), + param_env: self.param_env, + predicate: self.predicate.flip_polarity()?, + recursion_depth: self.recursion_depth, + }) + } +} + +impl<'db, O> Obligation<'db, O> { + pub fn new( + tcx: DbInterner<'db>, + cause: ObligationCause, + param_env: ParamEnv<'db>, + predicate: impl Upcast, O>, + ) -> Obligation<'db, O> { + Self::with_depth(tcx, cause, 0, param_env, predicate) + } + + /// We often create nested obligations without setting the correct depth. + /// + /// To deal with this evaluate and fulfill explicitly update the depth + /// of nested obligations using this function. 
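+ /// The resulting depth is `max(parent_depth + 1, self.recursion_depth)`, i.e. the
+ /// depth only ever grows.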
+ pub fn set_depth_from_parent(&mut self, parent_depth: usize) { + self.recursion_depth = cmp::max(parent_depth + 1, self.recursion_depth); + } + + pub fn with_depth( + tcx: DbInterner<'db>, + cause: ObligationCause, + recursion_depth: usize, + param_env: ParamEnv<'db>, + predicate: impl Upcast, O>, + ) -> Obligation<'db, O> { + let predicate = predicate.upcast(tcx); + Obligation { cause, param_env, recursion_depth, predicate } + } + + pub fn with
// This is a regular comment
+/// This is a doc comment
+fn main() {
+    // Another comment
+    println!("Hello, world!");
+}
\ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html index 00925bd81ed8e..d99b29cfb8fa6 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html @@ -127,9 +127,9 @@ let y = &mut x; let z = &y; - let Foo { x: z, y } = Foo { x: z, y }; + let Foo { x: z, y } = Foo { x: z, y }; - y; + y; let mut foo = Foo { x, y: x }; let foo2 = Foo { x, y: x }; @@ -142,7 +142,7 @@ copy.qux(); copy.baz(copy); - let a = |x| x; + let a = |x| x; let bar = Foo::baz; let baz = (-42,); @@ -172,13 +172,13 @@ } async fn learn_and_sing() { - let song = learn_song().await; - sing_song(song).await; + let song = learn_song().await; + sing_song(song).await; } async fn async_main() { let f1 = learn_and_sing(); - let f2 = dance(); + let f2 = dance(); futures::join!(f1, f2); } diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html index f7d798208037e..47ee2ad1c0d70 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html @@ -78,8 +78,8 @@ } use foo::bar as baz; -trait Bar = Baz; -trait Foo = Bar; +trait Bar = Baz; +trait Foo = Bar; fn main() { let a = '\n'; diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs index dd359326c61d6..8198701d68432 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs @@ -9,6 +9,7 @@ use crate::{FileRange, HighlightConfig, HlTag, TextRange, fixture}; const HL_CONFIG: HighlightConfig = HighlightConfig { strings: true, + comments: true, punctuation: true, specialize_punctuation: true, specialize_operator: true, @@ -1220,16 +1221,25 @@ fn foo(x: &fn(&dyn Trait)) {} /// Highlights the code given by the `ra_fixture` argument, renders the /// result as HTML, and compares it with the HTML file given as `snapshot`. /// Note that the `snapshot` file is overwritten by the rendered HTML. 
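+/// The `config` parameter lets individual tests override the default `HL_CONFIG`,
+/// e.g. to disable comment highlighting.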
-fn check_highlighting( +fn check_highlighting_with_config( #[rust_analyzer::rust_fixture] ra_fixture: &str, + config: HighlightConfig, expect: ExpectFile, rainbow: bool, ) { let (analysis, file_id) = fixture::file(ra_fixture.trim()); - let actual_html = &analysis.highlight_as_html(file_id, rainbow).unwrap(); + let actual_html = &analysis.highlight_as_html_with_config(config, file_id, rainbow).unwrap(); expect.assert_eq(actual_html) } +fn check_highlighting( + #[rust_analyzer::rust_fixture] ra_fixture: &str, + expect: ExpectFile, + rainbow: bool, +) { + check_highlighting_with_config(ra_fixture, HL_CONFIG, expect, rainbow) +} + #[test] fn benchmark_syntax_highlighting_long_struct() { if skip_slow_tests() { @@ -1435,3 +1445,24 @@ fn main() { false, ); } + +#[test] +fn test_comment_highlighting_disabled() { + // Test that comments are not highlighted when disabled + check_highlighting_with_config( + r#" +// This is a regular comment +/// This is a doc comment +fn main() { + // Another comment + println!("Hello, world!"); +} +"#, + HighlightConfig { + comments: false, // Disable comment highlighting + ..HL_CONFIG + }, + expect_file!["./test_data/highlight_comments_disabled.html"], + false, + ); +} diff --git a/src/tools/rust-analyzer/crates/ide/src/test_explorer.rs b/src/tools/rust-analyzer/crates/ide/src/test_explorer.rs index 06cbd50e946ac..bd60ffe559120 100644 --- a/src/tools/rust-analyzer/crates/ide/src/test_explorer.rs +++ b/src/tools/rust-analyzer/crates/ide/src/test_explorer.rs @@ -94,7 +94,7 @@ fn discover_tests_in_module( if !f.is_test(db) { continue; } - let nav = f.try_to_nav(db).map(|r| r.call_site); + let nav = f.try_to_nav(&sema).map(|r| r.call_site); let fn_name = f.name(db).as_str().to_owned(); r.push(TestItem { id: format!("{prefix_id}::{fn_name}"), diff --git a/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs index 63701a4d15e94..950f3f6c64706 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs @@ -3,6 +3,7 @@ use std::fmt; use hir::{DisplayTarget, Field, HirDisplay, Layout, Semantics, Type}; use ide_db::{ RootDatabase, + base_db::salsa, defs::Definition, helpers::{get_definition, pick_best_token}, }; @@ -138,10 +139,12 @@ pub(crate) fn view_memory_layout( nodes[parent_idx].children_len = fields.len() as u64; for (field, child_ty) in fields.iter() { - if let Ok(child_layout) = child_ty.layout(db) { + if let Ok(child_layout) = salsa::attach(db, || child_ty.layout(db)) { nodes.push(MemoryLayoutNode { item_name: field.name(db), - typename: child_ty.display(db, display_target).to_string(), + typename: salsa::attach(db, || { + child_ty.display(db, display_target).to_string() + }), size: child_layout.size(), alignment: child_layout.align(), offset: match *field { @@ -169,13 +172,13 @@ pub(crate) fn view_memory_layout( } for (i, (_, child_ty)) in fields.iter().enumerate() { - if let Ok(child_layout) = child_ty.layout(db) { + if let Ok(child_layout) = salsa::attach(db, || child_ty.layout(db)) { read_layout(nodes, db, child_ty, &child_layout, children_start + i, display_target); } } } - ty.layout(db) + salsa::attach(db, || ty.layout(db)) .map(|layout| { let item_name = match def { // def is a datatype @@ -188,7 +191,7 @@ pub(crate) fn view_memory_layout( def => def.name(db).map(|n| n.as_str().to_owned()).unwrap_or("[ROOT]".to_owned()), }; - let typename = ty.display(db, display_target).to_string(); + let typename = 
salsa::attach(db, || ty.display(db, display_target).to_string()); let mut nodes = vec![MemoryLayoutNode { item_name, diff --git a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs index 4780743c4d92a..1db4f8ecd6bab 100644 --- a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs +++ b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs @@ -178,6 +178,8 @@ define_symbols! { core, coroutine_state, coroutine, + coroutine_return, + coroutine_yield, count, crate_type, CStr, @@ -226,6 +228,8 @@ define_symbols! { async_fn_once_output, async_fn_mut, async_fn, + call_ref_future, + call_once_future, fn_ptr_addr, fn_ptr_trait, format_alignment, @@ -416,6 +420,7 @@ define_symbols! { rustc_allow_incoherent_impl, rustc_builtin_macro, rustc_coherence_is_core, + rustc_coinductive, rustc_const_panic_str, rustc_deallocator, rustc_deprecated_safe_2024, @@ -432,6 +437,7 @@ define_symbols! { rustc_safe_intrinsic, rustc_skip_array_during_method_dispatch, rustc_skip_during_method_dispatch, + rustc_force_inline, semitransparent, shl_assign, shl, diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs index cb1b59f649774..d419817e5cd70 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs @@ -182,12 +182,6 @@ fn type_bound(p: &mut Parser<'_>) -> bool { ); m.complete(p, USE_BOUND_GENERIC_ARGS); } - T![?] if p.nth_at(1, T![for]) => { - // test question_for_type_trait_bound - // fn f() where T: ?for<> Sized {} - p.bump_any(); - types::for_type(p, false) - } _ => { if path_type_bound(p).is_err() { m.abandon(p); @@ -219,8 +213,13 @@ fn path_type_bound(p: &mut Parser<'_>) -> Result<(), ()> { // test async_trait_bound // fn async_foo(_: impl async Fn(&i32)) {} p.eat(T![async]); + // test question_for_type_trait_bound + // fn f() where T: for<> ?Sized {} p.eat(T![?]); + // test_err invalid_question_for_type_trait_bound + // fn f() where T: ?for<> Sized {} + if paths::is_use_path_start(p) { types::path_type_bounds(p, false); // test_err type_bounds_macro_call_recovery diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items/traits.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items/traits.rs index 47f86ce8c6cc4..c1b1a3fc8a94a 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/items/traits.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items/traits.rs @@ -20,7 +20,7 @@ pub(super) fn trait_(p: &mut Parser<'_>, m: Marker) { // trait Z = where Self: T; generic_params::opt_where_clause(p); p.expect(T![;]); - m.complete(p, TRAIT_ALIAS); + m.complete(p, TRAIT); return; } diff --git a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs index 8fff1c3db7485..edc3f406a67e8 100644 --- a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs +++ b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs @@ -149,6 +149,24 @@ impl<'a> Converter<'a> { } } + /// Check for likely unterminated string by analyzing STRING token content + fn has_likely_unterminated_string(&self) -> bool { + let Some(last_idx) = self.res.kind.len().checked_sub(1) else { return false }; + + for i in (0..=last_idx).rev().take(5) { + if self.res.kind[i] == STRING { + let start = self.res.start[i] as usize; + let end = self.res.start.get(i + 1).map(|&s| s as 
usize).unwrap_or(self.offset); + let content = &self.res.text[start..end]; + + if content.contains('(') && (content.contains("//") || content.contains(";\n")) { + return true; + } + } + } + false + } + fn finalize_with_eof(mut self) -> LexedStr<'a> { self.res.push(EOF, self.offset); self.res @@ -267,7 +285,16 @@ impl<'a> Converter<'a> { rustc_lexer::TokenKind::Unknown => ERROR, rustc_lexer::TokenKind::UnknownPrefix if token_text == "builtin" => IDENT, rustc_lexer::TokenKind::UnknownPrefix => { - errors.push("unknown literal prefix".into()); + let has_unterminated = self.has_likely_unterminated_string(); + + let error_msg = if has_unterminated { + format!( + "unknown literal prefix `{token_text}` (note: check for unterminated string literal)" + ) + } else { + "unknown literal prefix".to_owned() + }; + errors.push(error_msg); IDENT } rustc_lexer::TokenKind::Eof => EOF, diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs index 3a8041d2df9ee..93e02a92abdad 100644 --- a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs +++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs @@ -284,7 +284,6 @@ pub enum SyntaxKind { STRUCT, TOKEN_TREE, TRAIT, - TRAIT_ALIAS, TRY_EXPR, TUPLE_EXPR, TUPLE_FIELD, @@ -457,7 +456,6 @@ impl SyntaxKind { | STRUCT | TOKEN_TREE | TRAIT - | TRAIT_ALIAS | TRY_EXPR | TUPLE_EXPR | TUPLE_FIELD diff --git a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs index c642e1a3354fc..a3cfe64e6e739 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs @@ -796,6 +796,12 @@ mod err { #[test] fn impl_type() { run_and_expect_errors("test_data/parser/inline/err/impl_type.rs"); } #[test] + fn invalid_question_for_type_trait_bound() { + run_and_expect_errors( + "test_data/parser/inline/err/invalid_question_for_type_trait_bound.rs", + ); + } + #[test] fn let_else_right_curly_brace() { run_and_expect_errors("test_data/parser/inline/err/let_else_right_curly_brace.rs"); } diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unterminated_string_unknown_prefix.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unterminated_string_unknown_prefix.rast new file mode 100644 index 0000000000000..f7f24ca3f810a --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unterminated_string_unknown_prefix.rast @@ -0,0 +1,15 @@ +FN_KW "fn" +WHITESPACE " " +IDENT "main" +L_PAREN "(" +R_PAREN ")" +WHITESPACE " " +L_CURLY "{" +WHITESPACE "\n " +IDENT "hello" +L_PAREN "(" +STRING "\"world);\n // a bunch of code was here\n env(\"FLAGS" +STRING "\", \"" +MINUS "-" +IDENT "help" error: unknown literal prefix `help` (note: check for unterminated string literal) +STRING "\")\n}" error: Missing trailing `"` symbol to terminate the string literal diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unterminated_string_unknown_prefix.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unterminated_string_unknown_prefix.rs new file mode 100644 index 0000000000000..338b9582605bd --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/unterminated_string_unknown_prefix.rs @@ -0,0 +1,5 @@ +fn main() { + hello("world); + // a bunch of code was here + env("FLAGS", "-help") +} \ No newline at end of file diff 
--git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/invalid_question_for_type_trait_bound.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/invalid_question_for_type_trait_bound.rast new file mode 100644 index 0000000000000..b060ee81d6178 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/invalid_question_for_type_trait_bound.rast @@ -0,0 +1,49 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "f" + GENERIC_PARAM_LIST + L_ANGLE "<" + TYPE_PARAM + NAME + IDENT "T" + R_ANGLE ">" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + WHERE_CLAUSE + WHERE_KW "where" + WHITESPACE " " + WHERE_PRED + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "T" + COLON ":" + WHITESPACE " " + TYPE_BOUND_LIST + QUESTION "?" + WHERE_PRED + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + R_ANGLE ">" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Sized" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" +error 20: expected comma +error 31: expected colon diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/invalid_question_for_type_trait_bound.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/invalid_question_for_type_trait_bound.rs new file mode 100644 index 0000000000000..f80dd90d446c5 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/invalid_question_for_type_trait_bound.rs @@ -0,0 +1 @@ +fn f() where T: ?for<> Sized {} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rast index cb296153c8f1a..69db1aee2c5a5 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rast @@ -27,19 +27,18 @@ SOURCE_FILE WHITESPACE " " TYPE_BOUND_LIST TYPE_BOUND + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + R_ANGLE ">" + WHITESPACE " " QUESTION "?" 
- FOR_TYPE - FOR_BINDER - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - R_ANGLE ">" - WHITESPACE " " - PATH_TYPE - PATH - PATH_SEGMENT - NAME_REF - IDENT "Sized" + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Sized" WHITESPACE " " BLOCK_EXPR STMT_LIST diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rs index f80dd90d446c5..96353df3b9b33 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rs @@ -1 +1 @@ -fn f() where T: ?for<> Sized {} +fn f() where T: for<> ?Sized {} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_alias.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_alias.rast index c45f870898007..2ef66484ae48f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_alias.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_alias.rast @@ -1,5 +1,5 @@ SOURCE_FILE - TRAIT_ALIAS + TRAIT TRAIT_KW "trait" WHITESPACE " " NAME diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_alias_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_alias_where_clause.rast index 8f678247731dc..4443d9d142630 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_alias_where_clause.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/trait_alias_where_clause.rast @@ -1,5 +1,5 @@ SOURCE_FILE - TRAIT_ALIAS + TRAIT TRAIT_KW "trait" WHITESPACE " " NAME @@ -50,7 +50,7 @@ SOURCE_FILE IDENT "Copy" SEMICOLON ";" WHITESPACE "\n" - TRAIT_ALIAS + TRAIT TRAIT_KW "trait" WHITESPACE " " NAME diff --git a/src/tools/rust-analyzer/crates/profile/src/stop_watch.rs b/src/tools/rust-analyzer/crates/profile/src/stop_watch.rs index 9f3e636ef816a..00c37c01d25e2 100644 --- a/src/tools/rust-analyzer/crates/profile/src/stop_watch.rs +++ b/src/tools/rust-analyzer/crates/profile/src/stop_watch.rs @@ -37,10 +37,10 @@ impl StopWatch { .build() .map_err(|err| eprintln!("Failed to create perf counter: {err}")) .ok(); - if let Some(counter) = &mut counter { - if let Err(err) = counter.enable() { - eprintln!("Failed to start perf counter: {err}") - } + if let Some(counter) = &mut counter + && let Err(err) = counter.enable() + { + eprintln!("Failed to start perf counter: {err}") } counter } else { diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs index 5bea74bed7ed2..203173c11be40 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs @@ -347,9 +347,7 @@ impl WorkspaceBuildScripts { match message { Message::BuildScriptExecuted(mut message) => { with_output_for(&message.package_id.repr, &mut |name, data| { - progress(format!( - "building compile-time-deps: build script {name} run" - )); + progress(format!("build script {name} run")); let cfgs = { let mut acc = Vec::new(); for cfg in &message.cfgs { @@ -380,9 +378,7 @@ impl WorkspaceBuildScripts { } Message::CompilerArtifact(message) => { with_output_for(&message.package_id.repr, &mut |name, data| { - progress(format!( - 
"building compile-time-deps: proc-macro {name} built" - )); + progress(format!("proc-macro {name} built")); if data.proc_macro_dylib_path == ProcMacroDylibPath::NotBuilt { data.proc_macro_dylib_path = ProcMacroDylibPath::NotProcMacro; } diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs index d39781b15066d..e36b904881513 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/lib.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs @@ -18,7 +18,7 @@ pub mod project_json; pub mod toolchain_info { pub mod rustc_cfg; - pub mod target_data_layout; + pub mod target_data; pub mod target_tuple; pub mod version; diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs index ed72520f40d4d..987d381fac638 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs @@ -9,7 +9,6 @@ use paths::{AbsPath, AbsPathBuf, Utf8Path, Utf8PathBuf}; use rustc_hash::FxHashMap; use serde::de::DeserializeOwned; use span::FileId; -use triomphe::Arc; use crate::{ CargoWorkspace, CfgOverrides, ManifestPath, ProjectJson, ProjectJsonData, ProjectWorkspace, @@ -47,7 +46,7 @@ fn load_workspace_from_metadata(file: &str) -> ProjectWorkspace { sysroot: Sysroot::empty(), rustc_cfg: Vec::new(), toolchain: None, - target_layout: Err("target_data_layout not loaded".into()), + target: Err("target_data_layout not loaded".into()), extra_includes: Vec::new(), set_test: true, } @@ -62,7 +61,7 @@ fn load_rust_project(file: &str) -> (CrateGraphBuilder, ProcMacroPaths) { sysroot, rustc_cfg: Vec::new(), toolchain: None, - target_layout: Err(Arc::from("test has no data layout")), + target: Err("test has no target data".into()), cfg_overrides: Default::default(), extra_includes: Vec::new(), set_test: true, @@ -265,7 +264,7 @@ fn smoke_test_real_sysroot_cargo() { rustc_cfg: Vec::new(), cfg_overrides: Default::default(), toolchain: None, - target_layout: Err("target_data_layout not loaded".into()), + target: Err("target_data_layout not loaded".into()), extra_includes: Vec::new(), set_test: true, }; diff --git a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_data_layout.rs b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_data.rs similarity index 70% rename from src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_data_layout.rs rename to src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_data.rs index a28f468e692d0..b815c0b79718e 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_data_layout.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_data.rs @@ -1,36 +1,64 @@ //! Runs `rustc --print target-spec-json` to get the target_data_layout. 
use anyhow::Context; +use base_db::target; use rustc_hash::FxHashMap; +use serde_derive::Deserialize; use toolchain::Tool; use crate::{Sysroot, toolchain_info::QueryConfig, utf8_stdout}; +#[derive(Debug, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub enum Arch { + Wasm32, + Wasm64, + #[serde(other)] + Other, +} + +impl From for target::Arch { + fn from(value: Arch) -> Self { + match value { + Arch::Wasm32 => target::Arch::Wasm32, + Arch::Wasm64 => target::Arch::Wasm64, + Arch::Other => target::Arch::Other, + } + } +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub struct TargetSpec { + pub data_layout: String, + pub arch: Arch, +} + /// Uses `rustc --print target-spec-json`. pub fn get( config: QueryConfig<'_>, target: Option<&str>, extra_env: &FxHashMap>, -) -> anyhow::Result { +) -> anyhow::Result { const RUSTC_ARGS: [&str; 2] = ["--print", "target-spec-json"]; let process = |output: String| { - (|| Some(output.split_once(r#""data-layout": ""#)?.1.split_once('"')?.0.to_owned()))() - .ok_or_else(|| { - anyhow::format_err!("could not parse target-spec-json from command output") - }) + let target_spec = serde_json::from_str::(&output).map_err(|_| { + anyhow::format_err!("could not parse target-spec-json from command output") + })?; + Ok(target::TargetData { + arch: target_spec.arch.into(), + data_layout: target_spec.data_layout.into_boxed_str(), + }) }; let (sysroot, current_dir) = match config { QueryConfig::Cargo(sysroot, cargo_toml, _) => { let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env); cmd.env("RUSTC_BOOTSTRAP", "1"); - cmd.args(["rustc", "-Z", "unstable-options"]).args(RUSTC_ARGS).args([ - "--", - "-Z", - "unstable-options", - ]); + cmd.args(["rustc", "-Z", "unstable-options"]).args(RUSTC_ARGS); if let Some(target) = target { cmd.args(["--target", target]); } + cmd.args(["--", "-Z", "unstable-options"]); match utf8_stdout(&mut cmd) { Ok(output) => return process(output), Err(e) => { diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index 5b36e10fd6925..e0d2105c8df89 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -8,7 +8,7 @@ use anyhow::Context; use base_db::{ CrateBuilderId, CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CrateWorkspaceData, DependencyBuilder, Env, LangCrateOrigin, ProcMacroLoadingError, - ProcMacroPaths, TargetLayoutLoadResult, + ProcMacroPaths, target::TargetLoadResult, }; use cfg::{CfgAtom, CfgDiff, CfgOptions}; use intern::{Symbol, sym}; @@ -30,7 +30,7 @@ use crate::{ env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env}, project_json::{Crate, CrateArrayIdx}, sysroot::RustLibSrcWorkspace, - toolchain_info::{QueryConfig, rustc_cfg, target_data_layout, target_tuple, version}, + toolchain_info::{QueryConfig, rustc_cfg, target_data, target_tuple, version}, utf8_stdout, }; use tracing::{debug, error, info}; @@ -63,7 +63,7 @@ pub struct ProjectWorkspace { /// The toolchain version used by this workspace. pub toolchain: Option, /// The target data layout queried for workspace. - pub target_layout: TargetLayoutLoadResult, + pub target: TargetLoadResult, /// A set of cfg overrides for this workspace. pub cfg_overrides: CfgOverrides, /// Additional includes to add for the VFS. 
@@ -115,7 +115,7 @@ impl fmt::Debug for ProjectWorkspace { sysroot, rustc_cfg, toolchain, - target_layout, + target: target_layout, cfg_overrides, extra_includes, set_test, @@ -157,7 +157,6 @@ impl fmt::Debug for ProjectWorkspace { .field("file", &file) .field("cargo_script", &cargo_script.is_some()) .field("n_sysroot_crates", &sysroot.num_packages()) - .field("cargo_script", &cargo_script.is_some()) .field("n_rustc_cfg", &rustc_cfg.len()) .field("toolchain", &toolchain) .field("data_layout", &target_layout) @@ -310,8 +309,8 @@ impl ProjectWorkspace { let rustc_cfg = s.spawn(|| { rustc_cfg::get(toolchain_config, targets.first().map(Deref::deref), extra_env) }); - let data_layout = s.spawn(|| { - target_data_layout::get( + let target_data = s.spawn(|| { + target_data::get( toolchain_config, targets.first().map(Deref::deref), extra_env, @@ -393,7 +392,7 @@ impl ProjectWorkspace { s.spawn(move || cargo_config_env(cargo_toml, &config_file)); thread::Result::Ok(( rustc_cfg.join()?, - data_layout.join()?, + target_data.join()?, rustc_dir.join()?, loaded_sysroot.join()?, cargo_metadata.join()?, @@ -443,7 +442,7 @@ impl ProjectWorkspace { rustc_cfg, cfg_overrides: cfg_overrides.clone(), toolchain, - target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())), + target: data_layout.map_err(|it| it.to_string().into()), extra_includes: extra_includes.clone(), set_test: *set_test, }) @@ -481,11 +480,7 @@ impl ProjectWorkspace { rustc_cfg::get(query_config, targets.first().map(Deref::deref), &config.extra_env) }); let data_layout = s.spawn(|| { - target_data_layout::get( - query_config, - targets.first().map(Deref::deref), - &config.extra_env, - ) + target_data::get(query_config, targets.first().map(Deref::deref), &config.extra_env) }); let loaded_sysroot = s.spawn(|| { if let Some(sysroot_project) = sysroot_project { @@ -514,7 +509,7 @@ impl ProjectWorkspace { thread::Result::Ok((rustc_cfg.join()?, data_layout.join()?, loaded_sysroot.join()?)) }); - let (rustc_cfg, target_layout, loaded_sysroot) = match join { + let (rustc_cfg, target_data, loaded_sysroot) = match join { Ok(it) => it, Err(e) => std::panic::resume_unwind(e), }; @@ -528,7 +523,7 @@ impl ProjectWorkspace { sysroot, rustc_cfg, toolchain, - target_layout: target_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())), + target: target_data.map_err(|it| it.to_string().into()), cfg_overrides: config.cfg_overrides.clone(), extra_includes: config.extra_includes.clone(), set_test: config.set_test, @@ -552,7 +547,7 @@ impl ProjectWorkspace { let targets = target_tuple::get(query_config, config.target.as_deref(), &config.extra_env) .unwrap_or_default(); let rustc_cfg = rustc_cfg::get(query_config, None, &config.extra_env); - let data_layout = target_data_layout::get(query_config, None, &config.extra_env); + let target_data = target_data::get(query_config, None, &config.extra_env); let target_dir = config .target_dir .clone() @@ -611,7 +606,7 @@ impl ProjectWorkspace { sysroot, rustc_cfg, toolchain, - target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())), + target: target_data.map_err(|it| it.to_string().into()), cfg_overrides: config.cfg_overrides.clone(), extra_includes: config.extra_includes.clone(), set_test: config.set_test, @@ -943,7 +938,7 @@ impl ProjectWorkspace { let Self { kind, sysroot, cfg_overrides, rustc_cfg, .. 
} = self; let crate_ws_data = Arc::new(CrateWorkspaceData { toolchain: self.toolchain.clone(), - data_layout: self.target_layout.clone(), + target: self.target.clone(), }); let (crate_graph, proc_macros) = match kind { ProjectWorkspaceKind::Json(project) => project_json_to_crate_graph( @@ -1001,13 +996,15 @@ impl ProjectWorkspace { } pub fn eq_ignore_build_data(&self, other: &Self) -> bool { - let Self { kind, sysroot, rustc_cfg, toolchain, target_layout, cfg_overrides, .. } = self; + let Self { + kind, sysroot, rustc_cfg, toolchain, target: target_layout, cfg_overrides, .. + } = self; let Self { kind: o_kind, sysroot: o_sysroot, rustc_cfg: o_rustc_cfg, toolchain: o_toolchain, - target_layout: o_target_layout, + target: o_target_layout, cfg_overrides: o_cfg_overrides, .. } = other; diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt index 3722e2c721686..4f6ce4dc95374 100644 --- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt @@ -70,7 +70,7 @@ }, }, ws_data: CrateWorkspaceData { - data_layout: Err( + target: Err( "target_data_layout not loaded", ), toolchain: None, @@ -155,7 +155,7 @@ }, }, ws_data: CrateWorkspaceData { - data_layout: Err( + target: Err( "target_data_layout not loaded", ), toolchain: None, @@ -240,7 +240,7 @@ }, }, ws_data: CrateWorkspaceData { - data_layout: Err( + target: Err( "target_data_layout not loaded", ), toolchain: None, @@ -325,7 +325,7 @@ }, }, ws_data: CrateWorkspaceData { - data_layout: Err( + target: Err( "target_data_layout not loaded", ), toolchain: None, @@ -406,7 +406,7 @@ }, }, ws_data: CrateWorkspaceData { - data_layout: Err( + target: Err( "target_data_layout not loaded", ), toolchain: None, diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt index 3722e2c721686..4f6ce4dc95374 100644 --- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt @@ -70,7 +70,7 @@ }, }, ws_data: CrateWorkspaceData { - data_layout: Err( + target: Err( "target_data_layout not loaded", ), toolchain: None, @@ -155,7 +155,7 @@ }, }, ws_data: CrateWorkspaceData { - data_layout: Err( + target: Err( "target_data_layout not loaded", ), toolchain: None, @@ -240,7 +240,7 @@ }, }, ws_data: CrateWorkspaceData { - data_layout: Err( + target: Err( "target_data_layout not loaded", ), toolchain: None, @@ -325,7 +325,7 @@ }, }, ws_data: CrateWorkspaceData { - data_layout: Err( + target: Err( "target_data_layout not loaded", ), toolchain: None, @@ -406,7 +406,7 @@ }, }, ws_data: CrateWorkspaceData { - data_layout: Err( + target: Err( "target_data_layout not loaded", ), toolchain: None, diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt index 7b156ea63a58f..6862918e09ae6 100644 
--- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt @@ -69,7 +69,7 @@ }, }, ws_data: CrateWorkspaceData { - data_layout: Err( + target: Err( "target_data_layout not loaded", ), toolchain: None, @@ -153,7 +153,7 @@ }, }, ws_data: CrateWorkspaceData { - data_layout: Err( + target: Err( "target_data_layout not loaded", ), toolchain: None, @@ -237,7 +237,7 @@ }, }, ws_data: CrateWorkspaceData { - data_layout: Err( + target: Err( "target_data_layout not loaded", ), toolchain: None, @@ -321,7 +321,7 @@ }, }, ws_data: CrateWorkspaceData { - data_layout: Err( + target: Err( "target_data_layout not loaded", ), toolchain: None, @@ -402,7 +402,7 @@ }, }, ws_data: CrateWorkspaceData { - data_layout: Err( + target: Err( "target_data_layout not loaded", ), toolchain: None, diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt index 98fe598eb3a32..28ad3236ae813 100644 --- a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt @@ -43,8 +43,8 @@ entries: {}, }, ws_data: CrateWorkspaceData { - data_layout: Err( - "test has no data layout", + target: Err( + "test has no target data", ), toolchain: None, }, @@ -93,8 +93,8 @@ entries: {}, }, ws_data: CrateWorkspaceData { - data_layout: Err( - "test has no data layout", + target: Err( + "test has no target data", ), toolchain: None, }, diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt index 0dc373b5b47ed..dabb3aa674414 100644 --- a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt @@ -40,8 +40,8 @@ entries: {}, }, ws_data: CrateWorkspaceData { - data_layout: Err( - "test has no data layout", + target: Err( + "test has no target data", ), toolchain: None, }, diff --git a/src/tools/rust-analyzer/crates/query-group-macro/src/queries.rs b/src/tools/rust-analyzer/crates/query-group-macro/src/queries.rs index c151cca07272a..22a26c49fa530 100644 --- a/src/tools/rust-analyzer/crates/query-group-macro/src/queries.rs +++ b/src/tools/rust-analyzer/crates/query-group-macro/src/queries.rs @@ -48,7 +48,8 @@ impl ToTokens for TrackedQuery { quote!(#(#options),*) }) .into_iter() - .chain(self.lru.map(|lru| quote!(lru = #lru))); + .chain(self.lru.map(|lru| quote!(lru = #lru))) + .chain(Some(quote!(unsafe(non_update_return_type)))); let annotation = quote!(#[salsa_macros::tracked( #(#options),* )]); let pat_and_tys = &self.pat_and_tys; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs index ab045e0bf9ff1..cc8db1b841ea4 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs @@ -160,9 +160,9 @@ fn setup_logging(log_file_flag: Option) -> anyhow::Result<()> { rust_analyzer::tracing::Config { writer, - // 
Deliberately enable all `error` logs if the user has not set RA_LOG, as there is usually + // Deliberately enable all `warn` logs if the user has not set RA_LOG, as there is usually // useful information in there for debugging. - filter: env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_owned()), + filter: env::var("RA_LOG").ok().unwrap_or_else(|| "warn".to_owned()), chalk_filter: env::var("CHALK_DEBUG").ok(), profile_filter: env::var("RA_PROFILE").ok(), json_profile_filter: std::env::var("RA_PROFILE_JSON").ok(), @@ -208,13 +208,24 @@ fn run_server() -> anyhow::Result<()> { tracing::info!("InitializeParams: {}", initialize_params); let lsp_types::InitializeParams { root_uri, - capabilities, + mut capabilities, workspace_folders, initialization_options, client_info, .. } = from_json::("InitializeParams", &initialize_params)?; + // lsp-types has a typo in the `/capabilities/workspace/diagnostics` field, its typoed as `diagnostic` + if let Some(val) = initialize_params.pointer("/capabilities/workspace/diagnostics") + && let Ok(diag_caps) = from_json::( + "DiagnosticWorkspaceClientCapabilities", + val, + ) + { + tracing::info!("Patching lsp-types workspace diagnostics capabilities: {diag_caps:#?}"); + capabilities.workspace.get_or_insert_default().diagnostic.get_or_insert(diag_caps); + } + let root_path = match root_uri .and_then(|it| it.to_file_path().ok()) .map(patch_path_prefix) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs index 97886844a9f9e..9551536cf4a51 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -10,15 +10,17 @@ use std::{ use cfg::{CfgAtom, CfgDiff}; use hir::{ - Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, ImportPathConfig, ModuleDef, Name, + Adt, AssocItem, Crate, DefWithBody, FindPathConfig, HasCrate, HasSource, HirDisplay, ModuleDef, + Name, db::{DefDatabase, ExpandDatabase, HirDatabase}, + next_solver::{DbInterner, GenericArgs}, }; use hir_def::{ SyntheticSyntax, expr_store::BodySourceMap, hir::{ExprId, PatId}, }; -use hir_ty::{Interner, Substitution, TyExt, TypeFlags}; +use hir_ty::{Interner, TyExt, TypeFlags}; use ide::{ Analysis, AnalysisHost, AnnotationConfig, DiagnosticsConfig, Edition, InlayFieldsToResolve, InlayHintsConfig, LineCol, RootDatabase, @@ -331,7 +333,7 @@ impl flags::AnalysisStats { } if self.run_all_ide_things { - self.run_ide_things(host.analysis(), file_ids.clone()); + self.run_ide_things(host.analysis(), file_ids.clone(), db, &vfs, verbosity); } if self.run_term_search { @@ -361,6 +363,7 @@ impl flags::AnalysisStats { let mut all = 0; let mut fail = 0; for &a in adts { + let interner = DbInterner::new_with(db, Some(a.krate(db).base()), None); let generic_params = db.generic_params(a.into()); if generic_params.iter_type_or_consts().next().is_some() || generic_params.iter_lt().next().is_some() @@ -371,7 +374,7 @@ impl flags::AnalysisStats { all += 1; let Err(e) = db.layout_of_adt( hir_def::AdtId::from(a), - Substitution::empty(Interner), + GenericArgs::new_from_iter(interner, []), db.trait_environment(a.into()), ) else { continue; @@ -390,15 +393,27 @@ impl flags::AnalysisStats { } fn run_const_eval(&self, db: &RootDatabase, bodies: &[DefWithBody], verbosity: Verbosity) { + let len = bodies + .iter() + .filter(|body| matches!(body, DefWithBody::Const(_) | DefWithBody::Static(_))) + .count(); + let mut bar = match 
verbosity { + Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(), + _ if self.parallel || self.output.is_some() => ProgressReport::hidden(), + _ => ProgressReport::new(len), + }; + let mut sw = self.stop_watch(); let mut all = 0; let mut fail = 0; for &b in bodies { + bar.set_message(move || format!("const eval: {}", full_name(db, b, b.module(db)))); let res = match b { DefWithBody::Const(c) => c.eval(db), DefWithBody::Static(s) => s.eval(db), _ => continue, }; + bar.inc(1); all += 1; let Err(error) = res else { continue; @@ -406,10 +421,11 @@ impl flags::AnalysisStats { if verbosity.is_spammy() { let full_name = full_name_of_item(db, b.module(db), b.name(db).unwrap_or(Name::missing())); - println!("Const eval for {full_name} failed due {error:?}"); + bar.println(format!("Const eval for {full_name} failed due {error:?}")); } fail += 1; } + bar.finish_and_clear(); let const_eval_time = sw.elapsed(); eprintln!("{:<20} {}", "Const evaluation:", const_eval_time); eprintln!("Failed const evals: {fail} ({}%)", percentage(fail, all)); @@ -535,7 +551,7 @@ impl flags::AnalysisStats { .gen_source_code( &scope, &mut formatter, - ImportPathConfig { + FindPathConfig { prefer_no_std: false, prefer_prelude: true, prefer_absolute: false, @@ -659,6 +675,10 @@ impl flags::AnalysisStats { let mut all = 0; let mut fail = 0; for &body_id in bodies { + bar.set_message(move || { + format!("mir lowering: {}", full_name(db, body_id, body_id.module(db))) + }); + bar.inc(1); if matches!(body_id, DefWithBody::Variant(_)) { continue; } @@ -1086,12 +1106,29 @@ impl flags::AnalysisStats { report_metric("body lowering time", body_lowering_time.time.as_millis() as u64, "ms"); } - fn run_ide_things(&self, analysis: Analysis, mut file_ids: Vec) { + fn run_ide_things( + &self, + analysis: Analysis, + mut file_ids: Vec, + db: &RootDatabase, + vfs: &Vfs, + verbosity: Verbosity, + ) { + let len = file_ids.len(); + let create_bar = || match verbosity { + Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(), + _ if self.parallel || self.output.is_some() => ProgressReport::hidden(), + _ => ProgressReport::new(len), + }; + file_ids.sort(); file_ids.dedup(); let mut sw = self.stop_watch(); + let mut bar = create_bar(); for &file_id in &file_ids { + let msg = format!("diagnostics: {}", vfs.file_path(file_id.file_id(db))); + bar.set_message(move || msg.clone()); _ = analysis.full_diagnostics( &DiagnosticsConfig { enabled: true, @@ -1118,8 +1155,14 @@ impl flags::AnalysisStats { ide::AssistResolveStrategy::All, analysis.editioned_file_id_to_vfs(file_id), ); + bar.inc(1); } + bar.finish_and_clear(); + + let mut bar = create_bar(); for &file_id in &file_ids { + let msg = format!("inlay hints: {}", vfs.file_path(file_id.file_id(db))); + bar.set_message(move || msg.clone()); _ = analysis.inlay_hints( &InlayHintsConfig { render_colons: false, @@ -1134,6 +1177,7 @@ impl flags::AnalysisStats { }, chaining_hints: true, adjustment_hints: ide::AdjustmentHints::Always, + adjustment_hints_disable_reborrows: true, adjustment_hints_mode: ide::AdjustmentHintsMode::Postfix, adjustment_hints_hide_outside_unsafe: false, closure_return_type_hints: ide::ClosureReturnTypeHints::Always, @@ -1154,8 +1198,14 @@ impl flags::AnalysisStats { analysis.editioned_file_id_to_vfs(file_id), None, ); + bar.inc(1); } + bar.finish_and_clear(); + + let mut bar = create_bar(); for &file_id in &file_ids { + let msg = format!("annotations: {}", vfs.file_path(file_id.file_id(db))); + bar.set_message(move || msg.clone()); analysis .annotations( 
&AnnotationConfig { @@ -1174,7 +1224,10 @@ impl flags::AnalysisStats { .for_each(|annotation| { _ = analysis.resolve_annotation(annotation); }); + bar.inc(1); } + bar.finish_and_clear(); + let ide_time = sw.elapsed(); eprintln!("{:<20} {} ({} files)", "IDE:", ide_time, file_ids.len()); } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs index 7b12cb14009ff..82590c8e707fa 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs @@ -9,7 +9,7 @@ use ide::{AnalysisHost, AssistResolveStrategy, Diagnostic, DiagnosticsConfig, Se use ide_db::{LineIndexDatabase, base_db::SourceDatabase}; use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace_at}; -use crate::cli::flags; +use crate::cli::{flags, progress_report::ProgressReport}; impl flags::Diagnostics { pub fn run(self) -> anyhow::Result<()> { @@ -50,23 +50,26 @@ impl flags::Diagnostics { let mut found_error = false; let mut visited_files = FxHashSet::default(); - - let work = all_modules(db).into_iter().filter(|module| { - let file_id = module.definition_source_file_id(db).original_file(db); - let source_root = db.file_source_root(file_id.file_id(db)).source_root_id(db); - let source_root = db.source_root(source_root).source_root(db); - !source_root.is_library - }); - + let min_severity = self.severity.unwrap_or(flags::Severity::Weak); + + let work = all_modules(db) + .into_iter() + .filter(|module| { + let file_id = module.definition_source_file_id(db).original_file(db); + let source_root = db.file_source_root(file_id.file_id(db)).source_root_id(db); + let source_root = db.source_root(source_root).source_root(db); + !source_root.is_library + }) + .collect::>(); + + let mut bar = ProgressReport::new(work.len()); for module in work { let file_id = module.definition_source_file_id(db).original_file(db); if !visited_files.contains(&file_id) { + let message = format!("processing {}", _vfs.file_path(file_id.file_id(db))); + bar.set_message(move || message.clone()); let crate_name = module.krate().display_name(db).as_deref().unwrap_or(&sym::unknown).to_owned(); - println!( - "processing crate: {crate_name}, module: {}", - _vfs.file_path(file_id.file_id(db)) - ); for diagnostic in analysis .full_diagnostics( &DiagnosticsConfig::test_sample(), @@ -75,6 +78,16 @@ impl flags::Diagnostics { ) .unwrap() { + let severity = match diagnostic.severity { + Severity::Error => flags::Severity::Error, + Severity::Warning => flags::Severity::Warning, + Severity::WeakWarning => flags::Severity::Weak, + Severity::Allow => continue, + }; + if severity < min_severity { + continue; + } + if matches!(diagnostic.severity, Severity::Error) { found_error = true; } @@ -83,12 +96,17 @@ impl flags::Diagnostics { let line_index = db.line_index(range.file_id); let start = line_index.line_col(range.range.start()); let end = line_index.line_col(range.range.end()); - println!("{severity:?} {code:?} from {start:?} to {end:?}: {message}"); + bar.println(format!( + "at crate {crate_name}, file {}: {severity:?} {code:?} from {start:?} to {end:?}: {message}", + _vfs.file_path(file_id.file_id(db)) + )); } visited_files.insert(file_id); } + bar.inc(1); } + bar.finish_and_clear(); println!(); println!("diagnostic scan complete"); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs index 
16f351272b691..75030bedfca3f 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs @@ -124,6 +124,9 @@ xflags::xflags! { optional --disable-proc-macros /// Run the proc-macro-srv binary at the specified path. optional --proc-macro-srv path: PathBuf + + /// The minimum severity. + optional --severity severity: Severity } /// Report unresolved references @@ -281,6 +284,7 @@ pub struct Diagnostics { pub disable_build_scripts: bool, pub disable_proc_macros: bool, pub proc_macro_srv: Option, + pub severity: Option, } #[derive(Debug)] @@ -376,3 +380,23 @@ impl FromStr for OutputFormat { } } } + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub enum Severity { + Weak, + Warning, + Error, +} + +impl FromStr for Severity { + type Err = String; + + fn from_str(s: &str) -> Result { + match &*s.to_ascii_lowercase() { + "weak" => Ok(Self::Weak), + "warning" => Ok(Self::Warning), + "error" => Ok(Self::Error), + _ => Err(format!("unknown severity `{s}`")), + } + } +} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs index 36ae98b321b84..609ebf2b514f0 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs @@ -11,7 +11,7 @@ use ide_db::base_db; use itertools::Either; use paths::Utf8PathBuf; use profile::StopWatch; -use project_model::toolchain_info::{QueryConfig, target_data_layout}; +use project_model::toolchain_info::{QueryConfig, target_data}; use project_model::{ CargoConfig, ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, RustLibSource, RustSourceWorkspaceConfig, Sysroot, @@ -19,7 +19,6 @@ use project_model::{ use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace}; use rustc_hash::FxHashMap; -use triomphe::Arc; use vfs::{AbsPathBuf, FileId}; use walkdir::WalkDir; @@ -87,7 +86,7 @@ impl Tester { sysroot.set_workspace(loaded_sysroot); } - let data_layout = target_data_layout::get( + let target_data = target_data::get( QueryConfig::Rustc(&sysroot, tmp_file.parent().unwrap().as_ref()), None, &cargo_config.extra_env, @@ -101,7 +100,7 @@ impl Tester { sysroot, rustc_cfg: vec![], toolchain: None, - target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())), + target: target_data.map_err(|it| it.to_string().into()), cfg_overrides: Default::default(), extra_includes: vec![], set_test: true, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/symbols.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/symbols.rs index 9fad6723afcd9..d7af56d3e15be 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/symbols.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/symbols.rs @@ -1,5 +1,5 @@ //! Read Rust code on stdin, print syntax tree on stdout. 
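The new `--severity` flag above leans on the derived `Ord` for `Severity`: derived ordering follows declaration order, so `Weak < Warning < Error`, which is what lets the diagnostics command skip anything below the requested minimum with a single comparison. A small standalone sketch of that behaviour (the enum and `FromStr` impl mirror the flag definition; the filtering in `main` is illustrative):

use std::str::FromStr;

// Declaration order drives the derived `Ord`: Weak < Warning < Error.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum Severity {
    Weak,
    Warning,
    Error,
}

impl FromStr for Severity {
    type Err = String;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match &*s.to_ascii_lowercase() {
            "weak" => Ok(Self::Weak),
            "warning" => Ok(Self::Warning),
            "error" => Ok(Self::Error),
            _ => Err(format!("unknown severity `{s}`")),
        }
    }
}

fn main() {
    let min: Severity = "warning".parse().unwrap();
    let reported = [Severity::Weak, Severity::Error, Severity::Warning];
    // Anything strictly below the minimum is dropped, like the
    // `severity < min_severity` check in the diagnostics command.
    let kept: Vec<_> = reported.into_iter().filter(|&s| s >= min).collect();
    assert_eq!(kept, [Severity::Error, Severity::Warning]);
}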
-use ide::Analysis; +use ide::{Analysis, FileStructureConfig}; use crate::cli::{flags, read_stdin}; @@ -7,7 +7,12 @@ impl flags::Symbols { pub fn run(self) -> anyhow::Result<()> { let text = read_stdin()?; let (analysis, file_id) = Analysis::from_single_file(text); - let structure = analysis.file_structure(file_id).unwrap(); + let structure = analysis + // The default setting in config.rs (document_symbol_search_excludeLocals) is to exclude + // locals because it is unlikely that users want document search to return the names of + // local variables, but here we include them deliberately. + .file_structure(&FileStructureConfig { exclude_locals: false }, file_id) + .unwrap(); for s in structure { println!("{s:?}"); } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 1a00295b9ac18..6b489d5114381 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -226,6 +226,14 @@ config_data! { inlayHints_discriminantHints_enable: DiscriminantHintsDef = DiscriminantHintsDef::Never, + /// Disable reborrows in expression adjustments inlay hints. + /// + /// Reborrows are a pair of a builtin deref then borrow, i.e. `&*`. They are inserted by the compiler but are mostly useless to the programmer. + /// + /// Note: if the deref is not builtin (an overloaded deref), or the borrow is `&raw const`/`&raw mut`, they are not removed. + inlayHints_expressionAdjustmentHints_disableReborrows: bool = + true, + /// Show inlay hints for type adjustments. inlayHints_expressionAdjustmentHints_enable: AdjustmentHintsDef = AdjustmentHintsDef::Never, @@ -374,6 +382,13 @@ config_data! { /// Exclude tests from find-all-references and call-hierarchy. references_excludeTests: bool = false, + /// Use semantic tokens for comments. + /// + /// In some editors (e.g. vscode) semantic tokens override other highlighting grammars. + /// By disabling semantic tokens for comments, other grammars can be used to highlight + /// their contents. + semanticHighlighting_comments_enable: bool = true, + /// Inject additional highlighting into doc comments. /// /// When enabled, rust-analyzer will highlight rust source in doc comments as well as intra @@ -858,6 +873,9 @@ config_data! { /// check will be performed. check_workspace: bool = true, + /// Exclude all locals from document symbol search. + document_symbol_search_excludeLocals: bool = true, + /// These proc-macros will be ignored when trying to expand them. /// /// This config takes a map of crate names with the exported proc-macro names to ignore as values. @@ -1481,6 +1499,13 @@ pub enum FilesWatcher { Server, } +/// Configuration for document symbol search requests. +#[derive(Debug, Clone)] +pub struct DocumentSymbolConfig { + /// Should locals be excluded. 
+ pub search_exclude_locals: bool, + } + #[derive(Debug, Clone)] pub struct NotificationsConfig { pub cargo_toml_not_found: bool, @@ -1878,12 +1903,14 @@ impl Config { AdjustmentHintsDef::Always => ide::AdjustmentHints::Always, AdjustmentHintsDef::Never => match self.inlayHints_reborrowHints_enable() { ReborrowHintsDef::Always | ReborrowHintsDef::Mutable => { - ide::AdjustmentHints::ReborrowOnly + ide::AdjustmentHints::BorrowsOnly } ReborrowHintsDef::Never => ide::AdjustmentHints::Never, }, - AdjustmentHintsDef::Reborrow => ide::AdjustmentHints::ReborrowOnly, + AdjustmentHintsDef::Borrows => ide::AdjustmentHints::BorrowsOnly, }, + adjustment_hints_disable_reborrows: *self + .inlayHints_expressionAdjustmentHints_disableReborrows(), adjustment_hints_mode: match self.inlayHints_expressionAdjustmentHints_mode() { AdjustmentHintsModeDef::Prefix => ide::AdjustmentHintsMode::Prefix, AdjustmentHintsModeDef::Postfix => ide::AdjustmentHintsMode::Postfix, @@ -1948,6 +1975,7 @@ impl Config { pub fn highlighting_config(&self) -> HighlightConfig { HighlightConfig { strings: self.semanticHighlighting_strings_enable().to_owned(), + comments: self.semanticHighlighting_comments_enable().to_owned(), punctuation: self.semanticHighlighting_punctuation_enable().to_owned(), specialize_punctuation: self .semanticHighlighting_punctuation_specialization_enable() @@ -2438,6 +2466,12 @@ impl Config { } } + pub fn document_symbol(&self, source_root: Option<SourceRootId>) -> DocumentSymbolConfig { + DocumentSymbolConfig { + search_exclude_locals: *self.document_symbol_search_excludeLocals(source_root), + } + } + pub fn workspace_symbol(&self, source_root: Option<SourceRootId>) -> WorkspaceSymbolConfig { WorkspaceSymbolConfig { search_exclude_imports: *self.workspace_symbol_search_excludeImports(source_root), @@ -2806,7 +2840,8 @@ enum ReborrowHintsDef { #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "snake_case")] enum AdjustmentHintsDef { - Reborrow, + #[serde(alias = "reborrow")] + Borrows, #[serde(with = "true_or_always")] #[serde(untagged)] Always, @@ -3067,7 +3102,7 @@ macro_rules! _config_data { }) => { /// Default config values for this grouping. #[allow(non_snake_case)] - #[derive(Debug, Clone )] + #[derive(Debug, Clone)] struct $name { $($field: $ty,)* } impl_for_config_data!{ diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs index 438a2a0ba1ea1..ee50237c405e6 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs @@ -26,6 +26,17 @@ pub struct DiagnosticsMapConfig { pub(crate) type DiagnosticsGeneration = usize; +#[derive(Debug, Clone, Default)] +pub(crate) struct WorkspaceFlycheckDiagnostic { + pub(crate) per_package: FxHashMap<Option<Arc<PackageId>>, PackageFlycheckDiagnostic>, +} + +#[derive(Debug, Clone)] +pub(crate) struct PackageFlycheckDiagnostic { + generation: DiagnosticsGeneration, + per_file: FxHashMap<FileId, Vec<lsp_types::Diagnostic>>, +} + #[derive(Debug, Default, Clone)] pub(crate) struct DiagnosticCollection { // FIXME: should be FxHashMap> @@ -33,9 +44,7 @@ FxHashMap<FileId, (DiagnosticsGeneration, Vec<lsp_types::Diagnostic>)>, pub(crate) native_semantic: FxHashMap<FileId, (DiagnosticsGeneration, Vec<lsp_types::Diagnostic>)>, - // FIXME: should be Vec - pub(crate) check: Vec<FxHashMap<Option<Arc<PackageId>>, FxHashMap<FileId, Vec<lsp_types::Diagnostic>>>>, + pub(crate) check: Vec<WorkspaceFlycheckDiagnostic>, pub(crate) check_fixes: CheckFixes, changes: FxHashSet<FileId>, /// Counter for supplying a new generation number for diagnostics.
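The two structs added above are what make stale flycheck output droppable: a package's diagnostics carry the generation of the check run that produced them, so late messages from a cancelled run can be ignored and untouched packages can be pruned once a newer run finishes. A simplified, self-contained sketch of that bookkeeping (std `HashMap`, plain strings and integers stand in for the crate's `FxHashMap`, `PackageId`, `FileId`, and LSP diagnostic types):

use std::collections::HashMap;

type Generation = usize;

#[derive(Default)]
struct PackageDiagnostics {
    generation: Generation,
    per_file: HashMap<u32, Vec<String>>, // file id -> messages, simplified
}

#[derive(Default)]
struct WorkspaceDiagnostics {
    per_package: HashMap<String, PackageDiagnostics>, // package id, simplified
}

impl WorkspaceDiagnostics {
    // Analogue of `add_check_diagnostic`: a message stamped with an older
    // generation than what is already stored for the package is stale.
    fn add(&mut self, package: &str, generation: Generation, file: u32, message: String) {
        let pkg = self.per_package.entry(package.to_owned()).or_default();
        if pkg.generation > generation {
            return; // late message from a cancelled/restarted run
        }
        pkg.generation = generation;
        pkg.per_file.entry(file).or_default().push(message);
    }

    // Analogue of `clear_check_older_than`: once a run completes, packages it
    // never touched still carry an old generation and can be dropped.
    fn clear_older_than(&mut self, generation: Generation) {
        self.per_package.retain(|_, pkg| pkg.generation >= generation);
    }
}

fn main() {
    let mut diags = WorkspaceDiagnostics::default();
    diags.add("foo", 1, 0, "warning from run 1".to_owned());
    diags.add("foo", 2, 0, "warning from run 2".to_owned());
    diags.add("foo", 1, 0, "late message from run 1".to_owned()); // ignored as stale
    diags.clear_older_than(2); // "foo" survives; packages last seen in run 1 would not
    assert_eq!(diags.per_package["foo"].per_file[&0].len(), 2);
}

The real collection additionally keeps one entry per flycheck instance and clears the associated quick-fixes, but the generation comparison is the same as in this sketch.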
@@ -57,7 +66,7 @@ impl DiagnosticCollection { let Some(check) = self.check.get_mut(flycheck_id) else { return; }; - self.changes.extend(check.drain().flat_map(|(_, v)| v.into_keys())); + self.changes.extend(check.per_package.drain().flat_map(|(_, v)| v.per_file.into_keys())); if let Some(fixes) = Arc::make_mut(&mut self.check_fixes).get_mut(flycheck_id) { fixes.clear(); } @@ -66,7 +75,9 @@ impl DiagnosticCollection { pub(crate) fn clear_check_all(&mut self) { Arc::make_mut(&mut self.check_fixes).clear(); self.changes.extend( - self.check.iter_mut().flat_map(|it| it.drain().flat_map(|(_, v)| v.into_keys())), + self.check + .iter_mut() + .flat_map(|it| it.per_package.drain().flat_map(|(_, v)| v.per_file.into_keys())), ) } @@ -79,14 +90,36 @@ impl DiagnosticCollection { return; }; let package_id = Some(package_id); - if let Some(checks) = check.remove(&package_id) { - self.changes.extend(checks.into_keys()); + if let Some(checks) = check.per_package.remove(&package_id) { + self.changes.extend(checks.per_file.into_keys()); } if let Some(fixes) = Arc::make_mut(&mut self.check_fixes).get_mut(flycheck_id) { fixes.remove(&package_id); } } + pub(crate) fn clear_check_older_than( + &mut self, + flycheck_id: usize, + generation: DiagnosticsGeneration, + ) { + if let Some(flycheck) = self.check.get_mut(flycheck_id) { + let mut packages = vec![]; + self.changes.extend( + flycheck + .per_package + .extract_if(|_, v| v.generation < generation) + .inspect(|(package_id, _)| packages.push(package_id.clone())) + .flat_map(|(_, v)| v.per_file.into_keys()), + ); + if let Some(fixes) = Arc::make_mut(&mut self.check_fixes).get_mut(flycheck_id) { + for package in packages { + fixes.remove(&package); + } + } + } + } + pub(crate) fn clear_native_for(&mut self, file_id: FileId) { self.native_syntax.remove(&file_id); self.native_semantic.remove(&file_id); @@ -96,19 +129,26 @@ impl DiagnosticCollection { pub(crate) fn add_check_diagnostic( &mut self, flycheck_id: usize, + generation: DiagnosticsGeneration, package_id: &Option>, file_id: FileId, diagnostic: lsp_types::Diagnostic, fix: Option>, ) { if self.check.len() <= flycheck_id { - self.check.resize_with(flycheck_id + 1, Default::default); + self.check.resize_with(flycheck_id + 1, WorkspaceFlycheckDiagnostic::default); + } + + let check = &mut self.check[flycheck_id]; + let package = check.per_package.entry(package_id.clone()).or_insert_with(|| { + PackageFlycheckDiagnostic { generation, per_file: FxHashMap::default() } + }); + // Getting message from old generation. Might happen in restarting checks. 
+ if package.generation > generation { + return; } - let diagnostics = self.check[flycheck_id] - .entry(package_id.clone()) - .or_default() - .entry(file_id) - .or_default(); + package.generation = generation; + let diagnostics = package.per_file.entry(file_id).or_default(); for existing_diagnostic in diagnostics.iter() { if are_diagnostics_equal(existing_diagnostic, &diagnostic) { return; @@ -177,8 +217,8 @@ impl DiagnosticCollection { let check = self .check .iter() - .flat_map(|it| it.values()) - .filter_map(move |it| it.get(&file_id)) + .flat_map(|it| it.per_package.values()) + .filter_map(move |it| it.per_file.get(&file_id)) .flatten(); native_syntax.chain(native_semantic).chain(check) } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index e4e0bcdc1cd08..315c45d5b6392 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -1,7 +1,12 @@ //! Flycheck provides the functionality needed to run `cargo check` to provide //! LSP diagnostics based on the output of the command. -use std::{fmt, io, process::Command, time::Duration}; +use std::{ + fmt, io, + process::Command, + sync::atomic::{AtomicUsize, Ordering}, + time::Duration, +}; use cargo_metadata::PackageId; use crossbeam_channel::{Receiver, Sender, select_biased, unbounded}; @@ -18,7 +23,10 @@ pub(crate) use cargo_metadata::diagnostic::{ use toolchain::Tool; use triomphe::Arc; -use crate::command::{CargoParser, CommandHandle}; +use crate::{ + command::{CargoParser, CommandHandle}, + diagnostics::DiagnosticsGeneration, +}; #[derive(Clone, Debug, Default, PartialEq, Eq)] pub(crate) enum InvocationStrategy { @@ -96,11 +104,11 @@ pub(crate) enum FlycheckConfig { } impl FlycheckConfig { - pub(crate) fn invocation_strategy_once(&self) -> bool { + pub(crate) fn invocation_strategy(&self) -> InvocationStrategy { match self { - FlycheckConfig::CargoCommand { .. } => false, + FlycheckConfig::CargoCommand { .. } => InvocationStrategy::PerWorkspace, FlycheckConfig::CustomCommand { invocation_strategy, .. } => { - *invocation_strategy == InvocationStrategy::Once + invocation_strategy.clone() } } } @@ -138,36 +146,54 @@ pub(crate) struct FlycheckHandle { sender: Sender, _thread: stdx::thread::JoinHandle, id: usize, + generation: AtomicUsize, } impl FlycheckHandle { pub(crate) fn spawn( id: usize, + generation: DiagnosticsGeneration, sender: Sender, config: FlycheckConfig, sysroot_root: Option, workspace_root: AbsPathBuf, manifest_path: Option, ) -> FlycheckHandle { - let actor = - FlycheckActor::new(id, sender, config, sysroot_root, workspace_root, manifest_path); + let actor = FlycheckActor::new( + id, + generation, + sender, + config, + sysroot_root, + workspace_root, + manifest_path, + ); let (sender, receiver) = unbounded::(); let thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker, format!("Flycheck{id}")) .spawn(move || actor.run(receiver)) .expect("failed to spawn thread"); - FlycheckHandle { id, sender, _thread: thread } + FlycheckHandle { id, generation: generation.into(), sender, _thread: thread } } /// Schedule a re-start of the cargo check worker to do a workspace wide check. 
pub(crate) fn restart_workspace(&self, saved_file: Option) { - self.sender.send(StateChange::Restart { package: None, saved_file, target: None }).unwrap(); + let generation = self.generation.fetch_add(1, Ordering::Relaxed) + 1; + self.sender + .send(StateChange::Restart { generation, package: None, saved_file, target: None }) + .unwrap(); } /// Schedule a re-start of the cargo check worker to do a package wide check. pub(crate) fn restart_for_package(&self, package: String, target: Option) { + let generation = self.generation.fetch_add(1, Ordering::Relaxed) + 1; self.sender - .send(StateChange::Restart { package: Some(package), saved_file: None, target }) + .send(StateChange::Restart { + generation, + package: Some(package), + saved_file: None, + target, + }) .unwrap(); } @@ -179,23 +205,31 @@ impl FlycheckHandle { pub(crate) fn id(&self) -> usize { self.id } + + pub(crate) fn generation(&self) -> DiagnosticsGeneration { + self.generation.load(Ordering::Relaxed) + } +} + +#[derive(Debug)] +pub(crate) enum ClearDiagnosticsKind { + All, + OlderThan(DiagnosticsGeneration), + Package(Arc), } pub(crate) enum FlycheckMessage { /// Request adding a diagnostic with fixes included to a file AddDiagnostic { id: usize, + generation: DiagnosticsGeneration, workspace_root: Arc, diagnostic: Diagnostic, package_id: Option>, }, /// Request clearing all outdated diagnostics. - ClearDiagnostics { - id: usize, - /// The package whose diagnostics to clear, or if unspecified, all diagnostics. - package_id: Option>, - }, + ClearDiagnostics { id: usize, kind: ClearDiagnosticsKind }, /// Request check progress notification to client Progress { @@ -208,18 +242,23 @@ pub(crate) enum FlycheckMessage { impl fmt::Debug for FlycheckMessage { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - FlycheckMessage::AddDiagnostic { id, workspace_root, diagnostic, package_id } => f + FlycheckMessage::AddDiagnostic { + id, + generation, + workspace_root, + diagnostic, + package_id, + } => f .debug_struct("AddDiagnostic") .field("id", id) + .field("generation", generation) .field("workspace_root", workspace_root) .field("package_id", package_id) .field("diagnostic_code", &diagnostic.code.as_ref().map(|it| &it.code)) .finish(), - FlycheckMessage::ClearDiagnostics { id, package_id } => f - .debug_struct("ClearDiagnostics") - .field("id", id) - .field("package_id", package_id) - .finish(), + FlycheckMessage::ClearDiagnostics { id, kind } => { + f.debug_struct("ClearDiagnostics").field("id", id).field("kind", kind).finish() + } FlycheckMessage::Progress { id, progress } => { f.debug_struct("Progress").field("id", id).field("progress", progress).finish() } @@ -237,7 +276,12 @@ pub(crate) enum Progress { } enum StateChange { - Restart { package: Option, saved_file: Option, target: Option }, + Restart { + generation: DiagnosticsGeneration, + package: Option, + saved_file: Option, + target: Option, + }, Cancel, } @@ -246,6 +290,7 @@ struct FlycheckActor { /// The workspace id of this flycheck instance. 
id: usize, + generation: DiagnosticsGeneration, sender: Sender, config: FlycheckConfig, manifest_path: Option, @@ -283,6 +328,7 @@ pub(crate) const SAVED_FILE_PLACEHOLDER: &str = "$saved_file"; impl FlycheckActor { fn new( id: usize, + generation: DiagnosticsGeneration, sender: Sender, config: FlycheckConfig, sysroot_root: Option, @@ -292,6 +338,7 @@ impl FlycheckActor { tracing::info!(%id, ?workspace_root, "Spawning flycheck"); FlycheckActor { id, + generation, sender, config, sysroot_root, @@ -327,7 +374,12 @@ impl FlycheckActor { tracing::debug!(flycheck_id = self.id, "flycheck cancelled"); self.cancel_check_process(); } - Event::RequestStateChange(StateChange::Restart { package, saved_file, target }) => { + Event::RequestStateChange(StateChange::Restart { + generation, + package, + saved_file, + target, + }) => { // Cancel the previously spawned process self.cancel_check_process(); while let Ok(restart) = inbox.recv_timeout(Duration::from_millis(50)) { @@ -337,6 +389,8 @@ impl FlycheckActor { } } + self.generation = generation; + let Some(command) = self.check_command(package.as_deref(), saved_file.as_deref(), target) else { @@ -383,7 +437,16 @@ impl FlycheckActor { // Clear everything for good measure self.send(FlycheckMessage::ClearDiagnostics { id: self.id, - package_id: None, + kind: ClearDiagnosticsKind::All, + }); + } else if res.is_ok() { + // We clear diagnostics for packages on + // `[CargoCheckMessage::CompilerArtifact]` but there seem to be setups where + // cargo may not report an artifact to our runner at all. To handle such + // cases, clear stale diagnostics when flycheck completes successfully. + self.send(FlycheckMessage::ClearDiagnostics { + id: self.id, + kind: ClearDiagnosticsKind::OlderThan(self.generation), }); } self.clear_diagnostics_state(); @@ -412,7 +475,7 @@ impl FlycheckActor { ); self.send(FlycheckMessage::ClearDiagnostics { id: self.id, - package_id: Some(package_id), + kind: ClearDiagnosticsKind::Package(package_id), }); } } @@ -435,7 +498,7 @@ impl FlycheckActor { ); self.send(FlycheckMessage::ClearDiagnostics { id: self.id, - package_id: Some(package_id.clone()), + kind: ClearDiagnosticsKind::Package(package_id.clone()), }); } } else if self.diagnostics_received @@ -444,11 +507,12 @@ impl FlycheckActor { self.diagnostics_received = DiagnosticsReceived::YesAndClearedForAll; self.send(FlycheckMessage::ClearDiagnostics { id: self.id, - package_id: None, + kind: ClearDiagnosticsKind::All, }); } self.send(FlycheckMessage::AddDiagnostic { id: self.id, + generation: self.generation, package_id, workspace_root: self.root.clone(), diagnostic, @@ -465,7 +529,7 @@ impl FlycheckActor { if let Some(command_handle) = self.command_handle.take() { tracing::debug!( command = ?command_handle, - "did cancel flycheck" + "did cancel flycheck" ); command_handle.cancel(); self.command_receiver.take(); @@ -524,6 +588,7 @@ impl FlycheckActor { cmd.arg("--manifest-path"); cmd.arg(manifest_path); if manifest_path.extension() == Some("rs") { + cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly"); cmd.arg("-Zscript"); } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 2f1afba3634ef..89d6fb8edc2e6 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -183,6 +183,10 @@ pub(crate) struct GlobalState { /// this queue should run only *after* 
[`GlobalState::process_changes`] has /// been called. pub(crate) deferred_task_queue: TaskQueue, + /// HACK: Workaround for https://github.com/rust-lang/rust-analyzer/issues/19709 + /// This is marked true if we failed to load a crate root file at crate graph creation, + /// which will usually end up causing a bunch of incorrect diagnostics on startup. + pub(crate) incomplete_crate_graph: bool, } /// An immutable snapshot of the world's state at a point in time. @@ -298,6 +302,7 @@ impl GlobalState { discover_workspace_queue: OpQueue::default(), deferred_task_queue: task_queue, + incomplete_crate_graph: false, }; // Apply any required database inputs from the config. this.update_configuration(config); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs index b25245dd884a4..10bbb0bb31d99 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs @@ -141,7 +141,7 @@ impl RequestDispatcher<'_> { Result: Serialize, > + 'static, { - if !self.global_state.vfs_done { + if !self.global_state.vfs_done || self.global_state.incomplete_crate_graph { if let Some(lsp_server::Request { id, .. }) = self.req.take_if(|it| it.method == R::METHOD) { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs index e193ff77743d1..68c91a653940b 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs @@ -1,9 +1,11 @@ //! This module is responsible for implementing handlers for Language Server //! Protocol. This module specifically handles notifications. 
-use std::ops::{Deref, Not as _}; +use std::{ + ops::{Deref, Not as _}, + panic::UnwindSafe, +}; -use ide_db::base_db::salsa::Cancelled; use itertools::Itertools; use lsp_types::{ CancelParams, DidChangeConfigurationParams, DidChangeTextDocumentParams, @@ -16,7 +18,7 @@ use vfs::{AbsPathBuf, ChangeKind, VfsPath}; use crate::{ config::{Config, ConfigChange}, - flycheck::Target, + flycheck::{InvocationStrategy, Target}, global_state::{FetchWorkspaceRequest, GlobalState}, lsp::{from_proto, utils::apply_document_changes}, lsp_ext::{self, RunFlycheckParams}, @@ -301,124 +303,165 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { let file_id = state.vfs.read().0.file_id(&vfs_path); if let Some((file_id, vfs::FileExcluded::No)) = file_id { let world = state.snapshot(); - let invocation_strategy_once = state.config.flycheck(None).invocation_strategy_once(); + let invocation_strategy = state.config.flycheck(None).invocation_strategy(); let may_flycheck_workspace = state.config.flycheck_workspace(None); - let mut updated = false; - let task = move || -> std::result::Result<(), Cancelled> { - if invocation_strategy_once { - let saved_file = vfs_path.as_path().map(|p| p.to_owned()); - world.flycheck[0].restart_workspace(saved_file); - } - let target = TargetSpec::for_file(&world, file_id)?.and_then(|it| { - let tgt_kind = it.target_kind(); - let (tgt_name, root, package) = match it { - TargetSpec::Cargo(c) => (c.target, c.workspace_root, c.package), - _ => return None, - }; - - let tgt = match tgt_kind { - project_model::TargetKind::Bin => Target::Bin(tgt_name), - project_model::TargetKind::Example => Target::Example(tgt_name), - project_model::TargetKind::Test => Target::Test(tgt_name), - project_model::TargetKind::Bench => Target::Benchmark(tgt_name), - _ => return Some((None, root, package)), - }; - - Some((Some(tgt), root, package)) - }); - tracing::debug!(?target, "flycheck target"); - // we have a specific non-library target, attempt to only check that target, nothing - // else will be affected - if let Some((target, root, package)) = target { - // trigger a package check if we have a non-library target as that can't affect - // anything else in the workspace OR if we're not allowed to check the workspace as - // the user opted into package checks then - let package_check_allowed = target.is_some() || !may_flycheck_workspace; - if package_check_allowed { - let workspace = world.workspaces.iter().position(|ws| match &ws.kind { - project_model::ProjectWorkspaceKind::Cargo { cargo, .. } - | project_model::ProjectWorkspaceKind::DetachedFile { - cargo: Some((cargo, _, _)), - .. - } => *cargo.workspace_root() == root, - _ => false, - }); - if let Some(idx) = workspace { - world.flycheck[idx].restart_for_package(package, target); - } + let task: Box ide::Cancellable<()> + Send + UnwindSafe> = + match invocation_strategy { + InvocationStrategy::Once => { + Box::new(move || { + // FIXME: Because triomphe::Arc's auto UnwindSafe impl requires that the inner type + // be UnwindSafe, and FlycheckHandle is not UnwindSafe, `word.flycheck` cannot + // be captured directly. std::sync::Arc has an UnwindSafe impl that only requires + // that the inner type be RefUnwindSafe, so if we were using that one we wouldn't + // have this problem. Remove the line below when triomphe::Arc has an UnwindSafe impl + // like std::sync::Arc's. 
+ let world = world; + stdx::always!( + world.flycheck.len() == 1, + "should have exactly one flycheck handle when invocation strategy is once" + ); + let saved_file = vfs_path.as_path().map(ToOwned::to_owned); + world.flycheck[0].restart_workspace(saved_file); + Ok(()) + }) } - } - - if !may_flycheck_workspace { - return Ok(()); - } - - // Trigger flychecks for all workspaces that depend on the saved file - // Crates containing or depending on the saved file - let crate_ids = world - .analysis - .crates_for(file_id)? - .into_iter() - .flat_map(|id| world.analysis.transitive_rev_deps(id)) - .flatten() - .unique() - .collect::>(); - tracing::debug!(?crate_ids, "flycheck crate ids"); - let crate_root_paths: Vec<_> = crate_ids - .iter() - .filter_map(|&crate_id| { - world - .analysis - .crate_root(crate_id) - .map(|file_id| { - world.file_id_to_file_path(file_id).as_path().map(ToOwned::to_owned) - }) - .transpose() - }) - .collect::>()?; - let crate_root_paths: Vec<_> = crate_root_paths.iter().map(Deref::deref).collect(); - tracing::debug!(?crate_root_paths, "flycheck crate roots"); - - // Find all workspaces that have at least one target containing the saved file - let workspace_ids = - world.workspaces.iter().enumerate().filter(|(_, ws)| match &ws.kind { - project_model::ProjectWorkspaceKind::Cargo { cargo, .. } - | project_model::ProjectWorkspaceKind::DetachedFile { - cargo: Some((cargo, _, _)), - .. - } => cargo.packages().any(|pkg| { - cargo[pkg] - .targets + InvocationStrategy::PerWorkspace => { + Box::new(move || { + let target = TargetSpec::for_file(&world, file_id)?.and_then(|it| { + let tgt_kind = it.target_kind(); + let (tgt_name, root, package) = match it { + TargetSpec::Cargo(c) => (c.target, c.workspace_root, c.package), + _ => return None, + }; + + let tgt = match tgt_kind { + project_model::TargetKind::Bin => Target::Bin(tgt_name), + project_model::TargetKind::Example => Target::Example(tgt_name), + project_model::TargetKind::Test => Target::Test(tgt_name), + project_model::TargetKind::Bench => Target::Benchmark(tgt_name), + _ => return Some((None, root, package)), + }; + + Some((Some(tgt), root, package)) + }); + tracing::debug!(?target, "flycheck target"); + // we have a specific non-library target, attempt to only check that target, nothing + // else will be affected + let mut package_workspace_idx = None; + if let Some((target, root, package)) = target { + // trigger a package check if we have a non-library target as that can't affect + // anything else in the workspace OR if we're not allowed to check the workspace as + // the user opted into package checks then + let package_check_allowed = target.is_some() || !may_flycheck_workspace; + if package_check_allowed { + package_workspace_idx = + world.workspaces.iter().position(|ws| match &ws.kind { + project_model::ProjectWorkspaceKind::Cargo { + cargo, + .. + } + | project_model::ProjectWorkspaceKind::DetachedFile { + cargo: Some((cargo, _, _)), + .. + } => *cargo.workspace_root() == root, + _ => false, + }); + if let Some(idx) = package_workspace_idx { + world.flycheck[idx].restart_for_package(package, target); + } + } + } + + if !may_flycheck_workspace { + return Ok(()); + } + + // Trigger flychecks for all workspaces that depend on the saved file + // Crates containing or depending on the saved file + let crate_ids: Vec<_> = world + .analysis + .crates_for(file_id)? 
+ .into_iter() + .flat_map(|id| world.analysis.transitive_rev_deps(id)) + .flatten() + .unique() + .collect(); + tracing::debug!(?crate_ids, "flycheck crate ids"); + let crate_root_paths: Vec<_> = crate_ids .iter() - .any(|&it| crate_root_paths.contains(&cargo[it].root.as_path())) - }), - project_model::ProjectWorkspaceKind::Json(project) => project - .crates() - .any(|(_, krate)| crate_root_paths.contains(&krate.root_module.as_path())), - project_model::ProjectWorkspaceKind::DetachedFile { .. } => false, - }); - - let saved_file = vfs_path.as_path().map(|p| p.to_owned()); - - // Find and trigger corresponding flychecks - 'flychecks: for flycheck in world.flycheck.iter() { - for (id, _) in workspace_ids.clone() { - if id == flycheck.id() { - updated = true; - flycheck.restart_workspace(saved_file.clone()); - continue 'flychecks; - } - } - } - // No specific flycheck was triggered, so let's trigger all of them. - if !updated { - for flycheck in world.flycheck.iter() { - flycheck.restart_workspace(saved_file.clone()); + .filter_map(|&crate_id| { + world + .analysis + .crate_root(crate_id) + .map(|file_id| { + world + .file_id_to_file_path(file_id) + .as_path() + .map(ToOwned::to_owned) + }) + .transpose() + }) + .collect::>()?; + let crate_root_paths: Vec<_> = + crate_root_paths.iter().map(Deref::deref).collect(); + tracing::debug!(?crate_root_paths, "flycheck crate roots"); + + // Find all workspaces that have at least one target containing the saved file + let workspace_ids = + world.workspaces.iter().enumerate().filter(|&(idx, ws)| { + let ws_contains_file = match &ws.kind { + project_model::ProjectWorkspaceKind::Cargo { + cargo, .. + } + | project_model::ProjectWorkspaceKind::DetachedFile { + cargo: Some((cargo, _, _)), + .. + } => cargo.packages().any(|pkg| { + cargo[pkg].targets.iter().any(|&it| { + crate_root_paths.contains(&cargo[it].root.as_path()) + }) + }), + project_model::ProjectWorkspaceKind::Json(project) => { + project.crates().any(|(_, krate)| { + crate_root_paths.contains(&krate.root_module.as_path()) + }) + } + project_model::ProjectWorkspaceKind::DetachedFile { + .. + } => false, + }; + let is_pkg_ws = match package_workspace_idx { + Some(pkg_idx) => pkg_idx == idx, + None => false, + }; + ws_contains_file && !is_pkg_ws + }); + + let saved_file = vfs_path.as_path().map(ToOwned::to_owned); + let mut workspace_check_triggered = false; + // Find and trigger corresponding flychecks + 'flychecks: for flycheck in world.flycheck.iter() { + for (id, _) in workspace_ids.clone() { + if id == flycheck.id() { + workspace_check_triggered = true; + flycheck.restart_workspace(saved_file.clone()); + continue 'flychecks; + } + } + } + + // No specific flycheck was triggered, so let's trigger all of them. 
+ if !workspace_check_triggered && package_workspace_idx.is_none() { + for flycheck in world.flycheck.iter() { + flycheck.restart_workspace(saved_file.clone()); + } + } + Ok(()) + }) } - } - Ok(()) - }; + }; + state.task_pool.handle.spawn_with_sender(stdx::thread::ThreadIntent::Worker, move |_| { if let Err(e) = std::panic::catch_unwind(task) { tracing::error!("flycheck task panicked: {e:?}") diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs index 25c0aac405e79..6cb28aecf748f 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs @@ -8,8 +8,9 @@ use anyhow::Context; use base64::{Engine, prelude::BASE64_STANDARD}; use ide::{ AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, CompletionFieldsToResolve, - FilePosition, FileRange, HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, - RangeInfo, ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit, + FilePosition, FileRange, FileStructureConfig, HoverAction, HoverGotoTypeData, + InlayFieldsToResolve, Query, RangeInfo, ReferenceCategory, Runnable, RunnableKind, + SingleResolve, SourceChange, TextEdit, }; use ide_db::{FxHashMap, SymbolKind}; use itertools::Itertools; @@ -566,41 +567,47 @@ pub(crate) fn handle_document_symbol( let file_id = try_default!(from_proto::file_id(&snap, ¶ms.text_document.uri)?); let line_index = snap.file_line_index(file_id)?; - let mut parents: Vec<(lsp_types::DocumentSymbol, Option)> = Vec::new(); + let mut symbols: Vec<(lsp_types::DocumentSymbol, Option)> = Vec::new(); - for symbol in snap.analysis.file_structure(file_id)? { + let config = snap.config.document_symbol(None); + + let structure_nodes = snap.analysis.file_structure( + &FileStructureConfig { exclude_locals: config.search_exclude_locals }, + file_id, + )?; + + for node in structure_nodes { let mut tags = Vec::new(); - if symbol.deprecated { + if node.deprecated { tags.push(SymbolTag::DEPRECATED) }; #[allow(deprecated)] - let doc_symbol = lsp_types::DocumentSymbol { - name: symbol.label, - detail: symbol.detail, - kind: to_proto::structure_node_kind(symbol.kind), + let symbol = lsp_types::DocumentSymbol { + name: node.label, + detail: node.detail, + kind: to_proto::structure_node_kind(node.kind), tags: Some(tags), - deprecated: Some(symbol.deprecated), - range: to_proto::range(&line_index, symbol.node_range), - selection_range: to_proto::range(&line_index, symbol.navigation_range), + deprecated: Some(node.deprecated), + range: to_proto::range(&line_index, node.node_range), + selection_range: to_proto::range(&line_index, node.navigation_range), children: None, }; - parents.push((doc_symbol, symbol.parent)); + symbols.push((symbol, node.parent)); } - // Builds hierarchy from a flat list, in reverse order (so that indices - // makes sense) + // Builds hierarchy from a flat list, in reverse order (so that the indices make sense) let document_symbols = { let mut acc = Vec::new(); - while let Some((mut node, parent_idx)) = parents.pop() { - if let Some(children) = &mut node.children { + while let Some((mut symbol, parent_idx)) = symbols.pop() { + if let Some(children) = &mut symbol.children { children.reverse(); } let parent = match parent_idx { None => &mut acc, - Some(i) => parents[i].0.children.get_or_insert_with(Vec::new), + Some(i) => symbols[i].0.children.get_or_insert_with(Vec::new), }; - 
parent.push(node); + parent.push(symbol); } acc.reverse(); acc @@ -610,7 +617,7 @@ pub(crate) fn handle_document_symbol( document_symbols.into() } else { let url = to_proto::url(&snap, file_id); - let mut symbol_information = Vec::::new(); + let mut symbol_information = Vec::new(); for symbol in document_symbols { flatten_document_symbol(&symbol, None, &url, &mut symbol_information); } @@ -647,7 +654,7 @@ pub(crate) fn handle_workspace_symbol( let _p = tracing::info_span!("handle_workspace_symbol").entered(); let config = snap.config.workspace_symbol(None); - let (all_symbols, libs) = decide_search_scope_and_kind(¶ms, &config); + let (all_symbols, libs) = decide_search_kind_and_scope(¶ms, &config); let query = { let query: String = params.query.chars().filter(|&c| c != '#' && c != '*').collect(); @@ -670,7 +677,7 @@ pub(crate) fn handle_workspace_symbol( return Ok(Some(lsp_types::WorkspaceSymbolResponse::Nested(res))); - fn decide_search_scope_and_kind( + fn decide_search_kind_and_scope( params: &WorkspaceSymbolParams, config: &WorkspaceSymbolConfig, ) -> (bool, bool) { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs index 02757616d4ffd..333826a1790e4 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs @@ -40,12 +40,13 @@ pub(crate) fn offset( })?; let col = TextSize::from(line_col.col); let clamped_len = col.min(line_range.len()); - if clamped_len < col { - tracing::error!( - "Position {line_col:?} column exceeds line length {}, clamping it", - u32::from(line_range.len()), - ); - } + // FIXME: The cause for this is likely our request retrying. Commented out as this log is just too chatty and very easy to trigger. 
+ // if clamped_len < col { + // tracing::error!( + // "Position {line_col:?} column exceeds line length {}, clamping it", + // u32::from(line_range.len()), + // ); + // } Ok(line_range.start() + clamped_len) } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs index 292be1d5315de..d51ddb86d197f 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs @@ -61,7 +61,7 @@ pub(crate) fn symbol_kind(symbol_kind: SymbolKind) -> lsp_types::SymbolKind { SymbolKind::Struct => lsp_types::SymbolKind::STRUCT, SymbolKind::Enum => lsp_types::SymbolKind::ENUM, SymbolKind::Variant => lsp_types::SymbolKind::ENUM_MEMBER, - SymbolKind::Trait | SymbolKind::TraitAlias => lsp_types::SymbolKind::INTERFACE, + SymbolKind::Trait => lsp_types::SymbolKind::INTERFACE, SymbolKind::Macro | SymbolKind::ProcMacro | SymbolKind::BuiltinAttr @@ -156,7 +156,6 @@ pub(crate) fn completion_item_kind( SymbolKind::Static => lsp_types::CompletionItemKind::VALUE, SymbolKind::Struct => lsp_types::CompletionItemKind::STRUCT, SymbolKind::Trait => lsp_types::CompletionItemKind::INTERFACE, - SymbolKind::TraitAlias => lsp_types::CompletionItemKind::INTERFACE, SymbolKind::TypeAlias => lsp_types::CompletionItemKind::STRUCT, SymbolKind::TypeParam => lsp_types::CompletionItemKind::TYPE_PARAMETER, SymbolKind::Union => lsp_types::CompletionItemKind::STRUCT, @@ -817,7 +816,6 @@ fn semantic_token_type_and_modifiers( SymbolKind::Union => types::UNION, SymbolKind::TypeAlias => types::TYPE_ALIAS, SymbolKind::Trait => types::INTERFACE, - SymbolKind::TraitAlias => types::INTERFACE, SymbolKind::Macro => types::MACRO, SymbolKind::ProcMacro => types::PROC_MACRO, SymbolKind::BuiltinAttr => types::BUILTIN_ATTRIBUTE, @@ -909,7 +907,6 @@ pub(crate) fn folding_range( | FoldKind::WhereClause | FoldKind::ReturnType | FoldKind::Array - | FoldKind::TraitAliases | FoldKind::ExternCrates | FoldKind::MatchArm | FoldKind::Function => None, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs index 61c758d5e86e1..c6762f318326a 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs @@ -20,7 +20,7 @@ use crate::{ config::Config, diagnostics::{DiagnosticsGeneration, NativeDiagnosticsFetchKind, fetch_native_diagnostics}, discover::{DiscoverArgument, DiscoverCommand, DiscoverProjectMessage}, - flycheck::{self, FlycheckMessage}, + flycheck::{self, ClearDiagnosticsKind, FlycheckMessage}, global_state::{ FetchBuildDataResponse, FetchWorkspaceRequest, FetchWorkspaceResponse, GlobalState, file_id_to_url, url_to_file_id, @@ -812,7 +812,7 @@ impl GlobalState { }; if let Some(state) = state { - self.report_progress("Building build-artifacts", state, msg, None, None); + self.report_progress("Building compile-time-deps", state, msg, None, None); } } Task::LoadProcMacros(progress) => { @@ -1008,7 +1008,13 @@ impl GlobalState { fn handle_flycheck_msg(&mut self, message: FlycheckMessage) { match message { - FlycheckMessage::AddDiagnostic { id, workspace_root, diagnostic, package_id } => { + FlycheckMessage::AddDiagnostic { + id, + generation, + workspace_root, + diagnostic, + package_id, + } => { let snap = self.snapshot(); let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp( 
&self.config.diagnostics_map(None), @@ -1020,6 +1026,7 @@ impl GlobalState { match url_to_file_id(&self.vfs.read().0, &diag.url) { Ok(Some(file_id)) => self.diagnostics.add_check_diagnostic( id, + generation, &package_id, file_id, diag.diagnostic, @@ -1035,12 +1042,17 @@ impl GlobalState { }; } } - FlycheckMessage::ClearDiagnostics { id, package_id: None } => { + FlycheckMessage::ClearDiagnostics { id, kind: ClearDiagnosticsKind::All } => { self.diagnostics.clear_check(id) } - FlycheckMessage::ClearDiagnostics { id, package_id: Some(package_id) } => { - self.diagnostics.clear_check_for_package(id, package_id) - } + FlycheckMessage::ClearDiagnostics { + id, + kind: ClearDiagnosticsKind::OlderThan(generation), + } => self.diagnostics.clear_check_older_than(id, generation), + FlycheckMessage::ClearDiagnostics { + id, + kind: ClearDiagnosticsKind::Package(package_id), + } => self.diagnostics.clear_check_for_package(id, package_id), FlycheckMessage::Progress { id, progress } => { let (state, message) = match progress { flycheck::Progress::DidStart => (Progress::Begin, None), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index aa38aa72d44eb..ca15e6a98e035 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -152,7 +152,9 @@ impl GlobalState { if self.fetch_build_data_error().is_err() { status.health |= lsp_ext::Health::Warning; message.push_str("Failed to run build scripts of some packages.\n\n"); - message.push_str("Please refer to the logs for more details on the errors."); + message.push_str( + "Please refer to the language server logs for more details on the errors.", + ); } if let Some(err) = &self.config_errors { status.health |= lsp_ext::Health::Warning; @@ -316,7 +318,7 @@ impl GlobalState { } } - let mut workspaces = linked_projects + let mut workspaces: Vec<_> = linked_projects .iter() .map(|project| match project { LinkedProject::ProjectManifest(manifest) => { @@ -337,7 +339,7 @@ impl GlobalState { Ok(workspace) } }) - .collect::>(); + .collect(); let mut i = 0; while i < workspaces.len() { @@ -739,13 +741,16 @@ impl GlobalState { }) .collect(); + self.incomplete_crate_graph = false; let (crate_graph, proc_macro_paths) = { // Create crate graph from all the workspaces let vfs = &self.vfs.read().0; let load = |path: &AbsPath| { let vfs_path = vfs::VfsPath::from(path.to_path_buf()); self.crate_graph_file_dependencies.insert(vfs_path.clone()); - vfs.file_id(&vfs_path).and_then(|(file_id, excluded)| { + let file_id = vfs.file_id(&vfs_path); + self.incomplete_crate_graph |= file_id.is_none(); + file_id.and_then(|(file_id, excluded)| { (excluded == vfs::FileExcluded::No).then_some(file_id) }) }; @@ -846,21 +851,17 @@ impl GlobalState { fn reload_flycheck(&mut self) { let _p = tracing::info_span!("GlobalState::reload_flycheck").entered(); let config = self.config.flycheck(None); - let sender = self.flycheck_sender.clone(); - let invocation_strategy = match config { - FlycheckConfig::CargoCommand { .. } => { - crate::flycheck::InvocationStrategy::PerWorkspace - } - FlycheckConfig::CustomCommand { ref invocation_strategy, .. 
} => { - invocation_strategy.clone() - } - }; + let sender = &self.flycheck_sender; + let invocation_strategy = config.invocation_strategy(); + let next_gen = + self.flycheck.iter().map(FlycheckHandle::generation).max().unwrap_or_default() + 1; self.flycheck = match invocation_strategy { crate::flycheck::InvocationStrategy::Once => { vec![FlycheckHandle::spawn( 0, - sender, + next_gen, + sender.clone(), config, None, self.config.root_path().clone(), @@ -898,6 +899,7 @@ impl GlobalState { .map(|(id, (root, manifest_path), sysroot_root)| { FlycheckHandle::spawn( id, + next_gen, sender.clone(), config.clone(), sysroot_root, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs index 1b940c70da66b..8a04bc7798f8b 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs @@ -1061,7 +1061,7 @@ fn main() { ), work_done_progress_params: Default::default(), }); - assert!(res.to_string().contains("&'static str")); + assert!(res.to_string().contains("&str")); let res = server.send_request::(HoverParams { text_document_position_params: TextDocumentPositionParams::new( @@ -1070,7 +1070,7 @@ fn main() { ), work_done_progress_params: Default::default(), }); - assert!(res.to_string().contains("&'static str")); + assert!(res.to_string().contains("&str")); server.request::( GotoDefinitionParams { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs index 2bebb0c1b9700..3464a9644b19d 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs @@ -97,6 +97,7 @@ impl Project<'_> { proc_macro_names, toolchain, target_data_layout: _, + target_arch: _, } = FixtureWithProjectMeta::parse(self.fixture); assert!(proc_macro_names.is_empty()); assert!(mini_core.is_none()); @@ -177,6 +178,7 @@ impl Project<'_> { proc_macro_names, toolchain, target_data_layout: _, + target_arch: _, } = FixtureWithProjectMeta::parse(self.fixture); assert!(proc_macro_names.is_empty()); assert!(mini_core.is_none()); diff --git a/src/tools/rust-analyzer/crates/span/src/ast_id.rs b/src/tools/rust-analyzer/crates/span/src/ast_id.rs index a9288ecd6fa1f..e803747998b54 100644 --- a/src/tools/rust-analyzer/crates/span/src/ast_id.rs +++ b/src/tools/rust-analyzer/crates/span/src/ast_id.rs @@ -485,8 +485,7 @@ register_has_name_ast_id! { MacroRules = name, Module = name, Static = name, - Trait = name, - TraitAlias = name + Trait = name } macro_rules! register_assoc_item_ast_id { diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram index 6d8a360d715b7..d73d60c51f0c8 100644 --- a/src/tools/rust-analyzer/crates/syntax/rust.ungram +++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram @@ -154,7 +154,6 @@ Item = | Static | Struct | Trait -| TraitAlias | TypeAlias | Union | Use @@ -306,11 +305,8 @@ Trait = Attr* Visibility? 'unsafe'? 'auto'? 'trait' Name GenericParamList? - (':' TypeBoundList?)? WhereClause? AssocItemList - -TraitAlias = - Attr* Visibility? - 'trait' Name GenericParamList? '=' TypeBoundList? WhereClause? ';' + (((':' TypeBoundList?)? WhereClause? AssocItemList) | + ('=' TypeBoundList? WhereClause? 
';')) AssocItemList = '{' Attr* AssocItem* '}' diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast.rs b/src/tools/rust-analyzer/crates/syntax/src/ast.rs index a9aeeedb6542e..19c1c5ebea333 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast.rs @@ -26,8 +26,7 @@ pub use self::{ generated::{nodes::*, tokens::*}, node_ext::{ AttrKind, FieldKind, Macro, NameLike, NameOrNameRef, PathSegmentKind, SelfParamKind, - SlicePatComponents, StructKind, TraitOrAlias, TypeBoundKind, TypeOrConstParam, - VisibilityKind, + SlicePatComponents, StructKind, TypeBoundKind, TypeOrConstParam, VisibilityKind, }, operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp}, token_ext::{CommentKind, CommentPlacement, CommentShape, IsString, QuoteOffsets, Radix}, diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs index b50ce6442432d..1cd8146f68630 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs @@ -99,38 +99,10 @@ impl GenericParamsOwnerEdit for ast::Trait { fn get_or_create_where_clause(&self) -> ast::WhereClause { if self.where_clause().is_none() { - let position = match self.assoc_item_list() { - Some(items) => Position::before(items.syntax()), - None => Position::last_child_of(self.syntax()), - }; - create_where_clause(position); - } - self.where_clause().unwrap() - } -} - -impl GenericParamsOwnerEdit for ast::TraitAlias { - fn get_or_create_generic_param_list(&self) -> ast::GenericParamList { - match self.generic_param_list() { - Some(it) => it, - None => { - let position = if let Some(name) = self.name() { - Position::after(name.syntax) - } else if let Some(trait_token) = self.trait_token() { - Position::after(trait_token) - } else { - Position::last_child_of(self.syntax()) - }; - create_generic_param_list(position) - } - } - } - - fn get_or_create_where_clause(&self) -> ast::WhereClause { - if self.where_clause().is_none() { - let position = match self.semicolon_token() { - Some(tok) => Position::before(tok), - None => Position::last_child_of(self.syntax()), + let position = match (self.assoc_item_list(), self.semicolon_token()) { + (Some(items), _) => Position::before(items.syntax()), + (_, Some(tok)) => Position::before(tok), + (None, None) => Position::last_child_of(self.syntax()), }; create_where_clause(position); } @@ -273,28 +245,6 @@ pub trait AttrsOwnerEdit: ast::HasAttrs { } } } - - fn add_attr(&self, attr: ast::Attr) { - add_attr(self.syntax(), attr); - - fn add_attr(node: &SyntaxNode, attr: ast::Attr) { - let indent = IndentLevel::from_node(node); - attr.reindent_to(indent); - - let after_attrs_and_comments = node - .children_with_tokens() - .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR)) - .map_or(Position::first_child_of(node), Position::before); - - ted::insert_all( - after_attrs_and_comments, - vec![ - attr.syntax().clone().into(), - make::tokens::whitespace(&format!("\n{indent}")).into(), - ], - ) - } - } } impl AttrsOwnerEdit for T {} diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs index ceb2866ebcdf7..d60196d492fc3 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs @@ -1614,29 +1614,15 @@ impl Trait { #[inline] pub fn 
assoc_item_list(&self) -> Option { support::child(&self.syntax) } #[inline] - pub fn auto_token(&self) -> Option { support::token(&self.syntax, T![auto]) } - #[inline] - pub fn trait_token(&self) -> Option { support::token(&self.syntax, T![trait]) } - #[inline] - pub fn unsafe_token(&self) -> Option { support::token(&self.syntax, T![unsafe]) } -} -pub struct TraitAlias { - pub(crate) syntax: SyntaxNode, -} -impl ast::HasAttrs for TraitAlias {} -impl ast::HasDocComments for TraitAlias {} -impl ast::HasGenericParams for TraitAlias {} -impl ast::HasName for TraitAlias {} -impl ast::HasVisibility for TraitAlias {} -impl TraitAlias { - #[inline] - pub fn type_bound_list(&self) -> Option { support::child(&self.syntax) } - #[inline] pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } #[inline] pub fn eq_token(&self) -> Option { support::token(&self.syntax, T![=]) } #[inline] + pub fn auto_token(&self) -> Option { support::token(&self.syntax, T![auto]) } + #[inline] pub fn trait_token(&self) -> Option { support::token(&self.syntax, T![trait]) } + #[inline] + pub fn unsafe_token(&self) -> Option { support::token(&self.syntax, T![unsafe]) } } pub struct TryExpr { pub(crate) syntax: SyntaxNode, @@ -2107,7 +2093,6 @@ pub enum Item { Static(Static), Struct(Struct), Trait(Trait), - TraitAlias(TraitAlias), TypeAlias(TypeAlias), Union(Union), Use(Use), @@ -6801,42 +6786,6 @@ impl fmt::Debug for Trait { f.debug_struct("Trait").field("syntax", &self.syntax).finish() } } -impl AstNode for TraitAlias { - #[inline] - fn kind() -> SyntaxKind - where - Self: Sized, - { - TRAIT_ALIAS - } - #[inline] - fn can_cast(kind: SyntaxKind) -> bool { kind == TRAIT_ALIAS } - #[inline] - fn cast(syntax: SyntaxNode) -> Option { - if Self::can_cast(syntax.kind()) { - Some(Self { syntax }) - } else { - None - } - } - #[inline] - fn syntax(&self) -> &SyntaxNode { &self.syntax } -} -impl hash::Hash for TraitAlias { - fn hash(&self, state: &mut H) { self.syntax.hash(state); } -} -impl Eq for TraitAlias {} -impl PartialEq for TraitAlias { - fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax } -} -impl Clone for TraitAlias { - fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } } -} -impl fmt::Debug for TraitAlias { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("TraitAlias").field("syntax", &self.syntax).finish() - } -} impl AstNode for TryExpr { #[inline] fn kind() -> SyntaxKind @@ -8471,10 +8420,6 @@ impl From for Item { #[inline] fn from(node: Trait) -> Item { Item::Trait(node) } } -impl From for Item { - #[inline] - fn from(node: TraitAlias) -> Item { Item::TraitAlias(node) } -} impl From for Item { #[inline] fn from(node: TypeAlias) -> Item { Item::TypeAlias(node) } @@ -8506,7 +8451,6 @@ impl AstNode for Item { | STATIC | STRUCT | TRAIT - | TRAIT_ALIAS | TYPE_ALIAS | UNION | USE @@ -8529,7 +8473,6 @@ impl AstNode for Item { STATIC => Item::Static(Static { syntax }), STRUCT => Item::Struct(Struct { syntax }), TRAIT => Item::Trait(Trait { syntax }), - TRAIT_ALIAS => Item::TraitAlias(TraitAlias { syntax }), TYPE_ALIAS => Item::TypeAlias(TypeAlias { syntax }), UNION => Item::Union(Union { syntax }), USE => Item::Use(Use { syntax }), @@ -8554,7 +8497,6 @@ impl AstNode for Item { Item::Static(it) => &it.syntax, Item::Struct(it) => &it.syntax, Item::Trait(it) => &it.syntax, - Item::TraitAlias(it) => &it.syntax, Item::TypeAlias(it) => &it.syntax, Item::Union(it) => &it.syntax, Item::Use(it) => &it.syntax, @@ -8984,7 +8926,6 @@ impl AstNode for 
AnyHasAttrs { | STMT_LIST | STRUCT | TRAIT - | TRAIT_ALIAS | TRY_EXPR | TUPLE_EXPR | TUPLE_FIELD @@ -9257,10 +9198,6 @@ impl From for AnyHasAttrs { #[inline] fn from(node: Trait) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } } -impl From for AnyHasAttrs { - #[inline] - fn from(node: TraitAlias) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } -} impl From for AnyHasAttrs { #[inline] fn from(node: TryExpr) -> AnyHasAttrs { AnyHasAttrs { syntax: node.syntax } } @@ -9330,7 +9267,6 @@ impl AstNode for AnyHasDocComments { | STATIC | STRUCT | TRAIT - | TRAIT_ALIAS | TUPLE_FIELD | TYPE_ALIAS | UNION @@ -9420,10 +9356,6 @@ impl From for AnyHasDocComments { #[inline] fn from(node: Trait) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } } -impl From for AnyHasDocComments { - #[inline] - fn from(node: TraitAlias) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } -} impl From for AnyHasDocComments { #[inline] fn from(node: TupleField) -> AnyHasDocComments { AnyHasDocComments { syntax: node.syntax } } @@ -9488,7 +9420,7 @@ impl ast::HasGenericParams for AnyHasGenericParams {} impl AstNode for AnyHasGenericParams { #[inline] fn can_cast(kind: SyntaxKind) -> bool { - matches!(kind, CONST | ENUM | FN | IMPL | STRUCT | TRAIT | TRAIT_ALIAS | TYPE_ALIAS | UNION) + matches!(kind, CONST | ENUM | FN | IMPL | STRUCT | TRAIT | TYPE_ALIAS | UNION) } #[inline] fn cast(syntax: SyntaxNode) -> Option { @@ -9536,10 +9468,6 @@ impl From for AnyHasGenericParams { #[inline] fn from(node: Trait) -> AnyHasGenericParams { AnyHasGenericParams { syntax: node.syntax } } } -impl From for AnyHasGenericParams { - #[inline] - fn from(node: TraitAlias) -> AnyHasGenericParams { AnyHasGenericParams { syntax: node.syntax } } -} impl From for AnyHasGenericParams { #[inline] fn from(node: TypeAlias) -> AnyHasGenericParams { AnyHasGenericParams { syntax: node.syntax } } @@ -9646,7 +9574,6 @@ impl AstNode for AnyHasName { | STATIC | STRUCT | TRAIT - | TRAIT_ALIAS | TYPE_ALIAS | TYPE_PARAM | UNION @@ -9739,10 +9666,6 @@ impl From for AnyHasName { #[inline] fn from(node: Trait) -> AnyHasName { AnyHasName { syntax: node.syntax } } } -impl From for AnyHasName { - #[inline] - fn from(node: TraitAlias) -> AnyHasName { AnyHasName { syntax: node.syntax } } -} impl From for AnyHasName { #[inline] fn from(node: TypeAlias) -> AnyHasName { AnyHasName { syntax: node.syntax } } @@ -9832,7 +9755,6 @@ impl AstNode for AnyHasVisibility { | STATIC | STRUCT | TRAIT - | TRAIT_ALIAS | TUPLE_FIELD | TYPE_ALIAS | UNION @@ -9910,10 +9832,6 @@ impl From for AnyHasVisibility { #[inline] fn from(node: Trait) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } } -impl From for AnyHasVisibility { - #[inline] - fn from(node: TraitAlias) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } -} impl From for AnyHasVisibility { #[inline] fn from(node: TupleField) -> AnyHasVisibility { AnyHasVisibility { syntax: node.syntax } } @@ -10639,11 +10557,6 @@ impl std::fmt::Display for Trait { std::fmt::Display::fmt(self.syntax(), f) } } -impl std::fmt::Display for TraitAlias { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - std::fmt::Display::fmt(self.syntax(), f) - } -} impl std::fmt::Display for TryExpr { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs index daeb79cf081dc..051c5835571bc 100644 --- 
a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs @@ -190,6 +190,7 @@ fn ty_from_text(text: &str) -> ast::Type { } pub fn ty_alias( + attrs: impl IntoIterator, ident: &str, generic_param_list: Option, type_param_bounds: Option, @@ -200,6 +201,7 @@ pub fn ty_alias( let assignment_where = assignment_where.flatten(); quote! { TypeAlias { + #(#attrs "\n")* [type] " " Name { [IDENT ident] } #generic_param_list @@ -229,6 +231,23 @@ pub fn ty_fn_ptr>( } } +pub fn item_list(body: Option>) -> ast::ItemList { + let is_break_braces = body.is_some(); + let body_newline = if is_break_braces { "\n" } else { "" }; + let body_indent = if is_break_braces { " " } else { "" }; + + let body = match body { + Some(bd) => bd.iter().map(|elem| elem.to_string()).join("\n\n "), + None => String::new(), + }; + ast_from_text(&format!("mod C {{{body_newline}{body_indent}{body}{body_newline}}}")) +} + +pub fn mod_(name: ast::Name, body: Option) -> ast::Module { + let body = body.map_or(";".to_owned(), |body| format!(" {body}")); + ast_from_text(&format!("mod {name}{body}")) +} + pub fn assoc_item_list(body: Option>) -> ast::AssocItemList { let is_break_braces = body.is_some(); let body_newline = if is_break_braces { "\n".to_owned() } else { String::new() }; @@ -277,12 +296,16 @@ fn merge_where_clause( } pub fn impl_( + attrs: impl IntoIterator, generic_params: Option, generic_args: Option, path_type: ast::Type, where_clause: Option, body: Option, ) -> ast::Impl { + let attrs = + attrs.into_iter().fold(String::new(), |mut acc, attr| format_to_acc!(acc, "{}\n", attr)); + let gen_args = generic_args.map_or_else(String::new, |it| it.to_string()); let gen_params = generic_params.map_or_else(String::new, |it| it.to_string()); @@ -295,10 +318,11 @@ pub fn impl_( }; let body = body.map_or_else(|| format!("{{{body_newline}}}"), |it| it.to_string()); - ast_from_text(&format!("impl{gen_params} {path_type}{gen_args}{where_clause}{body}")) + ast_from_text(&format!("{attrs}impl{gen_params} {path_type}{gen_args}{where_clause}{body}")) } pub fn impl_trait( + attrs: impl IntoIterator, is_unsafe: bool, trait_gen_params: Option, trait_gen_args: Option, @@ -311,6 +335,8 @@ pub fn impl_trait( ty_where_clause: Option, body: Option, ) -> ast::Impl { + let attrs = + attrs.into_iter().fold(String::new(), |mut acc, attr| format_to_acc!(acc, "{}\n", attr)); let is_unsafe = if is_unsafe { "unsafe " } else { "" }; let trait_gen_args = trait_gen_args.map(|args| args.to_string()).unwrap_or_default(); @@ -334,7 +360,7 @@ pub fn impl_trait( let body = body.map_or_else(|| format!("{{{body_newline}}}"), |it| it.to_string()); ast_from_text(&format!( - "{is_unsafe}impl{gen_params} {is_negative}{path_type}{trait_gen_args} for {ty}{type_gen_args}{where_clause}{body}" + "{attrs}{is_unsafe}impl{gen_params} {is_negative}{path_type}{trait_gen_args} for {ty}{type_gen_args}{where_clause}{body}" )) } @@ -452,12 +478,18 @@ pub fn use_tree_list(use_trees: impl IntoIterator) -> ast:: ast_from_text(&format!("use {{{use_trees}}};")) } -pub fn use_(visibility: Option, use_tree: ast::UseTree) -> ast::Use { +pub fn use_( + attrs: impl IntoIterator, + visibility: Option, + use_tree: ast::UseTree, +) -> ast::Use { + let attrs = + attrs.into_iter().fold(String::new(), |mut acc, attr| format_to_acc!(acc, "{}\n", attr)); let visibility = match visibility { None => String::new(), Some(it) => format!("{it} "), }; - ast_from_text(&format!("{visibility}use {use_tree};")) + 
ast_from_text(&format!("{attrs}{visibility}use {use_tree};")) } pub fn record_expr(path: ast::Path, fields: ast::RecordExprFieldList) -> ast::RecordExpr { @@ -946,16 +978,19 @@ pub fn expr_stmt(expr: ast::Expr) -> ast::ExprStmt { } pub fn item_const( + attrs: impl IntoIterator, visibility: Option, name: ast::Name, ty: ast::Type, expr: ast::Expr, ) -> ast::Const { + let attrs = + attrs.into_iter().fold(String::new(), |mut acc, attr| format_to_acc!(acc, "{}\n", attr)); let visibility = match visibility { None => String::new(), Some(it) => format!("{it} "), }; - ast_from_text(&format!("{visibility}const {name}: {ty} = {expr};")) + ast_from_text(&format!("{attrs}{visibility}const {name}: {ty} = {expr};")) } pub fn item_static( @@ -1162,6 +1197,7 @@ pub fn variant( } pub fn fn_( + attrs: impl IntoIterator, visibility: Option, fn_name: ast::Name, type_params: Option, @@ -1174,6 +1210,8 @@ pub fn fn_( is_unsafe: bool, is_gen: bool, ) -> ast::Fn { + let attrs = + attrs.into_iter().fold(String::new(), |mut acc, attr| format_to_acc!(acc, "{}\n", attr)); let type_params = match type_params { Some(type_params) => format!("{type_params}"), None => "".into(), @@ -1197,7 +1235,7 @@ pub fn fn_( let gen_literal = if is_gen { "gen " } else { "" }; ast_from_text(&format!( - "{visibility}{const_literal}{async_literal}{gen_literal}{unsafe_literal}fn {fn_name}{type_params}{params} {ret_type}{where_clause}{body}", + "{attrs}{visibility}{const_literal}{async_literal}{gen_literal}{unsafe_literal}fn {fn_name}{type_params}{params} {ret_type}{where_clause}{body}", )) } pub fn struct_( @@ -1206,23 +1244,29 @@ pub fn struct_( generic_param_list: Option, field_list: ast::FieldList, ) -> ast::Struct { - let semicolon = if matches!(field_list, ast::FieldList::TupleFieldList(_)) { ";" } else { "" }; + let (semicolon, ws) = + if matches!(field_list, ast::FieldList::TupleFieldList(_)) { (";", "") } else { ("", " ") }; let type_params = generic_param_list.map_or_else(String::new, |it| it.to_string()); let visibility = match visibility { None => String::new(), Some(it) => format!("{it} "), }; - ast_from_text(&format!("{visibility}struct {strukt_name}{type_params}{field_list}{semicolon}",)) + ast_from_text(&format!( + "{visibility}struct {strukt_name}{type_params}{ws}{field_list}{semicolon}" + )) } pub fn enum_( + attrs: impl IntoIterator, visibility: Option, enum_name: ast::Name, generic_param_list: Option, where_clause: Option, variant_list: ast::VariantList, ) -> ast::Enum { + let attrs = + attrs.into_iter().fold(String::new(), |mut acc, attr| format_to_acc!(acc, "{}\n", attr)); let visibility = match visibility { None => String::new(), Some(it) => format!("{it} "), @@ -1232,7 +1276,7 @@ pub fn enum_( let where_clause = where_clause.map(|it| format!(" {it}")).unwrap_or_default(); ast_from_text(&format!( - "{visibility}enum {enum_name}{generic_params}{where_clause} {variant_list}" + "{attrs}{visibility}enum {enum_name}{generic_params}{where_clause} {variant_list}" )) } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs index 62a7d4df2cf6b..af741d100f680 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs @@ -12,8 +12,8 @@ use rowan::{GreenNodeData, GreenTokenData}; use crate::{ NodeOrToken, SmolStr, SyntaxElement, SyntaxToken, T, TokenText, ast::{ - self, AstNode, AstToken, HasAttrs, HasGenericArgs, HasGenericParams, HasName, SyntaxNode, - support, + self, AstNode, 
AstToken, HasAttrs, HasGenericArgs, HasGenericParams, HasName, + HasTypeBounds, SyntaxNode, support, }, ted, }; @@ -880,51 +880,6 @@ impl AstNode for TypeOrConstParam { impl HasAttrs for TypeOrConstParam {} -#[derive(Debug, Clone)] -pub enum TraitOrAlias { - Trait(ast::Trait), - TraitAlias(ast::TraitAlias), -} - -impl TraitOrAlias { - pub fn name(&self) -> Option { - match self { - TraitOrAlias::Trait(x) => x.name(), - TraitOrAlias::TraitAlias(x) => x.name(), - } - } -} - -impl AstNode for TraitOrAlias { - fn can_cast(kind: SyntaxKind) -> bool - where - Self: Sized, - { - matches!(kind, SyntaxKind::TRAIT | SyntaxKind::TRAIT_ALIAS) - } - - fn cast(syntax: SyntaxNode) -> Option - where - Self: Sized, - { - let res = match syntax.kind() { - SyntaxKind::TRAIT => TraitOrAlias::Trait(ast::Trait { syntax }), - SyntaxKind::TRAIT_ALIAS => TraitOrAlias::TraitAlias(ast::TraitAlias { syntax }), - _ => return None, - }; - Some(res) - } - - fn syntax(&self) -> &SyntaxNode { - match self { - TraitOrAlias::Trait(it) => it.syntax(), - TraitOrAlias::TraitAlias(it) => it.syntax(), - } - } -} - -impl HasAttrs for TraitOrAlias {} - pub enum VisibilityKind { In(ast::Path), PubCrate, @@ -957,11 +912,10 @@ impl ast::Visibility { impl ast::LifetimeParam { pub fn lifetime_bounds(&self) -> impl Iterator { - self.syntax() - .children_with_tokens() - .filter_map(|it| it.into_token()) - .skip_while(|x| x.kind() != T![:]) - .filter(|it| it.kind() == T![lifetime_ident]) + self.type_bound_list() + .into_iter() + .flat_map(|it| it.bounds()) + .filter_map(|it| it.lifetime()?.lifetime_ident_token()) } } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs index 738a26fed5d82..8bf27f967482b 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs @@ -2,8 +2,8 @@ use crate::{ AstNode, NodeOrToken, SyntaxKind, SyntaxNode, SyntaxToken, ast::{ - self, HasArgList, HasGenericArgs, HasGenericParams, HasLoopBody, HasName, HasTypeBounds, - HasVisibility, RangeItem, make, + self, HasArgList, HasAttrs, HasGenericArgs, HasGenericParams, HasLoopBody, HasName, + HasTypeBounds, HasVisibility, RangeItem, make, }, syntax_editor::SyntaxMappingBuilder, }; @@ -107,8 +107,13 @@ impl SyntaxFactory { ast } - pub fn use_(&self, visibility: Option, use_tree: ast::UseTree) -> ast::Use { - make::use_(visibility, use_tree).clone_for_update() + pub fn use_( + &self, + attrs: impl IntoIterator, + visibility: Option, + use_tree: ast::UseTree, + ) -> ast::Use { + make::use_(attrs, visibility, use_tree).clone_for_update() } pub fn use_tree( @@ -840,16 +845,20 @@ impl SyntaxFactory { pub fn item_const( &self, + attrs: impl IntoIterator, visibility: Option, name: ast::Name, ty: ast::Type, expr: ast::Expr, ) -> ast::Const { - let ast = make::item_const(visibility.clone(), name.clone(), ty.clone(), expr.clone()) - .clone_for_update(); + let (attrs, attrs_input) = iterator_input(attrs); + let ast = + make::item_const(attrs, visibility.clone(), name.clone(), ty.clone(), expr.clone()) + .clone_for_update(); if let Some(mut mapping) = self.mappings() { let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone()); + builder.map_children(attrs_input, ast.attrs().map(|attr| attr.syntax().clone())); if let Some(visibility) = visibility { builder.map_node( visibility.syntax().clone(), @@ -1067,6 +1076,7 @@ impl SyntaxFactory { pub fn 
item_enum( &self, + attrs: impl IntoIterator, visibility: Option, name: ast::Name, generic_param_list: Option, @@ -1074,6 +1084,7 @@ impl SyntaxFactory { variant_list: ast::VariantList, ) -> ast::Enum { let ast = make::enum_( + attrs, visibility.clone(), name.clone(), generic_param_list.clone(), @@ -1182,6 +1193,7 @@ impl SyntaxFactory { pub fn fn_( &self, + attrs: impl IntoIterator, visibility: Option, fn_name: ast::Name, type_params: Option, @@ -1194,7 +1206,9 @@ impl SyntaxFactory { is_unsafe: bool, is_gen: bool, ) -> ast::Fn { + let (attrs, input) = iterator_input(attrs); let ast = make::fn_( + attrs, visibility.clone(), fn_name.clone(), type_params.clone(), @@ -1210,6 +1224,7 @@ impl SyntaxFactory { if let Some(mut mapping) = self.mappings() { let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone()); + builder.map_children(input, ast.attrs().map(|attr| attr.syntax().clone())); if let Some(visibility) = visibility { builder.map_node( diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs index 18f5015e9eabd..0b358878fcf23 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs @@ -618,6 +618,7 @@ mod tests { #[test] fn test_replace_token_in_parent() { let parent_fn = make::fn_( + None, None, make::name("it"), None, diff --git a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs index 4413d2f222c15..7574d12c0cfd0 100644 --- a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs +++ b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs @@ -1,10 +1,11 @@ //! A set of high-level utility fixture methods to use in tests. 
use std::{any::TypeId, mem, str::FromStr, sync}; +use base_db::target::TargetData; use base_db::{ Crate, CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CrateWorkspaceData, - DependencyBuilder, Env, FileChange, FileSet, LangCrateOrigin, SourceDatabase, SourceRoot, - Version, VfsPath, salsa, + DependencyBuilder, Env, FileChange, FileSet, FxIndexMap, LangCrateOrigin, SourceDatabase, + SourceRoot, Version, VfsPath, salsa, }; use cfg::CfgOptions; use hir_expand::{ @@ -20,7 +21,6 @@ use hir_expand::{ }; use intern::{Symbol, sym}; use paths::AbsPathBuf; -use rustc_hash::FxHashMap; use span::{Edition, FileId, Span}; use stdx::itertools::Itertools; use test_utils::{ @@ -137,8 +137,11 @@ impl ChangeFixture { proc_macro_names, toolchain, target_data_layout, + target_arch, } = FixtureWithProjectMeta::parse(ra_fixture); - let target_data_layout = Ok(target_data_layout.into()); + let target_data_layout = target_data_layout.into(); + let target_arch = parse_target_arch(&target_arch); + let target = Ok(TargetData { arch: target_arch, data_layout: target_data_layout }); let toolchain = Some({ let channel = toolchain.as_deref().unwrap_or("stable"); Version::parse(&format!("1.76.0-{channel}")).unwrap() @@ -147,7 +150,7 @@ impl ChangeFixture { let mut files = Vec::new(); let mut crate_graph = CrateGraphBuilder::default(); - let mut crates = FxHashMap::default(); + let mut crates = FxIndexMap::default(); let mut crate_deps = Vec::new(); let mut default_crate_root: Option = None; let mut default_edition = Edition::CURRENT; @@ -164,8 +167,7 @@ impl ChangeFixture { let mut file_position = None; - let crate_ws_data = - Arc::new(CrateWorkspaceData { data_layout: target_data_layout, toolchain }); + let crate_ws_data = Arc::new(CrateWorkspaceData { target, toolchain }); // FIXME: This is less than ideal let proc_macro_cwd = Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())); @@ -249,37 +251,7 @@ impl ChangeFixture { file_id = FileId::from_raw(file_id.index() + 1); } - if crates.is_empty() { - let crate_root = default_crate_root - .expect("missing default crate root, specify a main.rs or lib.rs"); - crate_graph.add_crate_root( - crate_root, - default_edition, - Some(CrateName::new("ra_test_fixture").unwrap().into()), - None, - default_cfg.clone(), - Some(default_cfg), - default_env, - CrateOrigin::Local { repo: None, name: None }, - false, - proc_macro_cwd.clone(), - crate_ws_data.clone(), - ); - } else { - for (from, to, prelude) in crate_deps { - let from_id = crates[&from]; - let to_id = crates[&to]; - let sysroot = crate_graph[to_id].basic.origin.is_lang(); - crate_graph - .add_dep( - from_id, - DependencyBuilder::with_prelude(to.clone(), to_id, prelude, sysroot), - ) - .unwrap(); - } - } - - if let Some(mini_core) = mini_core { + let mini_core = mini_core.map(|mini_core| { let core_file = file_id; file_id = FileId::from_raw(file_id.index() + 1); @@ -289,8 +261,6 @@ impl ChangeFixture { source_change.change_file(core_file, Some(mini_core.source_code())); - let all_crates = crate_graph.iter().collect::>(); - let core_crate = crate_graph.add_crate_root( core_file, Edition::CURRENT, @@ -308,16 +278,58 @@ impl ChangeFixture { crate_ws_data.clone(), ); - for krate in all_crates { + ( + move || { + DependencyBuilder::with_prelude( + CrateName::new("core").unwrap(), + core_crate, + true, + true, + ) + }, + core_crate, + ) + }); + + if crates.is_empty() { + let crate_root = default_crate_root + .expect("missing default crate root, specify a main.rs or lib.rs"); + let root = 
crate_graph.add_crate_root( + crate_root, + default_edition, + Some(CrateName::new("ra_test_fixture").unwrap().into()), + None, + default_cfg.clone(), + Some(default_cfg), + default_env, + CrateOrigin::Local { repo: None, name: None }, + false, + proc_macro_cwd.clone(), + crate_ws_data.clone(), + ); + if let Some((mini_core, _)) = mini_core { + crate_graph.add_dep(root, mini_core()).unwrap(); + } + } else { + // Insert minicore first to match with `project-model::workspace` + if let Some((mini_core, core_crate)) = mini_core { + let all_crates = crate_graph.iter().collect::>(); + for krate in all_crates { + if krate == core_crate { + continue; + } + crate_graph.add_dep(krate, mini_core()).unwrap(); + } + } + + for (from, to, prelude) in crate_deps { + let from_id = crates[&from]; + let to_id = crates[&to]; + let sysroot = crate_graph[to_id].basic.origin.is_lang(); crate_graph .add_dep( - krate, - DependencyBuilder::with_prelude( - CrateName::new("core").unwrap(), - core_crate, - true, - true, - ), + from_id, + DependencyBuilder::with_prelude(to.clone(), to_id, prelude, sysroot), ) .unwrap(); } @@ -386,6 +398,15 @@ impl ChangeFixture { } } +fn parse_target_arch(arch: &str) -> base_db::target::Arch { + use base_db::target::Arch::*; + match arch { + "wasm32" => Wasm32, + "wasm64" => Wasm64, + _ => Other, + } +} + fn default_test_proc_macros() -> Box<[(String, ProcMacro)]> { Box::new([ ( @@ -627,11 +648,23 @@ impl FileMeta { } } +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum ForceNoneLangOrigin { + Yes, + No, +} + fn parse_crate( crate_str: String, current_source_root_kind: SourceRootKind, explicit_non_workspace_member: bool, ) -> (String, CrateOrigin, Option) { + let (crate_str, force_non_lang_origin) = if let Some(s) = crate_str.strip_prefix("r#") { + (s.to_owned(), ForceNoneLangOrigin::Yes) + } else { + (crate_str, ForceNoneLangOrigin::No) + }; + // syntax: // "my_awesome_crate" // "my_awesome_crate@0.0.1,http://example.com" @@ -646,16 +679,25 @@ fn parse_crate( let non_workspace_member = explicit_non_workspace_member || matches!(current_source_root_kind, SourceRootKind::Library); - let origin = match LangCrateOrigin::from(&*name) { - LangCrateOrigin::Other => { - let name = Symbol::intern(&name); - if non_workspace_member { - CrateOrigin::Library { repo, name } - } else { - CrateOrigin::Local { repo, name: Some(name) } + let origin = if force_non_lang_origin == ForceNoneLangOrigin::Yes { + let name = Symbol::intern(&name); + if non_workspace_member { + CrateOrigin::Library { repo, name } + } else { + CrateOrigin::Local { repo, name: Some(name) } + } + } else { + match LangCrateOrigin::from(&*name) { + LangCrateOrigin::Other => { + let name = Symbol::intern(&name); + if non_workspace_member { + CrateOrigin::Library { repo, name } + } else { + CrateOrigin::Local { repo, name: Some(name) } + } } + origin => CrateOrigin::Lang(origin), } - origin => CrateOrigin::Lang(origin), }; (name, origin, version) diff --git a/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs b/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs index e830c6a7cf688..c024089a016f9 100644 --- a/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs +++ b/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs @@ -149,6 +149,8 @@ pub struct FixtureWithProjectMeta { /// You probably don't want to manually specify this. See LLVM manual for the /// syntax, if you must: pub target_data_layout: String, + /// Specifies the target architecture. 
+ pub target_arch: String, } impl FixtureWithProjectMeta { @@ -178,6 +180,7 @@ impl FixtureWithProjectMeta { let mut toolchain = None; let mut target_data_layout = "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128".to_owned(); + let mut target_arch = "x86_64".to_owned(); let mut mini_core = None; let mut res: Vec = Vec::new(); let mut proc_macro_names = vec![]; @@ -194,6 +197,12 @@ impl FixtureWithProjectMeta { fixture = remain; } + if let Some(meta) = fixture.strip_prefix("//- target_arch:") { + let (meta, remain) = meta.split_once('\n').unwrap(); + meta.trim().clone_into(&mut target_arch); + fixture = remain; + } + if let Some(meta) = fixture.strip_prefix("//- proc_macros:") { let (meta, remain) = meta.split_once('\n').unwrap(); proc_macro_names = meta.split(',').map(|it| it.trim().to_owned()).collect(); @@ -232,7 +241,14 @@ impl FixtureWithProjectMeta { } } - Self { fixture: res, mini_core, proc_macro_names, toolchain, target_data_layout } + Self { + fixture: res, + mini_core, + proc_macro_names, + toolchain, + target_data_layout, + target_arch, + } } //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo @@ -511,6 +527,7 @@ fn parse_fixture_gets_full_meta() { proc_macro_names, toolchain, target_data_layout: _, + target_arch: _, } = FixtureWithProjectMeta::parse( r#" //- toolchain: nightly diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs index 7b719b5dec754..696928b522f94 100644 --- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs +++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs @@ -35,7 +35,7 @@ //! error: fmt //! fmt: option, result, transmute, coerce_unsized, copy, clone, derive //! fmt_before_1_89_0: fmt -//! fn: tuple +//! fn: sized, tuple //! from: sized, result //! future: pin //! coroutine: pin @@ -55,7 +55,7 @@ //! panic: fmt //! phantom_data: //! pin: -//! pointee: copy, send, sync, ord, hash, unpin +//! pointee: copy, send, sync, ord, hash, unpin, phantom_data //! range: //! receiver: deref //! result: @@ -70,6 +70,7 @@ //! tuple: //! unpin: sized //! unsize: sized +//! write: fmt //! todo: panic //! unimplemented: panic //! 
column: @@ -168,6 +169,17 @@ pub mod marker { // region:phantom_data #[lang = "phantom_data"] pub struct PhantomData; + + // region:clone + impl Clone for PhantomData { + fn clone(&self) -> Self { Self } + } + // endregion:clone + + // region:copy + impl Copy for PhantomData {} + // endregion:copy + // endregion:phantom_data // region:discriminant @@ -325,6 +337,18 @@ pub mod clone { *self } } + + impl Clone for [T; 0] { + fn clone(&self) -> Self { + [] + } + } + + impl Clone for [T; 1] { + fn clone(&self) -> Self { + [self[0].clone()] + } + } // endregion:builtin_impls // region:derive @@ -491,6 +515,16 @@ pub mod ptr { #[lang = "metadata_type"] type Metadata: Copy + Send + Sync + Ord + Hash + Unpin; } + + #[lang = "dyn_metadata"] + pub struct DynMetadata { + _phantom: crate::marker::PhantomData, + } + + pub const fn metadata(ptr: *const T) -> ::Metadata { + loop {} + } + // endregion:pointee // region:non_null #[rustc_layout_scalar_valid_range_start(1)] @@ -1035,6 +1069,7 @@ pub mod ops { #[lang = "coroutine"] pub trait Coroutine { + #[lang = "coroutine_yield"] type Yield; #[lang = "coroutine_return"] type Return; @@ -1123,7 +1158,7 @@ pub mod fmt { pub struct Error; pub type Result = crate::result::Result<(), Error>; - pub struct Formatter<'a>; + pub struct Formatter<'a>(&'a ()); pub struct DebugTuple; pub struct DebugStruct; impl Formatter<'_> { @@ -1596,6 +1631,12 @@ pub mod iter { { loop {} } + fn collect>(self) -> B + where + Self: Sized, + { + loop {} + } // endregion:iterators } impl Iterator for &mut I { @@ -1665,10 +1706,13 @@ pub mod iter { loop {} } } + pub trait FromIterator: Sized { + fn from_iter>(iter: T) -> Self; + } } - pub use self::collect::IntoIterator; + pub use self::collect::{IntoIterator, FromIterator}; } - pub use self::traits::{IntoIterator, Iterator}; + pub use self::traits::{IntoIterator, FromIterator, Iterator}; } // endregion:iterator @@ -1769,6 +1813,26 @@ mod macros { } // endregion:panic + // region:write + #[macro_export] + macro_rules! write { + ($dst:expr, $($arg:tt)*) => { + $dst.write_fmt($crate::format_args!($($arg)*)) + }; + } + + #[macro_export] + #[allow_internal_unstable(format_args_nl)] + macro_rules! writeln { + ($dst:expr $(,)?) 
=> { + $crate::write!($dst, "\n") + }; + ($dst:expr, $($arg:tt)*) => { + $dst.write_fmt($crate::format_args_nl!($($arg)*)) + }; + } + // endregion:write + // region:assert #[macro_export] #[rustc_builtin_macro] @@ -1944,7 +2008,7 @@ pub mod prelude { convert::AsRef, // :as_ref convert::{From, Into, TryFrom, TryInto}, // :from default::Default, // :default - iter::{IntoIterator, Iterator}, // :iterator + iter::{IntoIterator, Iterator, FromIterator}, // :iterator macros::builtin::{derive, derive_const}, // :derive marker::Copy, // :copy marker::Send, // :send diff --git a/src/tools/rust-analyzer/docs/book/README.md b/src/tools/rust-analyzer/docs/book/README.md index 0a3161f3af38d..11f7e8f98ca56 100644 --- a/src/tools/rust-analyzer/docs/book/README.md +++ b/src/tools/rust-analyzer/docs/book/README.md @@ -8,6 +8,7 @@ To run the documentation site locally: ```shell cargo install mdbook +cargo install mdbook-toc cargo xtask codegen cd docs/book mdbook serve diff --git a/src/tools/rust-analyzer/docs/book/src/README.md b/src/tools/rust-analyzer/docs/book/src/README.md index 71f34e0346646..060bcf0cea399 100644 --- a/src/tools/rust-analyzer/docs/book/src/README.md +++ b/src/tools/rust-analyzer/docs/book/src/README.md @@ -1,13 +1,20 @@ # rust-analyzer -At its core, rust-analyzer is a **library** for semantic analysis of -Rust code as it changes over time. This manual focuses on a specific -usage of the library -- running it as part of a server that implements +rust-analyzer is a language server that provides IDE functionality for +writing Rust programs. You can use it with any editor that supports the [Language Server -Protocol](https://microsoft.github.io/language-server-protocol/) (LSP). -The LSP allows various code editors, like VS Code, Emacs or Vim, to -implement semantic features like completion or goto definition by -talking to an external language server process. +Protocol](https://microsoft.github.io/language-server-protocol/) (VS +Code, Vim, Emacs, Zed, etc). + +rust-analyzer features include go-to-definition, find-all-references, +refactorings and code completion. rust-analyzer also supports +integrated formatting (with rustfmt) and integrated diagnostics (with +rustc and clippy). + +Internally, rust-analyzer is structured as a set of libraries for +analyzing Rust code. See +[Architecture](https://rust-analyzer.github.io/book/contributing/architecture.html) +for more details. To improve this document, send a pull request: [https://github.com/rust-lang/rust-analyzer](https://github.com/rust-lang/rust-analyzer/blob/master/docs/book/README.md) diff --git a/src/tools/rust-analyzer/docs/book/src/SUMMARY.md b/src/tools/rust-analyzer/docs/book/src/SUMMARY.md index dffdae94a6e86..3fb46d59a4a50 100644 --- a/src/tools/rust-analyzer/docs/book/src/SUMMARY.md +++ b/src/tools/rust-analyzer/docs/book/src/SUMMARY.md @@ -23,3 +23,4 @@ - [Setup](contributing/setup.md) - [Style](contributing/style.md) - [Syntax](contributing/syntax.md) + - [Writing Tests](contributing/testing.md) diff --git a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md index 99a30d8f62138..50dacd88f4072 100644 --- a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md +++ b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md @@ -610,6 +610,13 @@ The warnings will be indicated by a blue squiggly underline in code and a blue i the `Problems Panel`. 
+## rust-analyzer.document.symbol.search.excludeLocals {#document.symbol.search.excludeLocals} + +Default: `true` + +Exclude all locals from document symbol search. + + ## rust-analyzer.files.exclude {#files.exclude} Default: `[]` @@ -952,6 +959,17 @@ Default: `"never"` Show enum variant discriminant hints. +## rust-analyzer.inlayHints.expressionAdjustmentHints.disableReborrows {#inlayHints.expressionAdjustmentHints.disableReborrows} + +Default: `true` + +Disable reborrows in expression adjustments inlay hints. + +Reborrows are a pair of a builtin deref then borrow, i.e. `&*`. They are inserted by the compiler but are mostly useless to the programmer. + +Note: if the deref is not builtin (an overloaded deref), or the borrow is `&raw const`/`&raw mut`, they are not removed. + + ## rust-analyzer.inlayHints.expressionAdjustmentHints.enable {#inlayHints.expressionAdjustmentHints.enable} Default: `"never"` @@ -1360,6 +1378,17 @@ Enables the use of rustfmt's unstable range formatting command for the available on a nightly build. +## rust-analyzer.semanticHighlighting.comments.enable {#semanticHighlighting.comments.enable} + +Default: `true` + +Use semantic tokens for comments. + +In some editors (e.g. vscode) semantic tokens override other highlighting grammars. +By disabling semantic tokens for comments, other grammars can be used to highlight +their contents. + + ## rust-analyzer.semanticHighlighting.doc.comment.inject.enable {#semanticHighlighting.doc.comment.inject.enable} Default: `true` diff --git a/src/tools/rust-analyzer/docs/book/src/contributing/README.md b/src/tools/rust-analyzer/docs/book/src/contributing/README.md index 57c7a9c5996d1..ad2816b18ac14 100644 --- a/src/tools/rust-analyzer/docs/book/src/contributing/README.md +++ b/src/tools/rust-analyzer/docs/book/src/contributing/README.md @@ -276,7 +276,7 @@ There are two sets of people with extra permissions: Feel free to request a review or assign any PR to a reviewer with the relevant expertise to bring the work to their attention. Don't feel pressured to review assigned PRs though. If you don't feel like reviewing for whatever reason, someone else will pick the review up (but please speak up if you don't feel like it)! -* The [rust-lang](https://github.com/rust-lang) team [t-rust-analyzer-contributors]([https://github.com/orgs/rust-analyzer/teams/triage](https://github.com/rust-lang/team/blob/master/teams/rust-analyzer-contributors.toml)). +* The [rust-lang](https://github.com/rust-lang) team [t-rust-analyzer-contributors](https://github.com/rust-lang/team/blob/master/teams/rust-analyzer-contributors.toml). This team has general triaging permissions allowing to label, close and re-open issues. ## Synchronizing subtree changes diff --git a/src/tools/rust-analyzer/docs/book/src/contributing/style.md b/src/tools/rust-analyzer/docs/book/src/contributing/style.md index 746f3eb132117..fe09fb6c2fd52 100644 --- a/src/tools/rust-analyzer/docs/book/src/contributing/style.md +++ b/src/tools/rust-analyzer/docs/book/src/contributing/style.md @@ -101,7 +101,7 @@ Including a description and GIF suitable for the changelog means less work for t ## Clippy -We use Clippy to improve the code, but if some lints annoy you, allow them in the [Cargo.toml](../../Cargo.toml) [workspace.lints.clippy] section. +We use Clippy to improve the code, but if some lints annoy you, allow them in the [Cargo.toml](https://github.com/rust-lang/rust-analyzer/blob/master/Cargo.toml) [workspace.lints.clippy] section. 
# Code diff --git a/src/tools/rust-analyzer/docs/book/src/contributing/testing.md b/src/tools/rust-analyzer/docs/book/src/contributing/testing.md new file mode 100644 index 0000000000000..ccee9b847b6e1 --- /dev/null +++ b/src/tools/rust-analyzer/docs/book/src/contributing/testing.md @@ -0,0 +1,106 @@ +rust-analyzer's testing is based on *snapshot tests*: a test is a piece of input text, usually Rust code, and some output text. There is then some testing helper that runs the feature on the input text and compares the result to the output text. + +rust-analyzer uses a combination of the crate [`expect-test`](https://docs.rs/expect-test) and a custom testing framework. + +This all may sound too abstract, so let's demonstrate with an example. + +Type inference tests are located at `crates/hir-ty/src/tests`. There are various test helpers you can use. One of the simplest is `check_no_mismatches()`: it is given a piece of Rust code (we'll talk more about Rust code in tests later) and asserts that there are no type mismatches in it, that is, no places where one type was expected but another was found (for example, `let x: () = 1` is a type mismatch). Note that we determine type mismatches via rust-analyzer's own analysis, not via the compiler (this is what we are testing, after all), which means there are often missed mismatches and sometimes bogus ones as well. + +For example, the following test will fail: +```rust +#[test] +fn this_will_fail() { + check_no_mismatches( + r#" +fn main() { + let x: () = 1; +} + "#, + ); +} +``` + +Sometimes we want to check more than that there are no type mismatches. For that we use other helpers. For example, often we want to assert that the type of some expression is some specific type. For that we use the `check_types()` function. It takes a Rust code string with custom annotations that are common in our test suite. The general scheme of the annotations is: + + - `$0` marks a position. What to do with it is determined by the testing helper. Commonly it denotes the cursor position in IDE tests (for example, hover). + - `$0...$0` marks a range, commonly a selection in IDE tests. + - `^...^`, commonly seen in a comment (`// ^^^^`), labels the line above. For example, the following will attach the label `hey` to the range of the variable name `cool`: + + ```rust + let cool; + // ^^^^ hey + ``` + +`check_types()` uses labels to assert types: when you attach a label to a range, `check_types()` asserts that the type of that range is what is written in the label. + +It's all too abstract without an example: +```rust +#[test] +fn my_test() { + check_types( + r#" +fn main() { + let x = 1; + // ^ i32 +} + "#, + ); +} +``` +Here, we assert that the type of the variable `x` is `i32`. Which is true, of course, so the test will pass. + +Oftentimes it is convenient to assert the types of all of the expressions at once, and that brings us to the last kind of test. It uses `expect-test` to match an output text: +```rust +#[test] +fn my_test() { + check_infer( + r#" +fn main() { + let x = 1; +} + "#, + expect![[r#" + 10..28 '{ ...= 1; }': () + 20..21 'x': i32 + 24..25 '1': i32 + "#]], + ); +} +``` +The text inside the `expect![[]]` is determined by the helper, `check_infer()` in this case. For `check_infer()`, each line is a range in the source code (the range is counted in bytes and the source is trimmed, indentation is stripped), next to it there is the text in that range, or some part of it with `...` if it's too long, and finally comes the type of that range.
+
+The important feature of `expect-test` is that it allows easy updates of the expectation. Say you changed something in the code, maybe fixed a bug, and the output in `expect![[]]` needs to change. Or maybe you are writing it from scratch. Writing it by hand is very tedious and prone to mistakes. But `expect-test` has a trick: you can set the environment variable `UPDATE_EXPECT=1`, then run the test, and it will update automatically! Some editors (e.g. VS Code) make it even more convenient: in them, at the top of every test that uses `expect-test`, next to the usual `Run | Debug` buttons, rust-analyzer also shows an `Update Expect` button. Clicking it will run that test in updating mode.
+
+## Rust code in the tests
+
+The first thing that you probably already noticed is that the Rust code in the tests is syntax highlighted! In fact, it even uses semantic highlighting. rust-analyzer highlights strings "as if" they contain Rust code if they are passed to a parameter marked `#[rust_analyzer::rust_fixture]`, and rust-analyzer test helpers do that (in fact, this was designed for them).
+
+The syntax highlighting is very important, not just because it's nice to look at: it's very easy to make mistakes in test code, and debugging that can be very hard. Often the test will just fail, printing an `{unknown}` type, and you'll have no clue what's going wrong. The highlighting is the clue; if something isn't highlighted correctly, that probably means there is an error (there is one exception to this, which we'll discuss later). You can even set the semantic highlighting tag `unresolved_reference` to e.g. red, so you will see such things clearly.
+
+Still, you often won't know what's going wrong: why you can't make the test pass, or worse, why you expect it to fail but it doesn't. You can try the code in a real IDE to be sure it works. Later we'll give some tips for fixing the test.
+
+### The fixture
+
+The Rust code in a test is not, in fact, a single Rust file. It has a mini-language that allows you to express multiple files, multiple crates, different configs, and more. All options are documented in `crates/test-utils/src/fixture.rs`, but here are some of the common ones:
+
+ - `//- minicore: flag1, flag2, ...`. This is by far the most common option. Tests in rust-analyzer don't have access to any `core` items by default - not `Option`, not `Iterator`, not even `Sized`. This flag allows you to include parts of the `crates/test-utils/src/minicore.rs` file, which mimics `core`. All possible flags are listed at the top of `minicore` along with the flags they imply, and further down you can see, via the `// region:flag` and `// endregion:flag` markers, what code each flag enables.
+ - `//- /path/to/file.rs crate:crate_name deps:dep_a,dep_b`. The first component is the filename of the code that follows (until the next file marker); it is the only required component of this line. The other components are optional. They include `crate:crate_name`, to start a new crate, and `deps:dep_a,dep_b`, to declare dependencies between crates. You can also declare modules as usual in Rust - just name your paths `/foo.rs` or `/foo/mod.rs`, declare `mod foo`, and that's it!
+
+So the following snippet:
+```rust
+//- minicore: sized, fn
+//- /lib.rs crate:foo
+pub mod bar;
+//- /bar.rs
+pub struct Bar;
+//- /main.rs crate:main deps:foo
+use foo::Bar;
+```
+declares two crates, `foo` and `main`, where `main` depends on `foo`, with access to `Sized` and the `FnX` traits from `core`, and a module of `foo` called `bar`.
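Putting the pieces together, here is a sketch of a test that combines a `minicore` fixture, multiple crates, and a `check_types()` annotation. It only illustrates the mechanics described above and is not a test taken from the repository, so treat the names and flags as assumptions that may need tweaking:

```rust
#[test]
fn infers_type_across_crates() {
    // Two crates: `foo` (a library) and `main` (which depends on `foo`),
    // plus the `option` and `sized` parts of minicore. The `^^^^^^^` label
    // asserts the inferred type of `wrapped`.
    check_types(
        r#"
//- minicore: option, sized
//- /foo.rs crate:foo
pub fn wrap(x: i32) -> Option<i32> { Some(x) }
//- /main.rs crate:main deps:foo
fn main() {
    let wrapped = foo::wrap(1);
     // ^^^^^^^ Option<i32>
}
"#,
    );
}
```

The annotation asserts that `wrapped` is inferred as `Option<i32>`; if the `option` flag or the `deps:` declaration were missing, the type would likely come out as `{unknown}` instead.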
+
+And as promised, here are some tips to make your test work:
+
+ - If you use some type/trait, you must *always* include it in `minicore`. Note that not all types from core/std are available there; you can add new ones (under flags) if you need to. And import them if they are not in the prelude.
+ - If you use unsized types (`dyn Trait`/slices), you may want to include some or all of the following `minicore` flags: `sized`, `unsize`, `coerce_unsized`, `dispatch_from_dyn`.
+ - If you use closures, consider including the `fn` minicore flag. Async closures need the `async_fn` flag.
+ - `sized` is commonly needed; consider adding it if you're stuck.
diff --git a/src/tools/rust-analyzer/docs/book/src/faq.md b/src/tools/rust-analyzer/docs/book/src/faq.md
index c87203309011b..8c143ab949357 100644
--- a/src/tools/rust-analyzer/docs/book/src/faq.md
+++ b/src/tools/rust-analyzer/docs/book/src/faq.md
@@ -5,3 +5,12 @@
 rust-analyzer fails to resolve `None`, and thinks you are binding to a variable named `None`.
 That's usually a sign of a corrupted sysroot. Try removing and re-installing it:
 `rustup component remove rust-src` then `rustup component install rust-src`.
+
+### Rust Analyzer and Cargo compete over the build lock
+
+Rust Analyzer invokes Cargo in the background, and it can thus block manually executed
+`cargo` commands from making progress (or vice versa). In some cases, this can also cause
+unnecessary recompilations due to cache thrashing. To avoid this, you can configure
+Rust Analyzer to use a [different target directory](./configuration.md#cargo.targetDir).
+This will allow both the IDE and Cargo to make progress independently, at the cost of
+increased disk space usage caused by the duplicated artifact directories.
diff --git a/src/tools/rust-analyzer/editors/code/icon.png b/src/tools/rust-analyzer/editors/code/icon.png
index 072090c6f4a1f..29a9679039010 100644
Binary files a/src/tools/rust-analyzer/editors/code/icon.png and b/src/tools/rust-analyzer/editors/code/icon.png differ
diff --git a/src/tools/rust-analyzer/editors/code/package-lock.json b/src/tools/rust-analyzer/editors/code/package-lock.json
index 534c24be52e8d..ad8708e00c519 100644
--- a/src/tools/rust-analyzer/editors/code/package-lock.json
+++ b/src/tools/rust-analyzer/editors/code/package-lock.json
@@ -26,7 +26,7 @@
         "@typescript-eslint/eslint-plugin": "^8.25.0",
         "@typescript-eslint/parser": "^8.25.0",
         "@vscode/test-electron": "^2.4.1",
-        "@vscode/vsce": "^3.2.2",
+        "@vscode/vsce": "^3.6.0",
         "esbuild": "^0.25.0",
         "eslint": "^9.21.0",
         "eslint-config-prettier": "^10.0.2",
@@ -41,6 +41,23 @@
         "vscode": "^1.93.0"
       }
     },
+    "node_modules/@azu/format-text": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/@azu/format-text/-/format-text-1.0.2.tgz",
+      "integrity": "sha512-Swi4N7Edy1Eqq82GxgEECXSSLyn6GOb5htRFPzBDdUkECGXtlf12ynO5oJSpWKPwCaUssOu7NfhDcCWpIC6Ywg==",
+      "dev": true,
+      "license": "BSD-3-Clause"
+    },
+    "node_modules/@azu/style-format": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/@azu/style-format/-/style-format-1.0.1.tgz",
+      "integrity": "sha512-AHcTojlNBdD/3/KxIKlg8sxIWHfOtQszLvOpagLTO+bjC3u7SAszu1lf//u7JJC50aUSH+BVWDD/KvaA6Gfn5g==",
+      "dev": true,
+      "license": "WTFPL",
+      "dependencies": {
+        "@azu/format-text": "^1.0.1"
+      }
+    },
     "node_modules/@azure/abort-controller": {
       "version": "2.1.2",
       "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz",
@@ -212,6 +229,31 @@
         "node": ">=16"
       }
     },
+    "node_modules/@babel/code-frame": {
+      "version": "7.27.1",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", + "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.27.1", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", + "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, "node_modules/@esbuild/aix-ppc64": { "version": "0.25.0", "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.0.tgz", @@ -947,6 +989,217 @@ "node": ">= 8" } }, + "node_modules/@secretlint/config-creator": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/@secretlint/config-creator/-/config-creator-10.2.2.tgz", + "integrity": "sha512-BynOBe7Hn3LJjb3CqCHZjeNB09s/vgf0baBaHVw67w7gHF0d25c3ZsZ5+vv8TgwSchRdUCRrbbcq5i2B1fJ2QQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@secretlint/types": "^10.2.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@secretlint/config-loader": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/@secretlint/config-loader/-/config-loader-10.2.2.tgz", + "integrity": "sha512-ndjjQNgLg4DIcMJp4iaRD6xb9ijWQZVbd9694Ol2IszBIbGPPkwZHzJYKICbTBmh6AH/pLr0CiCaWdGJU7RbpQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@secretlint/profiler": "^10.2.2", + "@secretlint/resolver": "^10.2.2", + "@secretlint/types": "^10.2.2", + "ajv": "^8.17.1", + "debug": "^4.4.1", + "rc-config-loader": "^4.1.3" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@secretlint/config-loader/node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/@secretlint/config-loader/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true, + "license": "MIT" + }, + "node_modules/@secretlint/core": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/@secretlint/core/-/core-10.2.2.tgz", + "integrity": "sha512-6rdwBwLP9+TO3rRjMVW1tX+lQeo5gBbxl1I5F8nh8bgGtKwdlCMhMKsBWzWg1ostxx/tIG7OjZI0/BxsP8bUgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@secretlint/profiler": "^10.2.2", + "@secretlint/types": "^10.2.2", + "debug": "^4.4.1", + "structured-source": "^4.0.0" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@secretlint/formatter": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/@secretlint/formatter/-/formatter-10.2.2.tgz", + "integrity": 
"sha512-10f/eKV+8YdGKNQmoDUD1QnYL7TzhI2kzyx95vsJKbEa8akzLAR5ZrWIZ3LbcMmBLzxlSQMMccRmi05yDQ5YDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@secretlint/resolver": "^10.2.2", + "@secretlint/types": "^10.2.2", + "@textlint/linter-formatter": "^15.2.0", + "@textlint/module-interop": "^15.2.0", + "@textlint/types": "^15.2.0", + "chalk": "^5.4.1", + "debug": "^4.4.1", + "pluralize": "^8.0.0", + "strip-ansi": "^7.1.0", + "table": "^6.9.0", + "terminal-link": "^4.0.0" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@secretlint/formatter/node_modules/chalk": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.5.0.tgz", + "integrity": "sha512-1tm8DTaJhPBG3bIkVeZt1iZM9GfSX2lzOeDVZH9R9ffRHpmHvxZ/QhgQH/aDTkswQVt+YHdXAdS/In/30OjCbg==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@secretlint/node": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/@secretlint/node/-/node-10.2.2.tgz", + "integrity": "sha512-eZGJQgcg/3WRBwX1bRnss7RmHHK/YlP/l7zOQsrjexYt6l+JJa5YhUmHbuGXS94yW0++3YkEJp0kQGYhiw1DMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@secretlint/config-loader": "^10.2.2", + "@secretlint/core": "^10.2.2", + "@secretlint/formatter": "^10.2.2", + "@secretlint/profiler": "^10.2.2", + "@secretlint/source-creator": "^10.2.2", + "@secretlint/types": "^10.2.2", + "debug": "^4.4.1", + "p-map": "^7.0.3" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@secretlint/profiler": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/@secretlint/profiler/-/profiler-10.2.2.tgz", + "integrity": "sha512-qm9rWfkh/o8OvzMIfY8a5bCmgIniSpltbVlUVl983zDG1bUuQNd1/5lUEeWx5o/WJ99bXxS7yNI4/KIXfHexig==", + "dev": true, + "license": "MIT" + }, + "node_modules/@secretlint/resolver": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/@secretlint/resolver/-/resolver-10.2.2.tgz", + "integrity": "sha512-3md0cp12e+Ae5V+crPQYGd6aaO7ahw95s28OlULGyclyyUtf861UoRGS2prnUrKh7MZb23kdDOyGCYb9br5e4w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@secretlint/secretlint-formatter-sarif": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/@secretlint/secretlint-formatter-sarif/-/secretlint-formatter-sarif-10.2.2.tgz", + "integrity": "sha512-ojiF9TGRKJJw308DnYBucHxkpNovDNu1XvPh7IfUp0A12gzTtxuWDqdpuVezL7/IP8Ua7mp5/VkDMN9OLp1doQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "node-sarif-builder": "^3.2.0" + } + }, + "node_modules/@secretlint/secretlint-rule-no-dotenv": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/@secretlint/secretlint-rule-no-dotenv/-/secretlint-rule-no-dotenv-10.2.2.tgz", + "integrity": "sha512-KJRbIShA9DVc5Va3yArtJ6QDzGjg3PRa1uYp9As4RsyKtKSSZjI64jVca57FZ8gbuk4em0/0Jq+uy6485wxIdg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@secretlint/types": "^10.2.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@secretlint/secretlint-rule-preset-recommend": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/@secretlint/secretlint-rule-preset-recommend/-/secretlint-rule-preset-recommend-10.2.2.tgz", + "integrity": "sha512-K3jPqjva8bQndDKJqctnGfwuAxU2n9XNCPtbXVI5JvC7FnQiNg/yWlQPbMUlBXtBoBGFYp08A94m6fvtc9v+zA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@secretlint/source-creator": { + "version": 
"10.2.2", + "resolved": "https://registry.npmjs.org/@secretlint/source-creator/-/source-creator-10.2.2.tgz", + "integrity": "sha512-h6I87xJfwfUTgQ7irWq7UTdq/Bm1RuQ/fYhA3dtTIAop5BwSFmZyrchph4WcoEvbN460BWKmk4RYSvPElIIvxw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@secretlint/types": "^10.2.2", + "istextorbinary": "^9.5.0" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@secretlint/types": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/@secretlint/types/-/types-10.2.2.tgz", + "integrity": "sha512-Nqc90v4lWCXyakD6xNyNACBJNJ0tNCwj2WNk/7ivyacYHxiITVgmLUFXTBOeCdy79iz6HtN9Y31uw/jbLrdOAg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@sindresorhus/merge-streams": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz", + "integrity": "sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/@stylistic/eslint-plugin": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/@stylistic/eslint-plugin/-/eslint-plugin-4.1.0.tgz", @@ -984,6 +1237,136 @@ "eslint": ">=9.0.0" } }, + "node_modules/@textlint/ast-node-types": { + "version": "15.2.1", + "resolved": "https://registry.npmjs.org/@textlint/ast-node-types/-/ast-node-types-15.2.1.tgz", + "integrity": "sha512-20fEcLPsXg81yWpApv4FQxrZmlFF/Ta7/kz1HGIL+pJo5cSTmkc+eCki3GpOPZIoZk0tbJU8hrlwUb91F+3SNQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@textlint/linter-formatter": { + "version": "15.2.1", + "resolved": "https://registry.npmjs.org/@textlint/linter-formatter/-/linter-formatter-15.2.1.tgz", + "integrity": "sha512-oollG/BHa07+mMt372amxHohteASC+Zxgollc1sZgiyxo4S6EuureV3a4QIQB0NecA+Ak3d0cl0WI/8nou38jw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@azu/format-text": "^1.0.2", + "@azu/style-format": "^1.0.1", + "@textlint/module-interop": "15.2.1", + "@textlint/resolver": "15.2.1", + "@textlint/types": "15.2.1", + "chalk": "^4.1.2", + "debug": "^4.4.1", + "js-yaml": "^3.14.1", + "lodash": "^4.17.21", + "pluralize": "^2.0.0", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "table": "^6.9.0", + "text-table": "^0.2.0" + } + }, + "node_modules/@textlint/linter-formatter/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@textlint/linter-formatter/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "license": "MIT", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/@textlint/linter-formatter/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/@textlint/linter-formatter/node_modules/js-yaml": { + "version": 
"3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/@textlint/linter-formatter/node_modules/pluralize": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pluralize/-/pluralize-2.0.0.tgz", + "integrity": "sha512-TqNZzQCD4S42De9IfnnBvILN7HAW7riLqsCyp8lgjXeysyPlX5HhqKAcJHHHb9XskE4/a+7VGC9zzx8Ls0jOAw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@textlint/linter-formatter/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@textlint/linter-formatter/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@textlint/module-interop": { + "version": "15.2.1", + "resolved": "https://registry.npmjs.org/@textlint/module-interop/-/module-interop-15.2.1.tgz", + "integrity": "sha512-b/C/ZNrm05n1ypymDknIcpkBle30V2ZgE3JVqQlA9PnQV46Ky510qrZk6s9yfKgA3m1YRnAw04m8xdVtqjq1qg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@textlint/resolver": { + "version": "15.2.1", + "resolved": "https://registry.npmjs.org/@textlint/resolver/-/resolver-15.2.1.tgz", + "integrity": "sha512-FY3aK4tElEcOJVUsaMj4Zro4jCtKEEwUMIkDL0tcn6ljNcgOF7Em+KskRRk/xowFWayqDtdz5T3u7w/6fjjuJQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@textlint/types": { + "version": "15.2.1", + "resolved": "https://registry.npmjs.org/@textlint/types/-/types-15.2.1.tgz", + "integrity": "sha512-zyqNhSatK1cwxDUgosEEN43hFh3WCty9Zm2Vm3ogU566IYegifwqN54ey/CiRy/DiO4vMcFHykuQnh2Zwp6LLw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@textlint/ast-node-types": "15.2.1" + } + }, "node_modules/@tsconfig/strictest": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/@tsconfig/strictest/-/strictest-2.0.5.tgz", @@ -1015,6 +1398,20 @@ "undici-types": "~6.20.0" } }, + "node_modules/@types/normalize-package-data": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz", + "integrity": "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/sarif": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@types/sarif/-/sarif-2.1.7.tgz", + "integrity": "sha512-kRz0VEkJqWLf1LLVN4pT1cg1Z9wAuvI6L97V3m2f5B76Tg8d413ddvLBPTEHAZJlnn4XSvu0FkZtViCQGVyrXQ==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/vscode": { "version": "1.93.0", "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.93.0.tgz", @@ -1220,16 +1617,20 @@ } }, "node_modules/@vscode/vsce": { - "version": 
"3.2.2", - "resolved": "https://registry.npmjs.org/@vscode/vsce/-/vsce-3.2.2.tgz", - "integrity": "sha512-4TqdUq/yKlQTHcQMk/DamR632bq/+IJDomSbexOMee/UAYWqYm0XHWA6scGslsCpzY+sCWEhhl0nqdOB0XW1kw==", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@vscode/vsce/-/vsce-3.6.0.tgz", + "integrity": "sha512-u2ZoMfymRNJb14aHNawnXJtXHLXDVKc1oKZaH4VELKT/9iWKRVgtQOdwxCgtwSxJoqYvuK4hGlBWQJ05wxADhg==", "dev": true, "license": "MIT", "dependencies": { "@azure/identity": "^4.1.0", + "@secretlint/node": "^10.1.1", + "@secretlint/secretlint-formatter-sarif": "^10.1.1", + "@secretlint/secretlint-rule-no-dotenv": "^10.1.1", + "@secretlint/secretlint-rule-preset-recommend": "^10.1.1", "@vscode/vsce-sign": "^2.0.0", "azure-devops-node-api": "^12.5.0", - "chalk": "^2.4.2", + "chalk": "^4.1.2", "cheerio": "^1.0.0-rc.9", "cockatiel": "^3.1.2", "commander": "^12.1.0", @@ -1243,6 +1644,7 @@ "minimatch": "^3.0.3", "parse-semver": "^1.1.1", "read": "^1.0.7", + "secretlint": "^10.1.1", "semver": "^7.5.2", "tmp": "^0.2.3", "typed-rest-client": "^1.8.4", @@ -1486,6 +1888,22 @@ "integrity": "sha512-PMqBCBvrOVDRqLGooQb+z+t1Q0PiPyurUQeZRR5uHBOVZcW8B04KMmnT12USnhpNX2wCPagWzLVppQMUG3u0Dw==", "license": "MIT" }, + "node_modules/ansi-escapes": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.0.0.tgz", + "integrity": "sha512-GdYO7a61mR0fOlAsvC9/rIHf7L96sBc6dEWzeOu+KAea5bZyQRPIpojrVoI4AXGJS/ycu/fBTdLrUkA4ODrvjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "environment": "^1.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/ansi-regex": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", @@ -1500,16 +1918,18 @@ } }, "node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "license": "MIT", "dependencies": { - "color-convert": "^1.9.0" + "color-convert": "^2.0.1" }, "engines": { - "node": ">=4" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, "node_modules/argparse": { @@ -1519,6 +1939,16 @@ "dev": true, "license": "Python-2.0" }, + "node_modules/astral-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", + "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -1564,6 +1994,22 @@ ], "license": "MIT" }, + "node_modules/binaryextensions": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/binaryextensions/-/binaryextensions-6.11.0.tgz", + "integrity": "sha512-sXnYK/Ij80TO3lcqZVV2YgfKN5QjUWIRk/XSm2J/4bd/lPko3lvk0O4ZppH6m+6hB2/GTu+ptNwVFe1xh+QLQw==", + "dev": true, + "license": "Artistic-2.0", + "dependencies": { + "editions": "^6.21.0" + }, + "engines": { + "node": ">=4" + }, + "funding": { + "url": "https://bevry.me/fund" + } + }, 
"node_modules/bl": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/bl/-/bl-5.1.0.tgz", @@ -1598,6 +2044,13 @@ "dev": true, "license": "ISC" }, + "node_modules/boundary": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/boundary/-/boundary-2.0.0.tgz", + "integrity": "sha512-rJKn5ooC9u8q13IMCrW0RSp31pxBCHE3y9V/tp3TdWSLf8Em3p6Di4NBpfzbJge9YjjFEsD0RtFEjtvHL5VyEA==", + "dev": true, + "license": "BSD-2-Clause" + }, "node_modules/brace-expansion": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", @@ -1720,18 +2173,20 @@ } }, "node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, "license": "MIT", "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" }, "engines": { - "node": ">=4" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" } }, "node_modules/cheerio": { @@ -1845,39 +2300,6 @@ "node": ">=8" } }, - "node_modules/cliui/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/cliui/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "license": "MIT", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/cliui/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "license": "MIT" - }, "node_modules/cliui/node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", @@ -1938,20 +2360,21 @@ } }, "node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "license": "MIT", "dependencies": { - "color-name": "1.1.3" + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" } }, "node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": 
"sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - "dev": true, + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "license": "MIT" }, "node_modules/combined-stream": { @@ -2469,9 +2892,9 @@ } }, "node_modules/debug": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", - "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", "dev": true, "license": "MIT", "dependencies": { @@ -2685,6 +3108,23 @@ "safe-buffer": "^5.0.1" } }, + "node_modules/editions": { + "version": "6.22.0", + "resolved": "https://registry.npmjs.org/editions/-/editions-6.22.0.tgz", + "integrity": "sha512-UgGlf8IW75je7HZjNDpJdCv4cGJWIi6yumFdZ0R7A8/CIhQiWUjyGLCxdHpd8bmyD1gnkfUNK0oeOXqUS2cpfQ==", + "dev": true, + "license": "Artistic-2.0", + "dependencies": { + "version-range": "^4.15.0" + }, + "engines": { + "ecmascript": ">= es5", + "node": ">=4" + }, + "funding": { + "url": "https://bevry.me/fund" + } + }, "node_modules/emoji-regex": { "version": "9.2.2", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", @@ -2730,6 +3170,19 @@ "url": "https://github.com/fb55/entities?sponsor=1" } }, + "node_modules/environment": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/environment/-/environment-1.1.0.tgz", + "integrity": "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/es-define-property": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", @@ -2829,16 +3282,6 @@ "node": ">=6" } }, - "node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.8.0" - } - }, "node_modules/eslint": { "version": "9.21.0", "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.21.0.tgz", @@ -2964,22 +3407,6 @@ "url": "https://opencollective.com/eslint" } }, - "node_modules/eslint/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, "node_modules/eslint/node_modules/brace-expansion": { "version": "1.1.12", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", @@ -2991,43 +3418,6 @@ "concat-map": "0.0.1" } }, - "node_modules/eslint/node_modules/chalk": { - "version": "4.1.2", - "resolved": 
"https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/eslint/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/eslint/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true, - "license": "MIT" - }, "node_modules/eslint/node_modules/escape-string-regexp": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", @@ -3041,16 +3431,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/eslint/node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/eslint/node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", @@ -3064,19 +3444,6 @@ "node": "*" } }, - "node_modules/eslint/node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/espree": { "version": "10.3.0", "resolved": "https://registry.npmjs.org/espree/-/espree-10.3.0.tgz", @@ -3095,6 +3462,20 @@ "url": "https://opencollective.com/eslint" } }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "license": "BSD-2-Clause", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, "node_modules/esquery": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", @@ -3213,6 +3594,23 @@ "dev": true, "license": "MIT" }, + "node_modules/fast-uri": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.6.tgz", + "integrity": "sha512-Atfo14OibSv5wAp4VWNsFYE1AchQRTv9cBGWET4pZWHzYshFSS9NQI6I57rdKn9croWVMbYFbLhJ+yJvmZIIHw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" + }, "node_modules/fastq": { "version": "1.19.0", 
"resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.0.tgz", @@ -3360,6 +3758,21 @@ "license": "MIT", "optional": true }, + "node_modules/fs-extra": { + "version": "11.3.1", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.1.tgz", + "integrity": "sha512-eXvGGwZ5CL17ZSwHWd3bbgk7UUpF6IFHtP57NYYakPvHOs8GDgDe5KJI36jIJzDkJ6eJjuzRA8eBQb6SkKue0g==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=14.14" + } + }, "node_modules/function-bind": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", @@ -3492,6 +3905,37 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/globby": { + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-14.1.0.tgz", + "integrity": "sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sindresorhus/merge-streams": "^2.1.0", + "fast-glob": "^3.3.3", + "ignore": "^7.0.3", + "path-type": "^6.0.0", + "slash": "^5.1.0", + "unicorn-magic": "^0.3.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globby/node_modules/ignore": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, "node_modules/gopd": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", @@ -3505,6 +3949,13 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, "node_modules/graphemer": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", @@ -3513,13 +3964,13 @@ "license": "MIT" }, "node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, "license": "MIT", "engines": { - "node": ">=4" + "node": ">=8" } }, "node_modules/has-symbols": { @@ -3702,6 +4153,19 @@ "node": ">=0.8.19" } }, + "node_modules/index-to-position": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/index-to-position/-/index-to-position-1.1.0.tgz", + "integrity": "sha512-XPdx9Dq4t9Qk1mTMbWONJqU7boCoumEH7fRET37HX5+khDUl3J2W6PdALxhILYlIYx2amlwYcRPp28p0tSiojg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", @@ -3872,6 +4336,24 @@ "dev": true, "license": "ISC" }, + "node_modules/istextorbinary": { + "version": "9.5.0", + 
"resolved": "https://registry.npmjs.org/istextorbinary/-/istextorbinary-9.5.0.tgz", + "integrity": "sha512-5mbUj3SiZXCuRf9fT3ibzbSSEWiy63gFfksmGfdOzujPjW3k+z8WvIBxcJHBoQNlaZaiyB25deviif2+osLmLw==", + "dev": true, + "license": "Artistic-2.0", + "dependencies": { + "binaryextensions": "^6.11.0", + "editions": "^6.21.0", + "textextensions": "^6.11.0" + }, + "engines": { + "node": ">=4" + }, + "funding": { + "url": "https://bevry.me/fund" + } + }, "node_modules/jackspeak": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.0.tgz", @@ -3897,6 +4379,13 @@ "jiti": "lib/jiti-cli.mjs" } }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "license": "MIT" + }, "node_modules/js-yaml": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", @@ -3931,6 +4420,19 @@ "dev": true, "license": "MIT" }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/jsonc-parser": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.3.1.tgz", @@ -3938,6 +4440,19 @@ "dev": true, "license": "MIT" }, + "node_modules/jsonfile": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, "node_modules/jsonwebtoken": { "version": "9.0.2", "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz", @@ -4103,6 +4618,13 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true, + "license": "MIT" + }, "node_modules/lodash.includes": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", @@ -4159,6 +4681,13 @@ "dev": true, "license": "MIT" }, + "node_modules/lodash.truncate": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz", + "integrity": "sha512-jttmRe7bRse52OsWIMDLaXxWqRAmtIUccAQ3garviCqJjafXOfNMO0yMfNpdD6zbGaTU0P5Nz7e7gAT6cKmJRw==", + "dev": true, + "license": "MIT" + }, "node_modules/log-symbols": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-5.1.0.tgz", @@ -4430,12 +4959,61 @@ "license": "MIT", "optional": true }, - "node_modules/nth-check": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", - "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", + "node_modules/node-sarif-builder": { + "version": "3.2.0", + "resolved": 
"https://registry.npmjs.org/node-sarif-builder/-/node-sarif-builder-3.2.0.tgz", + "integrity": "sha512-kVIOdynrF2CRodHZeP/97Rh1syTUHBNiw17hUCIVhlhEsWlfJm19MuO56s4MdKbr22xWx6mzMnNAgXzVlIYM9Q==", "dev": true, - "license": "BSD-2-Clause", + "license": "MIT", + "dependencies": { + "@types/sarif": "^2.1.7", + "fs-extra": "^11.1.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/normalize-package-data": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.2.tgz", + "integrity": "sha512-V6gygoYb/5EmNI+MEGrWkC+e6+Rr7mTmfHrxDbLzxQogBkgzo76rkok0Am6thgSF7Mv2nLOajAJj5vDJZEFn7g==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "hosted-git-info": "^7.0.0", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/normalize-package-data/node_modules/hosted-git-info": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz", + "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==", + "dev": true, + "license": "ISC", + "dependencies": { + "lru-cache": "^10.0.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/normalize-package-data/node_modules/lru-cache": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/nth-check": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", + "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", + "dev": true, + "license": "BSD-2-Clause", "dependencies": { "boolbase": "^1.0.0" }, @@ -4661,6 +5239,19 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/p-map": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-7.0.3.tgz", + "integrity": "sha512-VkndIv2fIB99swvQoA65bm+fsmt6UNdGeIB0oxBs+WhAhdh08QA04JXpI7rbB9r08/nkbysKoya9rtDERYOYMA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/package-json-from-dist": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", @@ -4688,6 +5279,24 @@ "node": ">=6" } }, + "node_modules/parse-json": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-8.3.0.tgz", + "integrity": "sha512-ybiGyvspI+fAoRQbIPRddCcSTV9/LsJbf0e/S85VLowVGzRmokfneg2kwVW/KU5rOXrPSbF1qAKPMgNTqqROQQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.26.2", + "index-to-position": "^1.1.0", + "type-fest": "^4.39.1" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/parse-semver": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/parse-semver/-/parse-semver-1.1.1.tgz", @@ -4795,6 +5404,19 @@ "node": "20 || >=22" } }, + "node_modules/path-type": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-6.0.0.tgz", + "integrity": "sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==", + 
"dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/pend": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", @@ -4802,6 +5424,13 @@ "dev": true, "license": "MIT" }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, "node_modules/picomatch": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", @@ -4815,6 +5444,16 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/pluralize": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/pluralize/-/pluralize-8.0.0.tgz", + "integrity": "sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, "node_modules/prebuild-install": { "version": "7.1.3", "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.3.tgz", @@ -4962,6 +5601,19 @@ "rc": "cli.js" } }, + "node_modules/rc-config-loader": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/rc-config-loader/-/rc-config-loader-4.1.3.tgz", + "integrity": "sha512-kD7FqML7l800i6pS6pvLyIE2ncbk9Du8Q0gp/4hMPhJU6ZxApkoLcGD8ZeqgiAlfwZ6BlETq6qqe+12DUL207w==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.3.4", + "js-yaml": "^4.1.0", + "json5": "^2.2.2", + "require-from-string": "^2.0.2" + } + }, "node_modules/rc/node_modules/strip-json-comments": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", @@ -4986,6 +5638,39 @@ "node": ">=0.8" } }, + "node_modules/read-pkg": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-9.0.1.tgz", + "integrity": "sha512-9viLL4/n1BJUCT1NXVTdS1jtm80yDEgR5T4yCelII49Mbj0v1rZdKqj7zCiYdbB0CuCgdrvHcNogAKTFPBocFA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/normalize-package-data": "^2.4.3", + "normalize-package-data": "^6.0.0", + "parse-json": "^8.0.0", + "type-fest": "^4.6.0", + "unicorn-magic": "^0.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/read-pkg/node_modules/unicorn-magic": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.1.0.tgz", + "integrity": "sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/readable-stream": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", @@ -5018,6 +5703,16 @@ "node": ">=0.10.0" } }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/resolve-from": { "version": "4.0.0", "resolved": 
"https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", @@ -5146,6 +5841,28 @@ "dev": true, "license": "ISC" }, + "node_modules/secretlint": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/secretlint/-/secretlint-10.2.2.tgz", + "integrity": "sha512-xVpkeHV/aoWe4vP4TansF622nBEImzCY73y/0042DuJ29iKIaqgoJ8fGxre3rVSHHbxar4FdJobmTnLp9AU0eg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@secretlint/config-creator": "^10.2.2", + "@secretlint/formatter": "^10.2.2", + "@secretlint/node": "^10.2.2", + "@secretlint/profiler": "^10.2.2", + "debug": "^4.4.1", + "globby": "^14.1.0", + "read-pkg": "^9.0.1" + }, + "bin": { + "secretlint": "bin/secretlint.js" + }, + "engines": { + "node": ">=20.0.0" + } + }, "node_modules/semver": { "version": "7.7.1", "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", @@ -5326,6 +6043,80 @@ "simple-concat": "^1.0.0" } }, + "node_modules/slash": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz", + "integrity": "sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/slice-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", + "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + "node_modules/spdx-correct": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", + "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-exceptions": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", + "integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", + "dev": true, + "license": "CC-BY-3.0" + }, + "node_modules/spdx-expression-parse": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-license-ids": { + "version": "3.0.22", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.22.tgz", + "integrity": "sha512-4PRT4nh1EImPbt2jASOKHX7PB7I+e4IWNLvkKFDxNhJlfjbYlleYQh285Z/3mPTHSAK/AvdMmw5BNNuYH8ShgQ==", + "dev": true, + "license": "CC0-1.0" + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true, + "license": "BSD-3-Clause" + }, "node_modules/stdin-discarder": { "version": 
"0.1.0", "resolved": "https://registry.npmjs.org/stdin-discarder/-/stdin-discarder-0.1.0.tgz", @@ -5487,17 +6278,130 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/structured-source": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/structured-source/-/structured-source-4.0.0.tgz", + "integrity": "sha512-qGzRFNJDjFieQkl/sVOI2dUjHKRyL9dAJi2gCPGJLbJHBIkyOHxjuocpIEfbLioX+qSJpvbYdT49/YCdMznKxA==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "boundary": "^2.0.0" + } + }, "node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "license": "MIT", "dependencies": { - "has-flag": "^3.0.0" + "has-flag": "^4.0.0" }, "engines": { - "node": ">=4" + "node": ">=8" + } + }, + "node_modules/supports-hyperlinks": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/supports-hyperlinks/-/supports-hyperlinks-3.2.0.tgz", + "integrity": "sha512-zFObLMyZeEwzAoKCyu1B91U79K2t7ApXuQfo8OuxwXLDgcKxuwM+YvcbIhm6QWqz7mHUH1TVytR1PwVVjEuMig==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0", + "supports-color": "^7.0.0" + }, + "engines": { + "node": ">=14.18" + }, + "funding": { + "url": "https://github.com/chalk/supports-hyperlinks?sponsor=1" + } + }, + "node_modules/table": { + "version": "6.9.0", + "resolved": "https://registry.npmjs.org/table/-/table-6.9.0.tgz", + "integrity": "sha512-9kY+CygyYM6j02t5YFHbNz2FN5QmYGv9zAjVp4lCDjlCw7amdckXlEt/bjMhUIfj4ThGRE4gCUH5+yGnNuPo5A==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "ajv": "^8.0.1", + "lodash.truncate": "^4.4.2", + "slice-ansi": "^4.0.0", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/table/node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/table/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/table/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/table/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": 
"sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true, + "license": "MIT" + }, + "node_modules/table/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/table/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" } }, "node_modules/tar-fs": { @@ -5587,10 +6491,50 @@ "node": ">= 6" } }, + "node_modules/terminal-link": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/terminal-link/-/terminal-link-4.0.0.tgz", + "integrity": "sha512-lk+vH+MccxNqgVqSnkMVKx4VLJfnLjDBGzH16JVZjKE2DoxP57s6/vt6JmXV5I3jBcfGrxNrYtC+mPtU7WJztA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-escapes": "^7.0.0", + "supports-hyperlinks": "^3.2.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", + "dev": true, + "license": "MIT" + }, + "node_modules/textextensions": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/textextensions/-/textextensions-6.11.0.tgz", + "integrity": "sha512-tXJwSr9355kFJI3lbCkPpUH5cP8/M0GGy2xLO34aZCjMXBaK3SoPnZwr/oWmo1FdCnELcs4npdCIOFtq9W3ruQ==", + "dev": true, + "license": "Artistic-2.0", + "dependencies": { + "editions": "^6.21.0" + }, + "engines": { + "node": ">=4" + }, + "funding": { + "url": "https://bevry.me/fund" + } + }, "node_modules/tmp": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.3.tgz", - "integrity": "sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w==", + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.4.tgz", + "integrity": "sha512-UdiSoX6ypifLmrfQ/XfiawN6hkjSBpCjhKxxZcWlUUmoXLaCKQU0bx4HF/tdDK2uzRuchf1txGvrWBzYREssoQ==", "dev": true, "license": "MIT", "engines": { @@ -5667,6 +6611,19 @@ "node": ">= 0.8.0" } }, + "node_modules/type-fest": { + "version": "4.41.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.41.0.tgz", + "integrity": "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/typed-rest-client": { "version": "1.8.11", "resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-1.8.11.tgz", @@ -5747,6 +6704,29 @@ "dev": true, "license": "MIT" }, + "node_modules/unicorn-magic": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.3.0.tgz", + "integrity": 
"sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, "node_modules/uri-js": { "version": "4.4.1", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", @@ -5781,6 +6761,30 @@ "uuid": "dist/bin/uuid" } }, + "node_modules/validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "node_modules/version-range": { + "version": "4.15.0", + "resolved": "https://registry.npmjs.org/version-range/-/version-range-4.15.0.tgz", + "integrity": "sha512-Ck0EJbAGxHwprkzFO966t4/5QkRuzh+/I1RxhLgUKKwEn+Cd8NwM60mE3AqBZg5gYODoXW0EFsQvbZjRlvdqbg==", + "dev": true, + "license": "Artistic-2.0", + "engines": { + "node": ">=4" + }, + "funding": { + "url": "https://bevry.me/fund" + } + }, "node_modules/vscode-jsonrpc": { "version": "8.2.0", "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.2.0.tgz", @@ -5928,42 +6932,6 @@ "node": ">=8" } }, - "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true, - "license": "MIT" - }, "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json index 470db244f14bd..1d27a12053552 100644 --- a/src/tools/rust-analyzer/editors/code/package.json +++ b/src/tools/rust-analyzer/editors/code/package.json @@ -63,7 +63,7 @@ "@typescript-eslint/eslint-plugin": "^8.25.0", "@typescript-eslint/parser": "^8.25.0", "@vscode/test-electron": "^2.4.1", - "@vscode/vsce": "^3.2.2", + 
"@vscode/vsce": "^3.6.0", "esbuild": "^0.25.0", "eslint": "^9.21.0", "eslint-config-prettier": "^10.0.2", @@ -1585,6 +1585,16 @@ } } }, + { + "title": "Document", + "properties": { + "rust-analyzer.document.symbol.search.excludeLocals": { + "markdownDescription": "Exclude all locals from document symbol search.", + "default": true, + "type": "boolean" + } + } + }, { "title": "Files", "properties": { @@ -2199,6 +2209,16 @@ } } }, + { + "title": "Inlay Hints", + "properties": { + "rust-analyzer.inlayHints.expressionAdjustmentHints.disableReborrows": { + "markdownDescription": "Disable reborrows in expression adjustments inlay hints.\n\nReborrows are a pair of a builtin deref then borrow, i.e. `&*`. They are inserted by the compiler but are mostly useless to the programmer.\n\nNote: if the deref is not builtin (an overloaded deref), or the borrow is `&raw const`/`&raw mut`, they are not removed.", + "default": true, + "type": "boolean" + } + } + }, { "title": "Inlay Hints", "properties": { @@ -2830,6 +2850,16 @@ } } }, + { + "title": "Semantic Highlighting", + "properties": { + "rust-analyzer.semanticHighlighting.comments.enable": { + "markdownDescription": "Use semantic tokens for comments.\n\nIn some editors (e.g. vscode) semantic tokens override other highlighting grammars.\nBy disabling semantic tokens for comments, other grammars can be used to highlight\ntheir contents.", + "default": true, + "type": "boolean" + } + } + }, { "title": "Semantic Highlighting", "properties": { diff --git a/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml index 1fc1da50a0a03..f56a0de616376 100644 --- a/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml +++ b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lsp-server" -version = "0.7.8" +version = "0.7.9" description = "Generic LSP server scaffold." license = "MIT OR Apache-2.0" repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/lsp-server" @@ -16,9 +16,9 @@ crossbeam-channel.workspace = true [dev-dependencies] lsp-types = "=0.95" ctrlc = "3.4.7" -anyhow.workspace = true +anyhow.workspace = true rustc-hash.workspace = true -toolchain.workspace = true +toolchain.workspace = true [lints] workspace = true diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs b/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs index 399d674e41d25..305008e69ae1a 100644 --- a/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs +++ b/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs @@ -84,9 +84,9 @@ pub struct Response { // request id. We fail deserialization in that case, so we just // make this field mandatory. 
pub id: RequestId, - #[serde(skip_serializing_if = "Option::is_none")] + #[serde(skip_serializing_if = "Option::is_none", default)] pub result: Option, - #[serde(skip_serializing_if = "Option::is_none")] + #[serde(skip_serializing_if = "Option::is_none", default)] pub error: Option, } @@ -94,7 +94,7 @@ pub struct Response { pub struct ResponseError { pub code: i32, pub message: String, - #[serde(skip_serializing_if = "Option::is_none")] + #[serde(skip_serializing_if = "Option::is_none", default)] pub data: Option, } @@ -175,7 +175,7 @@ impl Message { let msg = match serde_json::from_str(&text) { Ok(msg) => msg, Err(e) => { - return Err(invalid_data!("malformed LSP payload: {:?}", e)); + return Err(invalid_data!("malformed LSP payload `{e:?}`: {text:?}")); } }; diff --git a/src/tools/rust-analyzer/rust-version b/src/tools/rust-analyzer/rust-version index 2178caf63968a..02b217f7d80dc 100644 --- a/src/tools/rust-analyzer/rust-version +++ b/src/tools/rust-analyzer/rust-version @@ -1 +1 @@ -733dab558992d902d6d17576de1da768094e2cf3 +21a19c297d4f5a03501d92ca251bd7a17073c08a diff --git a/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs b/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs index 824b38fc872d8..9bd87a7ef5fe0 100644 --- a/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs +++ b/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs @@ -1081,7 +1081,6 @@ fn extract_struct_traits(ast: &mut AstSrc) { "Enum", "Variant", "Trait", - "TraitAlias", "Module", "Static", "Const", diff --git a/src/tools/rust-analyzer/xtask/src/metrics.rs b/src/tools/rust-analyzer/xtask/src/metrics.rs index 6ff6a1b15310a..fd4b600b03470 100644 --- a/src/tools/rust-analyzer/xtask/src/metrics.rs +++ b/src/tools/rust-analyzer/xtask/src/metrics.rs @@ -16,13 +16,16 @@ type Unit = String; impl flags::Metrics { pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> { let mut metrics = Metrics::new(sh)?; - if !Path::new("./target/rustc-perf").exists() { - sh.create_dir("./target/rustc-perf")?; - cmd!(sh, "git clone https://github.com/rust-lang/rustc-perf.git ./target/rustc-perf") - .run()?; + if !Path::new("./target/metrics/rustc-perf").exists() { + sh.create_dir("./target/metrics/rustc-perf")?; + cmd!( + sh, + "git clone https://github.com/rust-lang/rustc-perf.git ./target/metrics/rustc-perf" + ) + .run()?; } { - let _d = sh.push_dir("./target/rustc-perf"); + let _d = sh.push_dir("./target/metrics/rustc-perf"); let revision = &metrics.perf_revision; cmd!(sh, "git reset --hard {revision}").run()?; } @@ -88,11 +91,12 @@ impl Metrics { cmd!( sh, - "git clone --depth=1 --branch 1.76.0 https://github.com/rust-lang/rust.git --single-branch" + "git clone --depth=1 --branch 1.76.0 https://github.com/rust-lang/rust.git --single-branch ./target/metrics/rust" ) .run()?; - let output = cmd!(sh, "./target/release/rust-analyzer rustc-tests ./rust").read()?; + let output = + cmd!(sh, "./target/release/rust-analyzer rustc-tests ./target/metrics/rust").read()?; for (metric, value, unit) in parse_metrics(&output) { self.report(metric, value, unit.into()); } @@ -106,7 +110,7 @@ impl Metrics { self.measure_analysis_stats_path( sh, bench, - &format!("./target/rustc-perf/collector/compile-benchmarks/{bench}"), + &format!("./target/metrics/rustc-perf/collector/compile-benchmarks/{bench}"), ) } fn measure_analysis_stats_path( @@ -156,7 +160,7 @@ struct Host { impl Metrics { fn new(sh: &Shell) -> anyhow::Result { - let host = Host::new(sh)?; + let host = Host::new(sh).unwrap_or_else(|_| Host::unknown()); let timestamp = 
SystemTime::now(); let revision = cmd!(sh, "git rev-parse HEAD").read()?; let perf_revision = "a584462e145a0c04760fd9391daefb4f6bd13a99".into(); @@ -187,9 +191,13 @@ impl Metrics { } impl Host { + fn unknown() -> Host { + Host { os: "unknown".into(), cpu: "unknown".into(), mem: "unknown".into() } + } + fn new(sh: &Shell) -> anyhow::Result { if cfg!(not(target_os = "linux")) { - return Ok(Host { os: "unknown".into(), cpu: "unknown".into(), mem: "unknown".into() }); + return Ok(Host::unknown()); } let os = read_field(sh, "/etc/os-release", "PRETTY_NAME=")?.trim_matches('"').to_owned(); diff --git a/src/tools/rust-analyzer/xtask/src/tidy.rs b/src/tools/rust-analyzer/xtask/src/tidy.rs index f91192b0076ba..0462835f0675a 100644 --- a/src/tools/rust-analyzer/xtask/src/tidy.rs +++ b/src/tools/rust-analyzer/xtask/src/tidy.rs @@ -235,6 +235,10 @@ impl TidyDocs { return; } + if is_ported_from_rustc(path, &["crates/hir-ty/src/next_solver"]) { + return; + } + let first_line = match text.lines().next() { Some(it) => it, None => return, @@ -290,6 +294,11 @@ fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool { .any(|it| dirs_to_exclude.contains(&it)) } +fn is_ported_from_rustc(p: &Path, dirs_to_exclude: &[&str]) -> bool { + let p = p.strip_prefix(project_root()).unwrap(); + dirs_to_exclude.iter().any(|exclude| p.starts_with(exclude)) +} + #[derive(Default)] struct TidyMarks { hits: HashSet,

( + &self, + tcx: DbInterner<'db>, + value: impl Upcast, P>, + ) -> Obligation<'db, P> { + Obligation::with_depth(tcx, self.cause.clone(), self.recursion_depth, self.param_env, value) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/type_variable.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/type_variable.rs new file mode 100644 index 0000000000000..29e7b883c93bf --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/type_variable.rs @@ -0,0 +1,394 @@ +//! Storage for type variables for the infer context the next-trait-solver. + +use std::cmp; +use std::marker::PhantomData; +use std::ops::Range; + +use ena::snapshot_vec as sv; +use ena::undo_log::Rollback; +use ena::unify as ut; +use rustc_index::IndexVec; +use rustc_type_ir::TyVid; +use rustc_type_ir::UniverseIndex; +use rustc_type_ir::inherent::Ty as _; +use tracing::debug; + +use crate::next_solver::SolverDefId; +use crate::next_solver::Ty; +use crate::next_solver::infer::{InferCtxtUndoLogs, iter_idx_range}; + +/// Represents a single undo-able action that affects a type inference variable. +#[derive(Clone)] +pub(crate) enum UndoLog<'tcx> { + EqRelation(sv::UndoLog>>), + SubRelation(sv::UndoLog>), +} + +/// Convert from a specific kind of undo to the more general UndoLog +impl<'db> From>>> for UndoLog<'db> { + fn from(l: sv::UndoLog>>) -> Self { + UndoLog::EqRelation(l) + } +} + +/// Convert from a specific kind of undo to the more general UndoLog +impl<'db> From>> for UndoLog<'db> { + fn from(l: sv::UndoLog>) -> Self { + UndoLog::SubRelation(l) + } +} + +impl<'db> Rollback>>> for TypeVariableStorage<'db> { + fn reverse(&mut self, undo: sv::UndoLog>>) { + self.eq_relations.reverse(undo) + } +} + +impl<'tcx> Rollback>> for TypeVariableStorage<'tcx> { + fn reverse(&mut self, undo: sv::UndoLog>) { + self.sub_unification_table.reverse(undo) + } +} + +impl<'tcx> Rollback> for TypeVariableStorage<'tcx> { + fn reverse(&mut self, undo: UndoLog<'tcx>) { + match undo { + UndoLog::EqRelation(undo) => self.eq_relations.reverse(undo), + UndoLog::SubRelation(undo) => self.sub_unification_table.reverse(undo), + } + } +} + +#[derive(Debug, Clone, Default)] +pub(crate) struct TypeVariableStorage<'db> { + /// The origins of each type variable. + values: IndexVec, + /// Two variables are unified in `eq_relations` when we have a + /// constraint `?X == ?Y`. This table also stores, for each key, + /// the known value. + eq_relations: ut::UnificationTableStorage>, + /// Only used by `-Znext-solver` and for diagnostics. Tracks whether + /// type variables are related via subtyping at all, ignoring which of + /// the two is the subtype. + /// + /// When reporting ambiguity errors, we sometimes want to + /// treat all inference vars which are subtypes of each + /// others as if they are equal. For this case we compute + /// the transitive closure of our subtype obligations here. + /// + /// E.g. when encountering ambiguity errors, we want to suggest + /// specifying some method argument or to add a type annotation + /// to a local variable. Because subtyping cannot change the + /// shape of a type, it's fine if the cause of the ambiguity error + /// is only related to the suggested variable via subtyping. + /// + /// Even for something like `let x = returns_arg(); x.method();` the + /// type of `x` is only a supertype of the argument of `returns_arg`. We + /// still want to suggest specifying the type of the argument. 
+ sub_unification_table: ut::UnificationTableStorage, +} + +pub(crate) struct TypeVariableTable<'a, 'db> { + storage: &'a mut TypeVariableStorage<'db>, + + undo_log: &'a mut InferCtxtUndoLogs<'db>, +} + +#[derive(Copy, Clone, Debug)] +pub struct TypeVariableOrigin { + /// `DefId` of the type parameter this was instantiated for, if any. + /// + /// This should only be used for diagnostics. + pub param_def_id: Option, +} + +#[derive(Debug, Clone)] +pub(crate) struct TypeVariableData { + origin: TypeVariableOrigin, +} + +#[derive(Clone, Debug)] +pub(crate) enum TypeVariableValue<'db> { + Known { value: Ty<'db> }, + Unknown { universe: UniverseIndex }, +} + +impl<'db> TypeVariableValue<'db> { + /// If this value is known, returns the type it is known to be. + /// Otherwise, `None`. + pub(crate) fn known(&self) -> Option> { + match self { + TypeVariableValue::Unknown { .. } => None, + TypeVariableValue::Known { value } => Some(*value), + } + } + + pub(crate) fn is_unknown(&self) -> bool { + match *self { + TypeVariableValue::Unknown { .. } => true, + TypeVariableValue::Known { .. } => false, + } + } +} + +impl<'db> TypeVariableStorage<'db> { + #[inline] + pub(crate) fn with_log<'a>( + &'a mut self, + undo_log: &'a mut InferCtxtUndoLogs<'db>, + ) -> TypeVariableTable<'a, 'db> { + TypeVariableTable { storage: self, undo_log } + } + + #[inline] + pub(crate) fn eq_relations_ref(&self) -> &ut::UnificationTableStorage> { + &self.eq_relations + } + + pub(super) fn finalize_rollback(&mut self) { + debug_assert!(self.values.len() >= self.eq_relations.len()); + self.values.truncate(self.eq_relations.len()); + } +} + +impl<'db> TypeVariableTable<'_, 'db> { + /// Returns the origin that was given when `vid` was created. + /// + /// Note that this function does not return care whether + /// `vid` has been unified with something else or not. + pub(crate) fn var_origin(&self, vid: TyVid) -> TypeVariableOrigin { + self.storage.values[vid].origin + } + + /// Records that `a == b`, depending on `dir`. + /// + /// Precondition: neither `a` nor `b` are known. + pub(crate) fn equate(&mut self, a: TyVid, b: TyVid) { + debug_assert!(self.probe(a).is_unknown()); + debug_assert!(self.probe(b).is_unknown()); + self.eq_relations().union(a, b); + self.sub_unification_table().union(a, b); + } + + /// Records that `a` and `b` are related via subtyping. We don't track + /// which of the two is the subtype. + /// + /// Precondition: neither `a` nor `b` are known. + pub(crate) fn sub_unify(&mut self, a: TyVid, b: TyVid) { + debug_assert!(self.probe(a).is_unknown()); + debug_assert!(self.probe(b).is_unknown()); + self.sub_unification_table().union(a, b); + } + + /// Instantiates `vid` with the type `ty`. + /// + /// Precondition: `vid` must not have been previously instantiated. + pub(crate) fn instantiate(&mut self, vid: TyVid, ty: Ty<'db>) { + let vid = self.root_var(vid); + debug_assert!(!ty.is_ty_var(), "instantiating ty var with var: {vid:?} {ty:?}"); + debug_assert!(self.probe(vid).is_unknown()); + debug_assert!( + self.eq_relations().probe_value(vid).is_unknown(), + "instantiating type variable `{vid:?}` twice: new-value = {ty:?}, old-value={:?}", + self.eq_relations().probe_value(vid) + ); + self.eq_relations().union_value(vid, TypeVariableValue::Known { value: ty }); + } + + /// Creates a new type variable. + /// + /// - `diverging`: indicates if this is a "diverging" type + /// variable, e.g., one created as the type of a `return` + /// expression. 
The code in this module doesn't care if a + /// variable is diverging, but the main Rust type-checker will + /// sometimes "unify" such variables with the `!` or `()` types. + /// - `origin`: indicates *why* the type variable was created. + /// The code in this module doesn't care, but it can be useful + /// for improving error messages. + pub(crate) fn new_var(&mut self, universe: UniverseIndex, origin: TypeVariableOrigin) -> TyVid { + let eq_key = self.eq_relations().new_key(TypeVariableValue::Unknown { universe }); + + let sub_key = self.sub_unification_table().new_key(()); + debug_assert_eq!(eq_key.vid, sub_key.vid); + + let index = self.storage.values.push(TypeVariableData { origin }); + debug_assert_eq!(eq_key.vid, index); + + debug!("new_var(index={:?}, universe={:?}, origin={:?})", eq_key.vid, universe, origin); + + index + } + + /// Returns the number of type variables created thus far. + pub(crate) fn num_vars(&self) -> usize { + self.storage.values.len() + } + + /// Returns the "root" variable of `vid` in the `eq_relations` + /// equivalence table. All type variables that have been equated + /// will yield the same root variable (per the union-find + /// algorithm), so `root_var(a) == root_var(b)` implies that `a == + /// b` (transitively). + pub(crate) fn root_var(&mut self, vid: TyVid) -> TyVid { + self.eq_relations().find(vid).vid + } + + /// Returns the "root" variable of `vid` in the `sub_unification_table` + /// equivalence table. All type variables that have been are related via + /// equality or subtyping will yield the same root variable (per the + /// union-find algorithm), so `sub_unification_table_root_var(a) + /// == sub_unification_table_root_var(b)` implies that: + /// ```text + /// exists X. (a <: X || X <: a) && (b <: X || X <: b) + /// ``` + pub(crate) fn sub_unification_table_root_var(&mut self, vid: TyVid) -> TyVid { + self.sub_unification_table().find(vid).vid + } + + /// Retrieves the type to which `vid` has been instantiated, if + /// any. + pub(crate) fn probe(&mut self, vid: TyVid) -> TypeVariableValue<'db> { + self.inlined_probe(vid) + } + + /// An always-inlined variant of `probe`, for very hot call sites. + #[inline(always)] + pub(crate) fn inlined_probe(&mut self, vid: TyVid) -> TypeVariableValue<'db> { + self.eq_relations().inlined_probe_value(vid) + } + + #[inline] + fn eq_relations(&mut self) -> super::UnificationTable<'_, 'db, TyVidEqKey<'db>> { + self.storage.eq_relations.with_log(self.undo_log) + } + + #[inline] + fn sub_unification_table(&mut self) -> super::UnificationTable<'_, 'db, TyVidSubKey> { + self.storage.sub_unification_table.with_log(self.undo_log) + } + + /// Returns a range of the type variables created during the snapshot. + pub(crate) fn vars_since_snapshot( + &mut self, + value_count: usize, + ) -> (Range, Vec) { + let range = TyVid::from_usize(value_count)..TyVid::from_usize(self.num_vars()); + (range.clone(), iter_idx_range(range).map(|index| self.var_origin(index)).collect()) + } + + /// Returns indices of all variables that are not yet + /// instantiated. + pub(crate) fn unresolved_variables(&mut self) -> Vec { + (0..self.num_vars()) + .filter_map(|i| { + let vid = TyVid::from_usize(i); + match self.probe(vid) { + TypeVariableValue::Unknown { .. } => Some(vid), + TypeVariableValue::Known { .. 
} => None, + } + }) + .collect() + } +} + +/////////////////////////////////////////////////////////////////////////// + +/// These structs (a newtyped TyVid) are used as the unification key +/// for the `eq_relations`; they carry a `TypeVariableValue` along +/// with them. +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub(crate) struct TyVidEqKey<'db> { + vid: TyVid, + + // in the table, we map each ty-vid to one of these: + phantom: PhantomData>, +} + +impl<'db> From for TyVidEqKey<'db> { + #[inline] // make this function eligible for inlining - it is quite hot. + fn from(vid: TyVid) -> Self { + TyVidEqKey { vid, phantom: PhantomData } + } +} + +impl<'db> ut::UnifyKey for TyVidEqKey<'db> { + type Value = TypeVariableValue<'db>; + #[inline(always)] + fn index(&self) -> u32 { + self.vid.as_u32() + } + #[inline] + fn from_index(i: u32) -> Self { + TyVidEqKey::from(TyVid::from_u32(i)) + } + fn tag() -> &'static str { + "TyVidEqKey" + } + fn order_roots(a: Self, _: &Self::Value, b: Self, _: &Self::Value) -> Option<(Self, Self)> { + if a.vid.as_u32() < b.vid.as_u32() { Some((a, b)) } else { Some((b, a)) } + } +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub(crate) struct TyVidSubKey { + vid: TyVid, +} + +impl From for TyVidSubKey { + #[inline] // make this function eligible for inlining - it is quite hot. + fn from(vid: TyVid) -> Self { + TyVidSubKey { vid } + } +} + +impl ut::UnifyKey for TyVidSubKey { + type Value = (); + #[inline] + fn index(&self) -> u32 { + self.vid.as_u32() + } + #[inline] + fn from_index(i: u32) -> TyVidSubKey { + TyVidSubKey { vid: TyVid::from_u32(i) } + } + fn tag() -> &'static str { + "TyVidSubKey" + } +} + +impl<'db> ut::UnifyValue for TypeVariableValue<'db> { + type Error = ut::NoError; + + fn unify_values(value1: &Self, value2: &Self) -> Result { + match (value1, value2) { + // We never equate two type variables, both of which + // have known types. Instead, we recursively equate + // those types. + (&TypeVariableValue::Known { .. }, &TypeVariableValue::Known { .. }) => { + panic!("equating two type variables, both of which have known types") + } + + // If one side is known, prefer that one. + (&TypeVariableValue::Known { .. }, &TypeVariableValue::Unknown { .. }) => { + Ok(value1.clone()) + } + (&TypeVariableValue::Unknown { .. }, &TypeVariableValue::Known { .. }) => { + Ok(value2.clone()) + } + + // If both sides are *unknown*, it hardly matters, does it? + ( + &TypeVariableValue::Unknown { universe: universe1 }, + &TypeVariableValue::Unknown { universe: universe2 }, + ) => { + // If we unify two unbound variables, ?T and ?U, then whatever + // value they wind up taking (which must be the same value) must + // be nameable by both universes. Therefore, the resulting + // universe is the minimum of the two universes, because that is + // the one which contains the fewest names in scope. + let universe = cmp::min(universe1, universe2); + Ok(TypeVariableValue::Unknown { universe }) + } + } + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/unify_key.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/unify_key.rs new file mode 100644 index 0000000000000..dc913b262a7c2 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/unify_key.rs @@ -0,0 +1,179 @@ +//! Unification keyes for the infer context the next-trait-solver. 
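// Illustrative sketch (not from the original patch): the `unify_values` impls in these
// files all follow the same "min universe" rule. Two already-known values are never
// unified directly; if one side is known it wins; if both are unknown, the merged
// variable must stay nameable from both sides, so it keeps the smaller universe index.
// A pure-std model of that rule, with hypothetical stand-in types:
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct Universe(u32);

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum VarValue {
    Known(u32), // stand-in for a concrete type/const/region
    Unknown(Universe),
}

fn unify(a: VarValue, b: VarValue) -> Result<VarValue, &'static str> {
    match (a, b) {
        (VarValue::Known(_), VarValue::Known(_)) => Err("both sides already known"),
        (known @ VarValue::Known(_), VarValue::Unknown(_))
        | (VarValue::Unknown(_), known @ VarValue::Known(_)) => Ok(known),
        (VarValue::Unknown(u1), VarValue::Unknown(u2)) => {
            // Whatever value the merged variable ends up taking must be nameable
            // in both universes, so take the minimum of the two.
            Ok(VarValue::Unknown(u1.min(u2)))
        }
    }
}

fn main() {
    assert_eq!(
        unify(VarValue::Unknown(Universe(3)), VarValue::Unknown(Universe(1))),
        Ok(VarValue::Unknown(Universe(1)))
    );
}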
+ +use std::cmp; +use std::marker::PhantomData; + +use ena::unify::{NoError, UnifyKey, UnifyValue}; +use rustc_type_ir::{ConstVid, RegionKind, RegionVid, UniverseIndex, inherent::IntoKind}; + +use crate::next_solver::{Const, Region, SolverDefId, Ty}; + +#[derive(Clone, Debug)] +pub enum RegionVariableValue<'db> { + Known { value: Region<'db> }, + Unknown { universe: UniverseIndex }, +} + +#[derive(PartialEq, Copy, Clone, Debug)] +pub struct RegionVidKey<'db> { + pub vid: RegionVid, + pub phantom: PhantomData>, +} + +impl<'db> From for RegionVidKey<'db> { + fn from(vid: RegionVid) -> Self { + RegionVidKey { vid, phantom: PhantomData } + } +} + +impl<'db> UnifyKey for RegionVidKey<'db> { + type Value = RegionVariableValue<'db>; + #[inline] + fn index(&self) -> u32 { + self.vid.as_u32() + } + #[inline] + fn from_index(i: u32) -> Self { + RegionVidKey::from(RegionVid::from_u32(i)) + } + fn tag() -> &'static str { + "RegionVidKey" + } +} + +pub struct RegionUnificationError; +impl<'db> UnifyValue for RegionVariableValue<'db> { + type Error = RegionUnificationError; + + fn unify_values(value1: &Self, value2: &Self) -> Result { + match (value1, value2) { + (RegionVariableValue::Known { .. }, RegionVariableValue::Known { .. }) => { + Err(RegionUnificationError) + } + + (RegionVariableValue::Known { value }, RegionVariableValue::Unknown { universe }) + | (RegionVariableValue::Unknown { universe }, RegionVariableValue::Known { value }) => { + let universe_of_value = match (*value).kind() { + RegionKind::ReStatic + | RegionKind::ReErased + | RegionKind::ReLateParam(..) + | RegionKind::ReEarlyParam(..) + | RegionKind::ReError(_) => UniverseIndex::ROOT, + RegionKind::RePlaceholder(placeholder) => placeholder.universe, + RegionKind::ReVar(..) | RegionKind::ReBound(..) => { + panic!("not a universal region") + } + }; + + if universe.can_name(universe_of_value) { + Ok(RegionVariableValue::Known { value: *value }) + } else { + Err(RegionUnificationError) + } + } + + ( + RegionVariableValue::Unknown { universe: a }, + RegionVariableValue::Unknown { universe: b }, + ) => { + // If we unify two unconstrained regions then whatever + // value they wind up taking (which must be the same value) must + // be nameable by both universes. Therefore, the resulting + // universe is the minimum of the two universes, because that is + // the one which contains the fewest names in scope. + Ok(RegionVariableValue::Unknown { universe: (*a).min(*b) }) + } + } + } +} + +// Generic consts. + +#[derive(Copy, Clone, Debug)] +pub struct ConstVariableOrigin { + /// `DefId` of the const parameter this was instantiated for, if any. + /// + /// This should only be used for diagnostics. + pub param_def_id: Option, +} + +#[derive(Clone, Debug)] +pub enum ConstVariableValue<'db> { + Known { value: Const<'db> }, + Unknown { origin: ConstVariableOrigin, universe: UniverseIndex }, +} + +impl<'db> ConstVariableValue<'db> { + /// If this value is known, returns the const it is known to be. + /// Otherwise, `None`. + pub fn known(&self) -> Option> { + match self { + ConstVariableValue::Unknown { .. 
} => None, + ConstVariableValue::Known { value } => Some(*value), + } + } +} + +#[derive(PartialEq, Copy, Clone, Debug)] +pub struct ConstVidKey<'db> { + pub vid: ConstVid, + pub phantom: PhantomData>, +} + +impl<'db> From for ConstVidKey<'db> { + fn from(vid: ConstVid) -> Self { + ConstVidKey { vid, phantom: PhantomData } + } +} + +impl<'db> UnifyKey for ConstVidKey<'db> { + type Value = ConstVariableValue<'db>; + #[inline] + fn index(&self) -> u32 { + self.vid.as_u32() + } + #[inline] + fn from_index(i: u32) -> Self { + ConstVidKey::from(ConstVid::from_u32(i)) + } + fn tag() -> &'static str { + "ConstVidKey" + } + fn order_roots(a: Self, _: &Self::Value, b: Self, _: &Self::Value) -> Option<(Self, Self)> { + if a.vid.as_u32() < b.vid.as_u32() { Some((a, b)) } else { Some((b, a)) } + } +} + +impl<'db> UnifyValue for ConstVariableValue<'db> { + type Error = NoError; + + fn unify_values(value1: &Self, value2: &Self) -> Result { + match (value1, value2) { + (ConstVariableValue::Known { .. }, ConstVariableValue::Known { .. }) => { + panic!("equating two const variables, both of which have known values") + } + + // If one side is known, prefer that one. + (ConstVariableValue::Known { .. }, ConstVariableValue::Unknown { .. }) => { + Ok(value1.clone()) + } + (ConstVariableValue::Unknown { .. }, ConstVariableValue::Known { .. }) => { + Ok(value2.clone()) + } + + // If both sides are *unknown*, it hardly matters, does it? + ( + ConstVariableValue::Unknown { origin, universe: universe1 }, + ConstVariableValue::Unknown { origin: _, universe: universe2 }, + ) => { + // If we unify two unbound variables, ?T and ?U, then whatever + // value they wind up taking (which must be the same value) must + // be nameable by both universes. Therefore, the resulting + // universe is the minimum of the two universes, because that is + // the one which contains the fewest names in scope. 
+ let universe = cmp::min(*universe1, *universe2); + Ok(ConstVariableValue::Unknown { origin: *origin, universe }) + } + } + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/inspect.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/inspect.rs new file mode 100644 index 0000000000000..bc19d51d23e32 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/inspect.rs @@ -0,0 +1,499 @@ +pub use rustc_next_trait_solver::solve::inspect::*; + +use rustc_ast_ir::try_visit; +use rustc_next_trait_solver::{ + canonical::instantiate_canonical_state, + resolve::eager_resolve_vars, + solve::{SolverDelegateEvalExt, inspect}, +}; +use rustc_type_ir::{ + VisitorResult, + inherent::{IntoKind, Span as _}, + solve::{Certainty, GoalSource, MaybeCause, NoSolution}, +}; + +use crate::next_solver::{ + DbInterner, GenericArg, GenericArgs, Goal, NormalizesTo, ParamEnv, Predicate, PredicateKind, + QueryResult, SolverContext, Span, Term, + fulfill::NextSolverError, + infer::{ + InferCtxt, + traits::{Obligation, ObligationCause}, + }, + obligation_ctxt::ObligationCtxt, +}; + +pub struct InspectConfig { + pub max_depth: usize, +} + +pub struct InspectGoal<'a, 'db> { + infcx: &'a SolverContext<'db>, + depth: usize, + orig_values: Vec>, + goal: Goal<'db, Predicate<'db>>, + result: Result, + final_revision: inspect::Probe>, + normalizes_to_term_hack: Option>, + source: GoalSource, +} + +impl<'a, 'db> std::fmt::Debug for InspectGoal<'a, 'db> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("InspectGoal") + .field("depth", &self.depth) + .field("orig_values", &self.orig_values) + .field("goal", &self.goal) + .field("result", &self.result) + .field("final_revision", &self.final_revision) + .field("normalizes_to_term_hack", &self.normalizes_to_term_hack) + .field("source", &self.source) + .finish() + } +} + +impl<'a, 'db> std::fmt::Debug for InspectCandidate<'a, 'db> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("InspectCandidate") + .field("kind", &self.kind) + .field("steps", &self.steps) + .field("final_state", &self.final_state) + .field("result", &self.result) + .field("shallow_certainty", &self.shallow_certainty) + .finish() + } +} + +/// The expected term of a `NormalizesTo` goal gets replaced +/// with an unconstrained inference variable when computing +/// `NormalizesTo` goals and we return the nested goals to the +/// caller, who also equates the actual term with the expected. +/// +/// This is an implementation detail of the trait solver and +/// not something we want to leak to users. We therefore +/// treat `NormalizesTo` goals as if they apply the expected +/// type at the end of each candidate. +#[derive(Debug, Copy, Clone)] +struct NormalizesToTermHack<'db> { + term: Term<'db>, + unconstrained_term: Term<'db>, +} + +impl<'db> NormalizesToTermHack<'db> { + /// Relate the `term` with the new `unconstrained_term` created + /// when computing the proof tree for this `NormalizesTo` goals. + /// This handles nested obligations. 
+ fn constrain_and( + &self, + infcx: &InferCtxt<'db>, + param_env: ParamEnv<'db>, + f: impl FnOnce(&mut ObligationCtxt<'_, 'db>), + ) -> Result { + let mut ocx = ObligationCtxt::new(infcx); + ocx.eq(&ObligationCause::dummy(), param_env, self.term, self.unconstrained_term)?; + f(&mut ocx); + let errors = ocx.select_all_or_error(); + if errors.is_empty() { + Ok(Certainty::Yes) + } else if errors.iter().all(|e| !matches!(e, NextSolverError::TrueError(_))) { + Ok(Certainty::AMBIGUOUS) + } else { + Err(NoSolution) + } + } +} + +pub struct InspectCandidate<'a, 'db> { + goal: &'a InspectGoal<'a, 'db>, + kind: inspect::ProbeKind>, + steps: Vec<&'a inspect::ProbeStep>>, + final_state: inspect::CanonicalState, ()>, + result: QueryResult<'db>, + shallow_certainty: Certainty, +} + +impl<'a, 'db> InspectCandidate<'a, 'db> { + pub fn kind(&self) -> inspect::ProbeKind> { + self.kind + } + + pub fn result(&self) -> Result { + self.result.map(|c| c.value.certainty) + } + + pub fn goal(&self) -> &'a InspectGoal<'a, 'db> { + self.goal + } + + /// Certainty passed into `evaluate_added_goals_and_make_canonical_response`. + /// + /// If this certainty is `Yes`, then we must be confident that the candidate + /// must hold iff it's nested goals hold. This is not true if the certainty is + /// `Maybe(..)`, which suggests we forced ambiguity instead. + /// + /// This is *not* the certainty of the candidate's full nested evaluation, which + /// can be accessed with [`Self::result`] instead. + pub fn shallow_certainty(&self) -> Certainty { + self.shallow_certainty + } + + /// Visit all nested goals of this candidate without rolling + /// back their inference constraints. This function modifies + /// the state of the `infcx`. + pub fn visit_nested_no_probe>(&self, visitor: &mut V) -> V::Result { + for goal in self.instantiate_nested_goals() { + try_visit!(goal.visit_with(visitor)); + } + + V::Result::output() + } + + /// Instantiate the nested goals for the candidate without rolling back their + /// inference constraints. This function modifies the state of the `infcx`. + /// + /// See [`Self::instantiate_impl_args`] if you need the impl args too. + pub fn instantiate_nested_goals(&self) -> Vec> { + let infcx = self.goal.infcx; + let param_env = self.goal.goal.param_env; + let mut orig_values = self.goal.orig_values.to_vec(); + + let mut instantiated_goals = vec![]; + for step in &self.steps { + match **step { + inspect::ProbeStep::AddGoal(source, goal) => instantiated_goals.push(( + source, + instantiate_canonical_state( + infcx, + Span::dummy(), + param_env, + &mut orig_values, + goal, + ), + )), + inspect::ProbeStep::RecordImplArgs { .. } => {} + inspect::ProbeStep::MakeCanonicalResponse { .. } + | inspect::ProbeStep::NestedProbe(_) => unreachable!(), + } + } + + let () = instantiate_canonical_state( + infcx, + Span::dummy(), + param_env, + &mut orig_values, + self.final_state, + ); + + if let Some(term_hack) = &self.goal.normalizes_to_term_hack { + // FIXME: We ignore the expected term of `NormalizesTo` goals + // when computing the result of its candidates. This is + // scuffed. + let _ = term_hack.constrain_and(infcx, param_env, |_| {}); + } + + instantiated_goals + .into_iter() + .map(|(source, goal)| self.instantiate_proof_tree_for_nested_goal(source, goal)) + .collect() + } + + /// Instantiate the args of an impl if this candidate came from a + /// `CandidateSource::Impl`. This function modifies the state of the + /// `infcx`. 
+ pub fn instantiate_impl_args(&self) -> GenericArgs<'db> { + let infcx = self.goal.infcx; + let param_env = self.goal.goal.param_env; + let mut orig_values = self.goal.orig_values.to_vec(); + + for step in &self.steps { + match **step { + inspect::ProbeStep::RecordImplArgs { impl_args } => { + let impl_args = instantiate_canonical_state( + infcx, + Span::dummy(), + param_env, + &mut orig_values, + impl_args, + ); + + let () = instantiate_canonical_state( + infcx, + Span::dummy(), + param_env, + &mut orig_values, + self.final_state, + ); + + // No reason we couldn't support this, but we don't need to for select. + assert!( + self.goal.normalizes_to_term_hack.is_none(), + "cannot use `instantiate_impl_args` with a `NormalizesTo` goal" + ); + + return eager_resolve_vars(infcx, impl_args); + } + inspect::ProbeStep::AddGoal(..) => {} + inspect::ProbeStep::MakeCanonicalResponse { .. } + | inspect::ProbeStep::NestedProbe(_) => unreachable!(), + } + } + + panic!("expected impl args probe step for `instantiate_impl_args`"); + } + + pub fn instantiate_proof_tree_for_nested_goal( + &self, + source: GoalSource, + goal: Goal<'db, Predicate<'db>>, + ) -> InspectGoal<'a, 'db> { + let infcx = self.goal.infcx; + match goal.predicate.kind().no_bound_vars() { + Some(PredicateKind::NormalizesTo(NormalizesTo { alias, term })) => { + let unconstrained_term = infcx.next_term_var_of_kind(term); + let goal = + goal.with(infcx.interner, NormalizesTo { alias, term: unconstrained_term }); + // We have to use a `probe` here as evaluating a `NormalizesTo` can constrain the + // expected term. This means that candidates which only fail due to nested goals + // and which normalize to a different term then the final result could ICE: when + // building their proof tree, the expected term was unconstrained, but when + // instantiating the candidate it is already constrained to the result of another + // candidate. + let normalizes_to_term_hack = NormalizesToTermHack { term, unconstrained_term }; + let (proof_tree, nested_goals_result) = infcx.probe(|_| { + // Here, if we have any nested goals, then we make sure to apply them + // considering the constrained RHS, and pass the resulting certainty to + // `InspectGoal::new` so that the goal has the right result (and maintains + // the impression that we don't do this normalizes-to infer hack at all). + let (nested, proof_tree) = + infcx.evaluate_root_goal_for_proof_tree(goal, Span::dummy()); + let nested_goals_result = nested.and_then(|nested| { + normalizes_to_term_hack.constrain_and( + infcx, + proof_tree.uncanonicalized_goal.param_env, + |ocx| { + ocx.register_obligations(nested.0.into_iter().map(|(_, goal)| { + Obligation::new( + infcx.interner, + ObligationCause::dummy(), + goal.param_env, + goal.predicate, + ) + })); + }, + ) + }); + (proof_tree, nested_goals_result) + }); + InspectGoal::new( + infcx, + self.goal.depth + 1, + proof_tree, + Some((normalizes_to_term_hack, nested_goals_result)), + source, + ) + } + _ => { + // We're using a probe here as evaluating a goal could constrain + // inference variables by choosing one candidate. If we then recurse + // into another candidate who ends up with different inference + // constraints, we get an ICE if we already applied the constraints + // from the chosen candidate. 
+ let proof_tree = + infcx.probe(|_| infcx.evaluate_root_goal_for_proof_tree(goal, Span::dummy()).1); + InspectGoal::new(infcx, self.goal.depth + 1, proof_tree, None, source) + } + } + } + + /// Visit all nested goals of this candidate, rolling back + /// all inference constraints. + pub fn visit_nested_in_probe>(&self, visitor: &mut V) -> V::Result { + self.goal.infcx.probe(|_| self.visit_nested_no_probe(visitor)) + } +} + +impl<'a, 'db> InspectGoal<'a, 'db> { + pub fn infcx(&self) -> &'a InferCtxt<'db> { + self.infcx + } + + pub fn goal(&self) -> Goal<'db, Predicate<'db>> { + self.goal + } + + pub fn result(&self) -> Result { + self.result + } + + pub fn source(&self) -> GoalSource { + self.source + } + + pub fn depth(&self) -> usize { + self.depth + } + + fn candidates_recur( + &'a self, + candidates: &mut Vec>, + steps: &mut Vec<&'a inspect::ProbeStep>>, + probe: &'a inspect::Probe>, + ) { + let mut shallow_certainty = None; + for step in &probe.steps { + match *step { + inspect::ProbeStep::AddGoal(..) | inspect::ProbeStep::RecordImplArgs { .. } => { + steps.push(step) + } + inspect::ProbeStep::MakeCanonicalResponse { shallow_certainty: c } => { + assert!(matches!( + shallow_certainty.replace(c), + None | Some(Certainty::Maybe { cause: MaybeCause::Ambiguity, .. }) + )); + } + inspect::ProbeStep::NestedProbe(ref probe) => { + match probe.kind { + // These never assemble candidates for the goal we're trying to solve. + inspect::ProbeKind::ProjectionCompatibility + | inspect::ProbeKind::ShadowedEnvProbing => continue, + + inspect::ProbeKind::NormalizedSelfTyAssembly + | inspect::ProbeKind::UnsizeAssembly + | inspect::ProbeKind::Root { .. } + | inspect::ProbeKind::TraitCandidate { .. } + | inspect::ProbeKind::OpaqueTypeStorageLookup { .. } + | inspect::ProbeKind::RigidAlias { .. } => { + // Nested probes have to prove goals added in their parent + // but do not leak them, so we truncate the added goals + // afterwards. + let num_steps = steps.len(); + self.candidates_recur(candidates, steps, probe); + steps.truncate(num_steps); + } + } + } + } + } + + match probe.kind { + inspect::ProbeKind::ProjectionCompatibility + | inspect::ProbeKind::ShadowedEnvProbing => { + panic!() + } + + inspect::ProbeKind::NormalizedSelfTyAssembly | inspect::ProbeKind::UnsizeAssembly => {} + + // We add a candidate even for the root evaluation if there + // is only one way to prove a given goal, e.g. for `WellFormed`. + inspect::ProbeKind::Root { result } + | inspect::ProbeKind::TraitCandidate { source: _, result } + | inspect::ProbeKind::OpaqueTypeStorageLookup { result } + | inspect::ProbeKind::RigidAlias { result } => { + // We only add a candidate if `shallow_certainty` was set, which means + // that we ended up calling `evaluate_added_goals_and_make_canonical_response`. + if let Some(shallow_certainty) = shallow_certainty { + candidates.push(InspectCandidate { + goal: self, + kind: probe.kind, + steps: steps.clone(), + final_state: probe.final_state, + shallow_certainty, + result, + }); + } + } + } + } + + pub fn candidates(&'a self) -> Vec> { + let mut candidates = vec![]; + let mut nested_goals = vec![]; + self.candidates_recur(&mut candidates, &mut nested_goals, &self.final_revision); + candidates + } + + /// Returns the single candidate applicable for the current goal, if it exists. + /// + /// Returns `None` if there are either no or multiple applicable candidates. 
+ pub fn unique_applicable_candidate(&'a self) -> Option> { + // FIXME(-Znext-solver): This does not handle impl candidates + // hidden by env candidates. + let mut candidates = self.candidates(); + candidates.retain(|c| c.result().is_ok()); + candidates.pop().filter(|_| candidates.is_empty()) + } + + fn new( + infcx: &'a InferCtxt<'db>, + depth: usize, + root: inspect::GoalEvaluation>, + term_hack_and_nested_certainty: Option<( + NormalizesToTermHack<'db>, + Result, + )>, + source: GoalSource, + ) -> Self { + let infcx = <&SolverContext<'db>>::from(infcx); + + let inspect::GoalEvaluation { uncanonicalized_goal, orig_values, final_revision, result } = + root; + // If there's a normalizes-to goal, AND the evaluation result with the result of + // constraining the normalizes-to RHS and computing the nested goals. + let result = result.and_then(|ok| { + let nested_goals_certainty = + term_hack_and_nested_certainty.map_or(Ok(Certainty::Yes), |(_, c)| c)?; + Ok(ok.value.certainty.and(nested_goals_certainty)) + }); + + InspectGoal { + infcx, + depth, + orig_values, + goal: eager_resolve_vars(infcx, uncanonicalized_goal), + result, + final_revision, + normalizes_to_term_hack: term_hack_and_nested_certainty.map(|(n, _)| n), + source, + } + } + + pub(crate) fn visit_with>(&self, visitor: &mut V) -> V::Result { + if self.depth < visitor.config().max_depth { + try_visit!(visitor.visit_goal(self)); + } + + V::Result::output() + } +} + +/// The public API to interact with proof trees. +pub trait ProofTreeVisitor<'db> { + type Result: VisitorResult; + + fn config(&self) -> InspectConfig { + InspectConfig { max_depth: 10 } + } + + fn visit_goal(&mut self, goal: &InspectGoal<'_, 'db>) -> Self::Result; +} + +impl<'db> InferCtxt<'db> { + pub(crate) fn visit_proof_tree>( + &self, + goal: Goal<'db, Predicate<'db>>, + visitor: &mut V, + ) -> V::Result { + self.visit_proof_tree_at_depth(goal, 0, visitor) + } + + pub(crate) fn visit_proof_tree_at_depth>( + &self, + goal: Goal<'db, Predicate<'db>>, + depth: usize, + visitor: &mut V, + ) -> V::Result { + let (_, proof_tree) = <&SolverContext<'db>>::from(self) + .evaluate_root_goal_for_proof_tree(goal, Span::dummy()); + visitor.visit_goal(&InspectGoal::new(self, depth, proof_tree, None, GoalSource::Misc)) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs new file mode 100644 index 0000000000000..9cf56bef9578a --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs @@ -0,0 +1,2155 @@ +//! Things related to the Interner in the next-trait-solver. 
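// Illustrative sketch (not from the original patch) of how the `ProofTreeVisitor`
// API defined above is meant to be used. It assumes the surrounding module's types
// (`InspectGoal`, `InspectConfig`) are in scope and that `()` implements
// `VisitorResult`, as the visitor plumbing above already relies on. The name
// `DepthProbe` and its field are hypothetical.
struct DepthProbe {
    deepest_goal: usize,
}

impl<'db> ProofTreeVisitor<'db> for DepthProbe {
    type Result = ();

    fn config(&self) -> InspectConfig {
        // Stay below the default max_depth of 10 to keep the walk cheap.
        InspectConfig { max_depth: 4 }
    }

    fn visit_goal(&mut self, goal: &InspectGoal<'_, 'db>) -> Self::Result {
        // Record how deep this goal sits in the proof tree.
        self.deepest_goal = self.deepest_goal.max(goal.depth());
        // Recurse into each candidate's nested goals, rolling back any inference
        // constraints the candidate would otherwise leave behind.
        for candidate in goal.candidates() {
            candidate.visit_nested_in_probe(self);
        }
    }
}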
+#![allow(unused)] + +use base_db::Crate; +use chalk_ir::{ProgramClauseImplication, SeparatorTraitRef, Variances}; +use hir_def::lang_item::LangItem; +use hir_def::signatures::{FieldData, FnFlags, ImplFlags, StructFlags, TraitFlags}; +use hir_def::{AdtId, BlockId, GenericDefId, TypeAliasId, VariantId}; +use hir_def::{AttrDefId, Lookup}; +use hir_def::{CallableDefId, EnumVariantId, ItemContainerId, StructId, UnionId}; +use intern::sym::non_exhaustive; +use intern::{Interned, impl_internable, sym}; +use la_arena::Idx; +use rustc_abi::{Align, ReprFlags, ReprOptions}; +use rustc_hash::FxHashSet; +use rustc_index::bit_set::DenseBitSet; +use rustc_type_ir::elaborate::elaborate; +use rustc_type_ir::error::TypeError; +use rustc_type_ir::inherent::{ + AdtDef as _, GenericArgs as _, GenericsOf, IntoKind, SliceLike as _, Span as _, +}; +use rustc_type_ir::lang_items::{SolverAdtLangItem, SolverLangItem, SolverTraitLangItem}; +use rustc_type_ir::solve::SizedTraitKind; +use rustc_type_ir::{ + AliasTerm, AliasTermKind, AliasTy, AliasTyKind, EarlyBinder, FlagComputation, Flags, + ImplPolarity, InferTy, ProjectionPredicate, TraitPredicate, TraitRef, Upcast, +}; +use salsa::plumbing::AsId; +use smallvec::{SmallVec, smallvec}; +use std::fmt; +use std::ops::ControlFlow; +use syntax::ast::SelfParamKind; +use triomphe::Arc; + +use rustc_ast_ir::visit::VisitorResult; +use rustc_index::IndexVec; +use rustc_type_ir::TypeVisitableExt; +use rustc_type_ir::{ + BoundVar, CollectAndApply, DebruijnIndex, GenericArgKind, RegionKind, TermKind, UniverseIndex, + Variance, WithCachedTypeInfo, elaborate, + inherent::{self, Const as _, Region as _, Ty as _}, + ir_print, relate, +}; + +use crate::lower_nextsolver::{self, TyLoweringContext}; +use crate::method_resolution::{ALL_FLOAT_FPS, ALL_INT_FPS, TyFingerprint}; +use crate::next_solver::infer::InferCtxt; +use crate::next_solver::util::{ContainsTypeErrors, explicit_item_bounds, for_trait_impls}; +use crate::next_solver::{ + AdtIdWrapper, BoundConst, CallableIdWrapper, CanonicalVarKind, ClosureIdWrapper, + CoroutineIdWrapper, Ctor, FnSig, FxIndexMap, ImplIdWrapper, InternedWrapperNoDebug, + RegionAssumptions, SolverContext, SolverDefIds, TraitIdWrapper, TypeAliasIdWrapper, +}; +use crate::{ConstScalar, FnAbi, Interner, db::HirDatabase}; + +use super::generics::generics; +use super::util::sizedness_constraint_for_ty; +use super::{ + Binder, BoundExistentialPredicate, BoundExistentialPredicates, BoundTy, BoundTyKind, Clause, + Clauses, Const, ConstKind, ErrorGuaranteed, ExprConst, ExternalConstraints, + ExternalConstraintsData, GenericArg, GenericArgs, InternedClausesWrapper, ParamConst, ParamEnv, + ParamTy, PlaceholderConst, PlaceholderTy, PredefinedOpaques, PredefinedOpaquesData, Predicate, + PredicateKind, Term, Ty, TyKind, Tys, ValueConst, + abi::Safety, + fold::{BoundVarReplacer, BoundVarReplacerDelegate, FnMutDelegate}, + generics::Generics, + mapping::ChalkToNextSolver, + region::{ + BoundRegion, BoundRegionKind, EarlyParamRegion, LateParamRegion, PlaceholderRegion, Region, + }, +}; +use super::{ClauseKind, SolverDefId, Valtree}; + +#[macro_export] +#[doc(hidden)] +macro_rules! 
_interned_vec_nolifetime_salsa { + ($name:ident, $ty:ty) => { + interned_vec_nolifetime_salsa!($name, $ty, nofold); + + impl<'db> rustc_type_ir::TypeFoldable> for $name { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + use rustc_type_ir::inherent::SliceLike as _; + let inner: smallvec::SmallVec<[_; 2]> = + self.iter().map(|v| v.try_fold_with(folder)).collect::>()?; + Ok($name::new_(folder.cx().db(), inner)) + } + fn fold_with>>( + self, + folder: &mut F, + ) -> Self { + use rustc_type_ir::inherent::SliceLike as _; + let inner: smallvec::SmallVec<[_; 2]> = + self.iter().map(|v| v.fold_with(folder)).collect(); + $name::new_(folder.cx().db(), inner) + } + } + + impl<'db> rustc_type_ir::TypeVisitable> for $name { + fn visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + use rustc_ast_ir::visit::VisitorResult; + use rustc_type_ir::inherent::SliceLike as _; + rustc_ast_ir::walk_visitable_list!(visitor, self.as_slice().iter()); + V::Result::output() + } + } + }; + ($name:ident, $ty:ty, nofold) => { + #[salsa::interned(no_lifetime, constructor = new_, debug)] + pub struct $name { + #[returns(ref)] + inner_: smallvec::SmallVec<[$ty; 2]>, + } + + impl $name { + pub fn new_from_iter<'db>( + interner: DbInterner<'db>, + data: impl IntoIterator, + ) -> Self { + $name::new_(interner.db(), data.into_iter().collect::>()) + } + + pub fn inner(&self) -> &smallvec::SmallVec<[$ty; 2]> { + // SAFETY: ¯\_(ツ)_/¯ + salsa::with_attached_database(|db| { + let inner = self.inner_(db); + unsafe { std::mem::transmute(inner) } + }) + .unwrap() + } + } + + impl rustc_type_ir::inherent::SliceLike for $name { + type Item = $ty; + + type IntoIter = as IntoIterator>::IntoIter; + + fn iter(self) -> Self::IntoIter { + self.inner().clone().into_iter() + } + + fn as_slice(&self) -> &[Self::Item] { + self.inner().as_slice() + } + } + + impl IntoIterator for $name { + type Item = $ty; + type IntoIter = ::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + rustc_type_ir::inherent::SliceLike::iter(self) + } + } + + impl Default for $name { + fn default() -> Self { + $name::new_from_iter(DbInterner::conjure(), []) + } + } + }; +} + +pub use crate::_interned_vec_nolifetime_salsa as interned_vec_nolifetime_salsa; + +#[macro_export] +#[doc(hidden)] +macro_rules! 
_interned_vec_db { + ($name:ident, $ty:ident) => { + interned_vec_db!($name, $ty, nofold); + + impl<'db> rustc_type_ir::TypeFoldable> for $name<'db> { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + use rustc_type_ir::inherent::SliceLike as _; + let inner: smallvec::SmallVec<[_; 2]> = + self.iter().map(|v| v.try_fold_with(folder)).collect::>()?; + Ok($name::new_(folder.cx().db(), inner)) + } + fn fold_with>>( + self, + folder: &mut F, + ) -> Self { + use rustc_type_ir::inherent::SliceLike as _; + let inner: smallvec::SmallVec<[_; 2]> = + self.iter().map(|v| v.fold_with(folder)).collect(); + $name::new_(folder.cx().db(), inner) + } + } + + impl<'db> rustc_type_ir::TypeVisitable> for $name<'db> { + fn visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + use rustc_ast_ir::visit::VisitorResult; + use rustc_type_ir::inherent::SliceLike as _; + rustc_ast_ir::walk_visitable_list!(visitor, self.as_slice().iter()); + V::Result::output() + } + } + }; + ($name:ident, $ty:ident, nofold) => { + #[salsa::interned(constructor = new_)] + pub struct $name<'db> { + #[returns(ref)] + inner_: smallvec::SmallVec<[$ty<'db>; 2]>, + } + + impl<'db> std::fmt::Debug for $name<'db> { + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.as_slice().fmt(fmt) + } + } + + impl<'db> $name<'db> { + pub fn new_from_iter( + interner: DbInterner<'db>, + data: impl IntoIterator>, + ) -> Self { + $name::new_(interner.db(), data.into_iter().collect::>()) + } + + pub fn inner(&self) -> &smallvec::SmallVec<[$ty<'db>; 2]> { + // SAFETY: ¯\_(ツ)_/¯ + salsa::with_attached_database(|db| { + let inner = self.inner_(db); + unsafe { std::mem::transmute(inner) } + }) + .unwrap() + } + } + + impl<'db> rustc_type_ir::inherent::SliceLike for $name<'db> { + type Item = $ty<'db>; + + type IntoIter = ; 2]> as IntoIterator>::IntoIter; + + fn iter(self) -> Self::IntoIter { + self.inner().clone().into_iter() + } + + fn as_slice(&self) -> &[Self::Item] { + self.inner().as_slice() + } + } + + impl<'db> IntoIterator for $name<'db> { + type Item = $ty<'db>; + type IntoIter = ::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + rustc_type_ir::inherent::SliceLike::iter(self) + } + } + + impl<'db> Default for $name<'db> { + fn default() -> Self { + $name::new_from_iter(DbInterner::conjure(), []) + } + } + }; +} + +pub use crate::_interned_vec_db as interned_vec_db; + +#[derive(Debug, Copy, Clone)] +pub struct DbInterner<'db> { + pub(crate) db: &'db dyn HirDatabase, + pub(crate) krate: Option, + pub(crate) block: Option, +} + +// FIXME: very wrong, see https://github.com/rust-lang/rust/pull/144808 +unsafe impl Send for DbInterner<'_> {} +unsafe impl Sync for DbInterner<'_> {} + +impl<'db> DbInterner<'db> { + // FIXME(next-solver): remove this method + pub fn conjure() -> DbInterner<'db> { + salsa::with_attached_database(|db| DbInterner { + db: unsafe { + std::mem::transmute::<&dyn HirDatabase, &'db dyn HirDatabase>(db.as_view()) + }, + krate: None, + block: None, + }) + .expect("db is expected to be attached") + } + + pub fn new_with( + db: &'db dyn HirDatabase, + krate: Option, + block: Option, + ) -> DbInterner<'db> { + DbInterner { db, krate, block } + } + + pub fn db(&self) -> &'db dyn HirDatabase { + self.db + } +} + +// This is intentionally left as `()` +#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] +pub struct Span(()); + +impl<'db> inherent::Span> for Span { + fn dummy() -> Self { + Span(()) + } +} + +interned_vec_nolifetime_salsa!(BoundVarKinds, BoundVarKind, nofold); + 
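// Illustrative sketch (not from the original patch) of what the interned-vec macros
// above generate: `new_from_iter` interns the elements through salsa, and the
// generated `SliceLike` impl gives slice-style access back. The helper name is
// hypothetical, and it assumes a salsa database is attached (as it is inside
// queries), since the generated `inner()` relies on `with_attached_database`.
fn bound_var_kinds_demo<'db>(interner: DbInterner<'db>) -> BoundVarKinds {
    use rustc_type_ir::inherent::SliceLike as _;

    // Intern a small list of bound-variable kinds.
    let kinds =
        BoundVarKinds::new_from_iter(interner, [BoundVarKind::Const, BoundVarKind::Const]);

    // Read access goes through the macro-generated `SliceLike` impl.
    assert_eq!(kinds.as_slice().len(), 2);
    kinds.as_slice()[0].expect_const();

    kinds
}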
+#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] +pub enum BoundVarKind { + Ty(BoundTyKind), + Region(BoundRegionKind), + Const, +} + +impl BoundVarKind { + pub fn expect_region(self) -> BoundRegionKind { + match self { + BoundVarKind::Region(lt) => lt, + _ => panic!("expected a region, but found another kind"), + } + } + + pub fn expect_ty(self) -> BoundTyKind { + match self { + BoundVarKind::Ty(ty) => ty, + _ => panic!("expected a type, but found another kind"), + } + } + + pub fn expect_const(self) { + match self { + BoundVarKind::Const => (), + _ => panic!("expected a const, but found another kind"), + } + } +} + +interned_vec_db!(CanonicalVars, CanonicalVarKind, nofold); + +pub struct DepNodeIndex; + +#[derive(Debug)] +pub struct Tracked(T); + +#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct Placeholder { + pub universe: UniverseIndex, + pub bound: T, +} + +impl std::fmt::Debug for Placeholder { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> fmt::Result { + if self.universe == UniverseIndex::ROOT { + write!(f, "!{:?}", self.bound) + } else { + write!(f, "!{}_{:?}", self.universe.index(), self.bound) + } + } +} + +#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] +pub struct AllocId; + +interned_vec_nolifetime_salsa!(VariancesOf, Variance, nofold); + +#[derive(Debug, Clone, Eq, PartialEq, Hash)] +pub struct VariantIdx(usize); + +// FIXME: could/should store actual data? +#[derive(Debug, Clone, Eq, PartialEq, Hash)] +pub enum VariantDef { + Struct(StructId), + Union(UnionId), + Enum(EnumVariantId), +} + +impl VariantDef { + pub fn id(&self) -> VariantId { + match self { + VariantDef::Struct(struct_id) => VariantId::StructId(*struct_id), + VariantDef::Union(union_id) => VariantId::UnionId(*union_id), + VariantDef::Enum(enum_variant_id) => VariantId::EnumVariantId(*enum_variant_id), + } + } + + pub fn fields(&self, db: &dyn HirDatabase) -> Vec<(Idx, FieldData)> { + let id: VariantId = match self { + VariantDef::Struct(it) => (*it).into(), + VariantDef::Union(it) => (*it).into(), + VariantDef::Enum(it) => (*it).into(), + }; + id.fields(db).fields().iter().map(|(id, data)| (id, data.clone())).collect() + } +} + +/* +/// Definition of a variant -- a struct's fields or an enum variant. +#[derive(Debug, HashStable, TyEncodable, TyDecodable)] +pub struct VariantDef { + /// `DefId` that identifies the variant itself. + /// If this variant belongs to a struct or union, then this is a copy of its `DefId`. + pub def_id: DefId, + /// `DefId` that identifies the variant's constructor. + /// If this variant is a struct variant, then this is `None`. + pub ctor: Option<(CtorKind, DefId)>, + /// Variant or struct name, maybe empty for anonymous adt (struct or union). + pub name: Symbol, + /// Discriminant of this variant. + pub discr: VariantDiscr, + /// Fields of this variant. + pub fields: IndexVec, + /// The error guarantees from parser, if any. + tainted: Option, + /// Flags of the variant (e.g. is field list non-exhaustive)? 
+ flags: VariantFlags, +} +*/ + +#[derive(Debug, Clone, Eq, PartialEq, Hash)] +pub struct AdtFlags { + is_enum: bool, + is_union: bool, + is_struct: bool, + is_phantom_data: bool, + is_fundamental: bool, + is_box: bool, + is_manually_drop: bool, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct AdtDefInner { + pub id: AdtId, + variants: Vec<(VariantIdx, VariantDef)>, + flags: AdtFlags, + repr: ReprOptions, +} + +// We're gonna cheat a little bit and implement `Hash` on only the `DefId` and +// accept there might be collisions for def ids from different crates (or across +// different tests, oh my). +impl std::hash::Hash for AdtDefInner { + #[inline] + fn hash(&self, s: &mut H) { + self.id.hash(s) + } +} + +#[salsa::interned(no_lifetime, constructor = new_)] +pub struct AdtDef { + #[returns(ref)] + data_: AdtDefInner, +} + +impl AdtDef { + pub fn new<'db>(def_id: AdtId, interner: DbInterner<'db>) -> Self { + let db = interner.db(); + let (flags, variants, repr) = match def_id { + AdtId::StructId(struct_id) => { + let data = db.struct_signature(struct_id); + + let flags = AdtFlags { + is_enum: false, + is_union: false, + is_struct: true, + is_phantom_data: data.flags.contains(StructFlags::IS_PHANTOM_DATA), + is_fundamental: data.flags.contains(StructFlags::FUNDAMENTAL), + is_box: data.flags.contains(StructFlags::IS_BOX), + is_manually_drop: data.flags.contains(StructFlags::IS_MANUALLY_DROP), + }; + + let variants = vec![(VariantIdx(0), VariantDef::Struct(struct_id))]; + + let mut repr = ReprOptions::default(); + repr.align = data.repr.and_then(|r| r.align); + repr.pack = data.repr.and_then(|r| r.pack); + repr.int = data.repr.and_then(|r| r.int); + + let mut repr_flags = ReprFlags::empty(); + if flags.is_box { + repr_flags.insert(ReprFlags::IS_LINEAR); + } + if data.repr.is_some_and(|r| r.c()) { + repr_flags.insert(ReprFlags::IS_C); + } + if data.repr.is_some_and(|r| r.simd()) { + repr_flags.insert(ReprFlags::IS_SIMD); + } + repr.flags = repr_flags; + + (flags, variants, repr) + } + AdtId::UnionId(union_id) => { + let data = db.union_signature(union_id); + + let flags = AdtFlags { + is_enum: false, + is_union: true, + is_struct: false, + is_phantom_data: false, + is_fundamental: false, + is_box: false, + is_manually_drop: false, + }; + + let variants = vec![(VariantIdx(0), VariantDef::Union(union_id))]; + + let mut repr = ReprOptions::default(); + repr.align = data.repr.and_then(|r| r.align); + repr.pack = data.repr.and_then(|r| r.pack); + repr.int = data.repr.and_then(|r| r.int); + + let mut repr_flags = ReprFlags::empty(); + if flags.is_box { + repr_flags.insert(ReprFlags::IS_LINEAR); + } + if data.repr.is_some_and(|r| r.c()) { + repr_flags.insert(ReprFlags::IS_C); + } + if data.repr.is_some_and(|r| r.simd()) { + repr_flags.insert(ReprFlags::IS_SIMD); + } + repr.flags = repr_flags; + + (flags, variants, repr) + } + AdtId::EnumId(enum_id) => { + let flags = AdtFlags { + is_enum: true, + is_union: false, + is_struct: false, + is_phantom_data: false, + is_fundamental: false, + is_box: false, + is_manually_drop: false, + }; + + let variants = enum_id + .enum_variants(db) + .variants + .iter() + .enumerate() + .map(|(idx, v)| (VariantIdx(idx), v)) + .map(|(idx, v)| (idx, VariantDef::Enum(v.0))) + .collect(); + + let data = db.enum_signature(enum_id); + + let mut repr = ReprOptions::default(); + repr.align = data.repr.and_then(|r| r.align); + repr.pack = data.repr.and_then(|r| r.pack); + repr.int = data.repr.and_then(|r| r.int); + + let mut repr_flags = ReprFlags::empty(); + if 
flags.is_box { + repr_flags.insert(ReprFlags::IS_LINEAR); + } + if data.repr.is_some_and(|r| r.c()) { + repr_flags.insert(ReprFlags::IS_C); + } + if data.repr.is_some_and(|r| r.simd()) { + repr_flags.insert(ReprFlags::IS_SIMD); + } + repr.flags = repr_flags; + + (flags, variants, repr) + } + }; + + AdtDef::new_(db, AdtDefInner { id: def_id, variants, flags, repr }) + } + + pub fn inner(&self) -> &AdtDefInner { + salsa::with_attached_database(|db| { + let inner = self.data_(db); + // SAFETY: ¯\_(ツ)_/¯ + unsafe { std::mem::transmute(inner) } + }) + .unwrap() + } + + pub fn is_enum(&self) -> bool { + self.inner().flags.is_enum + } + + #[inline] + pub fn repr(self) -> ReprOptions { + self.inner().repr + } + + /// Asserts this is a struct or union and returns its unique variant. + pub fn non_enum_variant(self) -> VariantDef { + assert!(self.inner().flags.is_struct || self.inner().flags.is_union); + self.inner().variants[0].1.clone() + } +} + +impl<'db> inherent::AdtDef> for AdtDef { + fn def_id(self) -> AdtIdWrapper { + self.inner().id.into() + } + + fn is_struct(self) -> bool { + self.inner().flags.is_struct + } + + fn is_phantom_data(self) -> bool { + self.inner().flags.is_phantom_data + } + + fn is_fundamental(self) -> bool { + self.inner().flags.is_fundamental + } + + fn struct_tail_ty( + self, + interner: DbInterner<'db>, + ) -> Option, Ty<'db>>> { + let db = interner.db(); + let hir_def::AdtId::StructId(struct_id) = self.inner().id else { + return None; + }; + let id: VariantId = struct_id.into(); + let field_types = interner.db().field_types_ns(id); + + field_types.iter().last().map(|f| *f.1) + } + + fn all_field_tys( + self, + interner: DbInterner<'db>, + ) -> EarlyBinder, impl IntoIterator>> { + let db = interner.db(); + // FIXME: this is disabled just to match the behavior with chalk right now + let field_tys = |id: VariantId| { + let variant_data = id.fields(db); + let fields = if variant_data.fields().is_empty() { + vec![] + } else { + let field_types = db.field_types_ns(id); + variant_data + .fields() + .iter() + .map(|(idx, _)| { + let ty = field_types[idx]; + ty.skip_binder() + }) + .collect() + }; + }; + let field_tys = |id: VariantId| vec![]; + let tys: Vec<_> = match self.inner().id { + hir_def::AdtId::StructId(id) => field_tys(id.into()), + hir_def::AdtId::UnionId(id) => field_tys(id.into()), + hir_def::AdtId::EnumId(id) => id + .enum_variants(db) + .variants + .iter() + .flat_map(|&(variant_id, _, _)| field_tys(variant_id.into())) + .collect(), + }; + + EarlyBinder::bind(tys) + } + + fn sizedness_constraint( + self, + interner: DbInterner<'db>, + sizedness: SizedTraitKind, + ) -> Option, Ty<'db>>> { + if self.is_struct() { + let tail_ty = self.all_field_tys(interner).skip_binder().into_iter().last()?; + + let constraint_ty = sizedness_constraint_for_ty(interner, sizedness, tail_ty)?; + + Some(EarlyBinder::bind(constraint_ty)) + } else { + None + } + } + + fn destructor( + self, + interner: DbInterner<'db>, + ) -> Option { + // FIXME(next-solver) + None + } + + fn is_manually_drop(self) -> bool { + self.inner().flags.is_manually_drop + } +} + +impl fmt::Debug for AdtDef { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + salsa::with_attached_database(|db| match self.inner().id { + AdtId::StructId(struct_id) => { + let data = db.as_view::().struct_signature(struct_id); + f.write_str(data.name.as_str()) + } + AdtId::UnionId(union_id) => { + let data = db.as_view::().union_signature(union_id); + f.write_str(data.name.as_str()) + } + AdtId::EnumId(enum_id) => { + let 
data = db.as_view::().enum_signature(enum_id); + f.write_str(data.name.as_str()) + } + }) + .unwrap_or_else(|| f.write_str(&format!("AdtDef({:?})", self.inner().id))) + } +} + +#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] +pub struct Features; + +impl<'db> inherent::Features> for Features { + fn generic_const_exprs(self) -> bool { + false + } + + fn coroutine_clone(self) -> bool { + false + } + + fn associated_const_equality(self) -> bool { + false + } + + fn feature_bound_holds_in_crate(self, symbol: ()) -> bool { + false + } +} + +#[derive(Debug, Clone, Eq, PartialEq, Hash)] +pub struct UnsizingParams(pub(crate) DenseBitSet); + +impl std::ops::Deref for UnsizingParams { + type Target = DenseBitSet; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +pub type PatternKind<'db> = rustc_type_ir::PatternKind>; + +#[salsa::interned(constructor = new_, debug)] +pub struct Pattern<'db> { + #[returns(ref)] + kind_: InternedWrapperNoDebug>, +} + +impl<'db> std::fmt::Debug for InternedWrapperNoDebug> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} + +impl<'db> Pattern<'db> { + pub fn new(interner: DbInterner<'db>, kind: PatternKind<'db>) -> Self { + Pattern::new_(interner.db(), InternedWrapperNoDebug(kind)) + } + + pub fn inner(&self) -> &PatternKind<'db> { + salsa::with_attached_database(|db| { + let inner = &self.kind_(db).0; + // SAFETY: The caller already has access to a `Ty<'db>`, so borrowchecking will + // make sure that our returned value is valid for the lifetime `'db`. + unsafe { std::mem::transmute(inner) } + }) + .unwrap() + } +} + +impl<'db> Flags for Pattern<'db> { + fn flags(&self) -> rustc_type_ir::TypeFlags { + match self.inner() { + PatternKind::Range { start, end } => { + FlagComputation::for_const_kind(&start.kind()).flags + | FlagComputation::for_const_kind(&end.kind()).flags + } + PatternKind::Or(pats) => { + let mut flags = pats.as_slice()[0].flags(); + for pat in pats.as_slice()[1..].iter() { + flags |= pat.flags(); + } + flags + } + } + } + + fn outer_exclusive_binder(&self) -> rustc_type_ir::DebruijnIndex { + match self.inner() { + PatternKind::Range { start, end } => { + start.outer_exclusive_binder().max(end.outer_exclusive_binder()) + } + PatternKind::Or(pats) => { + let mut idx = pats.as_slice()[0].outer_exclusive_binder(); + for pat in pats.as_slice()[1..].iter() { + idx = idx.max(pat.outer_exclusive_binder()); + } + idx + } + } + } +} + +impl<'db> rustc_type_ir::inherent::IntoKind for Pattern<'db> { + type Kind = rustc_type_ir::PatternKind>; + fn kind(self) -> Self::Kind { + *self.inner() + } +} + +impl<'db> rustc_type_ir::relate::Relate> for Pattern<'db> { + fn relate>>( + relation: &mut R, + a: Self, + b: Self, + ) -> rustc_type_ir::relate::RelateResult, Self> { + let tcx = relation.cx(); + match (a.kind(), b.kind()) { + ( + PatternKind::Range { start: start_a, end: end_a }, + PatternKind::Range { start: start_b, end: end_b }, + ) => { + let start = relation.relate(start_a, start_b)?; + let end = relation.relate(end_a, end_b)?; + Ok(Pattern::new(tcx, PatternKind::Range { start, end })) + } + (PatternKind::Or(a), PatternKind::Or(b)) => { + if a.len() != b.len() { + return Err(TypeError::Mismatch); + } + let pats = CollectAndApply::collect_and_apply( + std::iter::zip(a.iter(), b.iter()).map(|(a, b)| relation.relate(a, b)), + |g| PatList::new_from_iter(tcx, g.iter().cloned()), + )?; + Ok(Pattern::new(tcx, PatternKind::Or(pats))) + } + (PatternKind::Range { .. 
} | PatternKind::Or(_), _) => Err(TypeError::Mismatch), + } + } +} + +interned_vec_db!(PatList, Pattern); + +macro_rules! as_lang_item { + ( + $solver_enum:ident, $var:ident; + + ignore = { + $( $ignore:ident ),* $(,)? + } + + $( $variant:ident ),* $(,)? + ) => {{ + // Ensure exhaustiveness. + if let Some(it) = None::<$solver_enum> { + match it { + $( $solver_enum::$variant => {} )* + $( $solver_enum::$ignore => {} )* + } + } + match $var { + $( LangItem::$variant => Some($solver_enum::$variant), )* + _ => None + } + }}; +} + +impl<'db> rustc_type_ir::Interner for DbInterner<'db> { + type DefId = SolverDefId; + type LocalDefId = SolverDefId; + type LocalDefIds = SolverDefIds; + type TraitId = TraitIdWrapper; + type ForeignId = TypeAliasIdWrapper; + type FunctionId = CallableIdWrapper; + type ClosureId = ClosureIdWrapper; + type CoroutineClosureId = CoroutineIdWrapper; + type CoroutineId = CoroutineIdWrapper; + type AdtId = AdtIdWrapper; + type ImplId = ImplIdWrapper; + type Span = Span; + + type GenericArgs = GenericArgs<'db>; + type GenericArgsSlice = GenericArgs<'db>; + type GenericArg = GenericArg<'db>; + + type Term = Term<'db>; + + type BoundVarKinds = BoundVarKinds; + type BoundVarKind = BoundVarKind; + + type PredefinedOpaques = PredefinedOpaques<'db>; + + fn mk_predefined_opaques_in_body( + self, + data: rustc_type_ir::solve::PredefinedOpaquesData, + ) -> Self::PredefinedOpaques { + PredefinedOpaques::new(self, data) + } + + type CanonicalVarKinds = CanonicalVars<'db>; + + fn mk_canonical_var_kinds( + self, + kinds: &[rustc_type_ir::CanonicalVarKind], + ) -> Self::CanonicalVarKinds { + CanonicalVars::new_from_iter(self, kinds.iter().cloned()) + } + + type ExternalConstraints = ExternalConstraints<'db>; + + fn mk_external_constraints( + self, + data: rustc_type_ir::solve::ExternalConstraintsData, + ) -> Self::ExternalConstraints { + ExternalConstraints::new(self, data) + } + + type DepNodeIndex = DepNodeIndex; + + type Tracked = Tracked; + + type Ty = Ty<'db>; + type Tys = Tys<'db>; + type FnInputTys = Tys<'db>; + type ParamTy = ParamTy; + type BoundTy = BoundTy; + type PlaceholderTy = PlaceholderTy; + type Symbol = (); + + type ErrorGuaranteed = ErrorGuaranteed; + type BoundExistentialPredicates = BoundExistentialPredicates<'db>; + type AllocId = AllocId; + type Pat = Pattern<'db>; + type PatList = PatList<'db>; + type Safety = Safety; + type Abi = FnAbi; + + type Const = Const<'db>; + type PlaceholderConst = PlaceholderConst; + type ParamConst = ParamConst; + type BoundConst = BoundConst; + type ValueConst = ValueConst<'db>; + type ValTree = Valtree<'db>; + type ExprConst = ExprConst; + + type Region = Region<'db>; + type EarlyParamRegion = EarlyParamRegion; + type LateParamRegion = LateParamRegion; + type BoundRegion = BoundRegion; + type PlaceholderRegion = PlaceholderRegion; + + type RegionAssumptions = RegionAssumptions<'db>; + + type ParamEnv = ParamEnv<'db>; + type Predicate = Predicate<'db>; + type Clause = Clause<'db>; + type Clauses = Clauses<'db>; + + type GenericsOf = Generics; + + type VariancesOf = VariancesOf; + + type AdtDef = AdtDef; + + type Features = Features; + + fn mk_args(self, args: &[Self::GenericArg]) -> Self::GenericArgs { + GenericArgs::new_from_iter(self, args.iter().cloned()) + } + + fn mk_args_from_iter(self, args: I) -> T::Output + where + I: Iterator, + T: rustc_type_ir::CollectAndApply, + { + CollectAndApply::collect_and_apply(args, |g| { + GenericArgs::new_from_iter(self, g.iter().cloned()) + }) + } + + type UnsizingParams = UnsizingParams; + + fn 
mk_tracked( + self, + data: T, + dep_node: Self::DepNodeIndex, + ) -> Self::Tracked { + Tracked(data) + } + + fn get_tracked(self, tracked: &Self::Tracked) -> T { + tracked.0.clone() + } + + fn with_cached_task(self, task: impl FnOnce() -> T) -> (T, Self::DepNodeIndex) { + (task(), DepNodeIndex) + } + + fn with_global_cache( + self, + f: impl FnOnce(&mut rustc_type_ir::search_graph::GlobalCache) -> R, + ) -> R { + salsa::with_attached_database(|db| { + tls_cache::with_cache( + unsafe { + std::mem::transmute::<&dyn HirDatabase, &'db dyn HirDatabase>( + db.as_view::(), + ) + }, + f, + ) + }) + .unwrap() + } + + fn canonical_param_env_cache_get_or_insert( + self, + param_env: Self::ParamEnv, + f: impl FnOnce() -> rustc_type_ir::CanonicalParamEnvCacheEntry, + from_entry: impl FnOnce(&rustc_type_ir::CanonicalParamEnvCacheEntry) -> R, + ) -> R { + from_entry(&f()) + } + + fn evaluation_is_concurrent(&self) -> bool { + false + } + + fn expand_abstract_consts>(self, _: T) -> T { + unreachable!("only used by the old trait solver in rustc"); + } + + fn generics_of(self, def_id: Self::DefId) -> Self::GenericsOf { + generics(self.db(), def_id) + } + + fn variances_of(self, def_id: Self::DefId) -> Self::VariancesOf { + let generic_def = match def_id { + SolverDefId::FunctionId(def_id) => def_id.into(), + SolverDefId::AdtId(def_id) => def_id.into(), + SolverDefId::Ctor(Ctor::Struct(def_id)) => def_id.into(), + SolverDefId::Ctor(Ctor::Enum(def_id)) => def_id.loc(self.db).parent.into(), + SolverDefId::InternedOpaqueTyId(_def_id) => { + // FIXME(next-solver): track variances + // + // We compute them based on the only `Ty` level info in rustc, + // move `variances_of_opaque` into `rustc_next_trait_solver` for reuse. + return VariancesOf::new_from_iter( + self, + (0..self.generics_of(def_id).count()).map(|_| Variance::Invariant), + ); + } + _ => return VariancesOf::new_from_iter(self, []), + }; + VariancesOf::new_from_iter( + self, + self.db() + .variances_of(generic_def) + .as_deref() + .unwrap_or_default() + .iter() + .map(|v| v.to_nextsolver(self)), + ) + } + + fn type_of(self, def_id: Self::DefId) -> EarlyBinder { + match def_id { + SolverDefId::TypeAliasId(id) => { + use hir_def::Lookup; + match id.lookup(self.db()).container { + ItemContainerId::ImplId(it) => it, + _ => panic!("assoc ty value should be in impl"), + }; + self.db().ty_ns(id.into()) + } + SolverDefId::AdtId(id) => self.db().ty_ns(id.into()), + // FIXME(next-solver): This uses the types of `query mir_borrowck` in rustc. + // + // We currently always use the type from HIR typeck which ignores regions. This + // should be fine. 
+ SolverDefId::InternedOpaqueTyId(_) => self.type_of_opaque_hir_typeck(def_id), + SolverDefId::FunctionId(id) => self.db.value_ty_ns(id.into()).unwrap(), + SolverDefId::Ctor(id) => { + let id = match id { + Ctor::Struct(id) => id.into(), + Ctor::Enum(id) => id.into(), + }; + self.db + .value_ty_ns(id) + .expect("`SolverDefId::Ctor` should have a function-like ctor") + } + _ => panic!("Unexpected def_id `{def_id:?}` provided for `type_of`"), + } + } + + fn adt_def(self, def_id: Self::AdtId) -> Self::AdtDef { + AdtDef::new(def_id.0, self) + } + + fn alias_ty_kind(self, alias: rustc_type_ir::AliasTy) -> AliasTyKind { + match alias.def_id { + SolverDefId::InternedOpaqueTyId(_) => AliasTyKind::Opaque, + SolverDefId::TypeAliasId(_) => AliasTyKind::Projection, + _ => unimplemented!("Unexpected alias: {:?}", alias.def_id), + } + } + + fn alias_term_kind( + self, + alias: rustc_type_ir::AliasTerm, + ) -> rustc_type_ir::AliasTermKind { + match alias.def_id { + SolverDefId::InternedOpaqueTyId(_) => AliasTermKind::OpaqueTy, + SolverDefId::TypeAliasId(_) => AliasTermKind::ProjectionTy, + SolverDefId::ConstId(_) => AliasTermKind::UnevaluatedConst, + _ => unimplemented!("Unexpected alias: {:?}", alias.def_id), + } + } + + fn trait_ref_and_own_args_for_alias( + self, + def_id: Self::DefId, + args: Self::GenericArgs, + ) -> (rustc_type_ir::TraitRef, Self::GenericArgsSlice) { + let trait_def_id = self.parent(def_id); + let trait_generics = self.generics_of(trait_def_id); + let trait_args = GenericArgs::new_from_iter( + self, + args.as_slice()[0..trait_generics.own_params.len()].iter().cloned(), + ); + let alias_args = + GenericArgs::new_from_iter(self, args.iter().skip(trait_generics.own_params.len())); + (TraitRef::new_from_args(self, trait_def_id.try_into().unwrap(), trait_args), alias_args) + } + + fn check_args_compatible(self, def_id: Self::DefId, args: Self::GenericArgs) -> bool { + // FIXME + true + } + + fn debug_assert_args_compatible(self, def_id: Self::DefId, args: Self::GenericArgs) {} + + fn debug_assert_existential_args_compatible( + self, + def_id: Self::DefId, + args: Self::GenericArgs, + ) { + } + + fn mk_type_list_from_iter(self, args: I) -> T::Output + where + I: Iterator, + T: rustc_type_ir::CollectAndApply, + { + CollectAndApply::collect_and_apply(args, |g| Tys::new_from_iter(self, g.iter().cloned())) + } + + fn parent(self, def_id: Self::DefId) -> Self::DefId { + use hir_def::Lookup; + + let container = match def_id { + SolverDefId::FunctionId(it) => it.lookup(self.db()).container, + SolverDefId::TypeAliasId(it) => it.lookup(self.db()).container, + SolverDefId::ConstId(it) => it.lookup(self.db()).container, + SolverDefId::InternedClosureId(it) => { + return self + .db() + .lookup_intern_closure(it) + .0 + .as_generic_def_id(self.db()) + .unwrap() + .into(); + } + SolverDefId::InternedCoroutineId(it) => { + return self + .db() + .lookup_intern_coroutine(it) + .0 + .as_generic_def_id(self.db()) + .unwrap() + .into(); + } + SolverDefId::StaticId(_) + | SolverDefId::AdtId(_) + | SolverDefId::TraitId(_) + | SolverDefId::ImplId(_) + | SolverDefId::Ctor(..) + | SolverDefId::InternedOpaqueTyId(..) 
=> panic!(), + }; + + match container { + ItemContainerId::ImplId(it) => it.into(), + ItemContainerId::TraitId(it) => it.into(), + ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => panic!(), + } + } + + fn recursion_limit(self) -> usize { + 50 + } + + fn features(self) -> Self::Features { + Features + } + + fn fn_sig( + self, + def_id: Self::FunctionId, + ) -> EarlyBinder>> { + self.db().callable_item_signature_ns(def_id.0) + } + + fn coroutine_movability(self, def_id: Self::CoroutineId) -> rustc_ast_ir::Movability { + unimplemented!() + } + + fn coroutine_for_closure(self, def_id: Self::CoroutineId) -> Self::CoroutineId { + unimplemented!() + } + + fn generics_require_sized_self(self, def_id: Self::DefId) -> bool { + let sized_trait = + LangItem::Sized.resolve_trait(self.db(), self.krate.expect("Must have self.krate")); + let Some(sized_id) = sized_trait else { + return false; /* No Sized trait, can't require it! */ + }; + let sized_def_id = sized_id.into(); + + // Search for a predicate like `Self : Sized` amongst the trait bounds. + let predicates = self.predicates_of(def_id); + elaborate(self, predicates.iter_identity()).any(|pred| match pred.kind().skip_binder() { + ClauseKind::Trait(ref trait_pred) => { + trait_pred.def_id() == sized_def_id + && matches!( + trait_pred.self_ty().kind(), + TyKind::Param(ParamTy { index: 0, .. }) + ) + } + ClauseKind::RegionOutlives(_) + | ClauseKind::TypeOutlives(_) + | ClauseKind::Projection(_) + | ClauseKind::ConstArgHasType(_, _) + | ClauseKind::WellFormed(_) + | ClauseKind::ConstEvaluatable(_) + | ClauseKind::HostEffect(..) + | ClauseKind::UnstableFeature(_) => false, + }) + } + + #[tracing::instrument(skip(self), ret)] + fn item_bounds( + self, + def_id: Self::DefId, + ) -> EarlyBinder> { + explicit_item_bounds(self, def_id).map_bound(|bounds| { + Clauses::new_from_iter(self, elaborate(self, bounds).collect::>()) + }) + } + + #[tracing::instrument(skip(self), ret)] + fn item_self_bounds( + self, + def_id: Self::DefId, + ) -> EarlyBinder> { + explicit_item_bounds(self, def_id).map_bound(|bounds| { + Clauses::new_from_iter( + self, + elaborate(self, bounds).filter_only_self().collect::>(), + ) + }) + } + + fn item_non_self_bounds( + self, + def_id: Self::DefId, + ) -> EarlyBinder> { + let all_bounds: FxHashSet<_> = self.item_bounds(def_id).skip_binder().into_iter().collect(); + let own_bounds: FxHashSet<_> = + self.item_self_bounds(def_id).skip_binder().into_iter().collect(); + if all_bounds.len() == own_bounds.len() { + EarlyBinder::bind(Clauses::new_from_iter(self, [])) + } else { + EarlyBinder::bind(Clauses::new_from_iter( + self, + all_bounds.difference(&own_bounds).cloned(), + )) + } + } + + #[tracing::instrument(level = "debug", skip(self), ret)] + fn predicates_of( + self, + def_id: Self::DefId, + ) -> EarlyBinder> { + let predicates = self.db().generic_predicates_ns(def_id.try_into().unwrap()); + let predicates: Vec<_> = predicates.iter().cloned().collect(); + EarlyBinder::bind(predicates.into_iter()) + } + + #[tracing::instrument(level = "debug", skip(self), ret)] + fn own_predicates_of( + self, + def_id: Self::DefId, + ) -> EarlyBinder> { + let predicates = self.db().generic_predicates_without_parent_ns(def_id.try_into().unwrap()); + let predicates: Vec<_> = predicates.iter().cloned().collect(); + EarlyBinder::bind(predicates.into_iter()) + } + + #[tracing::instrument(skip(self), ret)] + fn explicit_super_predicates_of( + self, + def_id: Self::TraitId, + ) -> EarlyBinder> { + let is_self = |ty: Ty<'db>| match ty.kind() { 
+ rustc_type_ir::TyKind::Param(param) => param.index == 0, + _ => false, + }; + + let predicates: Vec<(Clause<'db>, Span)> = self + .db() + .generic_predicates_ns(def_id.0.into()) + .iter() + .filter(|p| match p.kind().skip_binder() { + // rustc has the following assertion: + // https://github.com/rust-lang/rust/blob/52618eb338609df44978b0ca4451ab7941fd1c7a/compiler/rustc_hir_analysis/src/hir_ty_lowering/bounds.rs#L525-L608 + rustc_type_ir::ClauseKind::Trait(it) => is_self(it.self_ty()), + rustc_type_ir::ClauseKind::TypeOutlives(it) => is_self(it.0), + rustc_type_ir::ClauseKind::Projection(it) => is_self(it.self_ty()), + rustc_type_ir::ClauseKind::HostEffect(it) => is_self(it.self_ty()), + _ => false, + }) + .cloned() + .map(|p| (p, Span::dummy())) + .collect(); + EarlyBinder::bind(predicates) + } + + #[tracing::instrument(skip(self), ret)] + fn explicit_implied_predicates_of( + self, + def_id: Self::DefId, + ) -> EarlyBinder> { + fn is_self_or_assoc(ty: Ty<'_>) -> bool { + match ty.kind() { + rustc_type_ir::TyKind::Param(param) => param.index == 0, + rustc_type_ir::TyKind::Alias(rustc_type_ir::AliasTyKind::Projection, alias) => { + is_self_or_assoc(alias.self_ty()) + } + _ => false, + } + } + + let predicates: Vec<(Clause<'db>, Span)> = self + .db() + .generic_predicates_ns(def_id.try_into().unwrap()) + .iter() + .filter(|p| match p.kind().skip_binder() { + rustc_type_ir::ClauseKind::Trait(it) => is_self_or_assoc(it.self_ty()), + rustc_type_ir::ClauseKind::TypeOutlives(it) => is_self_or_assoc(it.0), + rustc_type_ir::ClauseKind::Projection(it) => is_self_or_assoc(it.self_ty()), + rustc_type_ir::ClauseKind::HostEffect(it) => is_self_or_assoc(it.self_ty()), + // FIXME: Not sure is this correct to allow other clauses but we might replace + // `generic_predicates_ns` query here with something closer to rustc's + // `implied_bounds_with_filter`, which is more granular lowering than this + // "lower at once and then filter" implementation. 
+ _ => true, + }) + .cloned() + .map(|p| (p, Span::dummy())) + .collect(); + EarlyBinder::bind(predicates) + } + + fn impl_super_outlives( + self, + impl_id: Self::ImplId, + ) -> EarlyBinder> { + let trait_ref = self.db().impl_trait_ns(impl_id.0).expect("expected an impl of trait"); + trait_ref.map_bound(|trait_ref| { + let clause: Clause<'_> = trait_ref.upcast(self); + Clauses::new_from_iter( + self, + rustc_type_ir::elaborate::elaborate(self, [clause]).filter(|clause| { + matches!( + clause.kind().skip_binder(), + ClauseKind::TypeOutlives(_) | ClauseKind::RegionOutlives(_) + ) + }), + ) + }) + } + + fn const_conditions( + self, + def_id: Self::DefId, + ) -> EarlyBinder< + Self, + impl IntoIterator>>, + > { + EarlyBinder::bind([unimplemented!()]) + } + + fn has_target_features(self, def_id: Self::FunctionId) -> bool { + false + } + + fn require_lang_item(self, lang_item: SolverLangItem) -> Self::DefId { + let lang_item = match lang_item { + SolverLangItem::AsyncFnKindUpvars => unimplemented!(), + SolverLangItem::AsyncFnOnceOutput => LangItem::AsyncFnOnceOutput, + SolverLangItem::CallOnceFuture => LangItem::CallOnceFuture, + SolverLangItem::CallRefFuture => LangItem::CallRefFuture, + SolverLangItem::CoroutineReturn => LangItem::CoroutineReturn, + SolverLangItem::CoroutineYield => LangItem::CoroutineYield, + SolverLangItem::DynMetadata => LangItem::DynMetadata, + SolverLangItem::FutureOutput => LangItem::FutureOutput, + SolverLangItem::Metadata => LangItem::Metadata, + }; + let target = hir_def::lang_item::lang_item( + self.db(), + self.krate.expect("Must have self.krate"), + lang_item, + ) + .unwrap_or_else(|| panic!("Lang item {lang_item:?} required but not found.")); + match target { + hir_def::lang_item::LangItemTarget::EnumId(enum_id) => enum_id.into(), + hir_def::lang_item::LangItemTarget::Function(function_id) => function_id.into(), + hir_def::lang_item::LangItemTarget::ImplDef(impl_id) => impl_id.into(), + hir_def::lang_item::LangItemTarget::Static(static_id) => static_id.into(), + hir_def::lang_item::LangItemTarget::Struct(struct_id) => struct_id.into(), + hir_def::lang_item::LangItemTarget::Union(union_id) => union_id.into(), + hir_def::lang_item::LangItemTarget::TypeAlias(type_alias_id) => type_alias_id.into(), + hir_def::lang_item::LangItemTarget::Trait(trait_id) => trait_id.into(), + hir_def::lang_item::LangItemTarget::EnumVariant(enum_variant_id) => unimplemented!(), + } + } + + fn require_trait_lang_item(self, lang_item: SolverTraitLangItem) -> TraitIdWrapper { + let lang_item = match lang_item { + SolverTraitLangItem::AsyncFn => LangItem::AsyncFn, + SolverTraitLangItem::AsyncFnKindHelper => unimplemented!(), + SolverTraitLangItem::AsyncFnMut => LangItem::AsyncFnMut, + SolverTraitLangItem::AsyncFnOnce => LangItem::AsyncFnOnce, + SolverTraitLangItem::AsyncFnOnceOutput => LangItem::AsyncFnOnceOutput, + SolverTraitLangItem::AsyncIterator => unimplemented!(), + SolverTraitLangItem::Clone => LangItem::Clone, + SolverTraitLangItem::Copy => LangItem::Copy, + SolverTraitLangItem::Coroutine => LangItem::Coroutine, + SolverTraitLangItem::Destruct => LangItem::Destruct, + SolverTraitLangItem::DiscriminantKind => LangItem::DiscriminantKind, + SolverTraitLangItem::Drop => LangItem::Drop, + SolverTraitLangItem::Fn => LangItem::Fn, + SolverTraitLangItem::FnMut => LangItem::FnMut, + SolverTraitLangItem::FnOnce => LangItem::FnOnce, + SolverTraitLangItem::FnPtrTrait => LangItem::FnPtrTrait, + SolverTraitLangItem::FusedIterator => unimplemented!(), + SolverTraitLangItem::Future => 
LangItem::Future, + SolverTraitLangItem::Iterator => LangItem::Iterator, + SolverTraitLangItem::PointeeTrait => LangItem::PointeeTrait, + SolverTraitLangItem::Sized => LangItem::Sized, + SolverTraitLangItem::MetaSized => LangItem::MetaSized, + SolverTraitLangItem::PointeeSized => LangItem::PointeeSized, + SolverTraitLangItem::TransmuteTrait => LangItem::TransmuteTrait, + SolverTraitLangItem::Tuple => LangItem::Tuple, + SolverTraitLangItem::Unpin => LangItem::Unpin, + SolverTraitLangItem::Unsize => LangItem::Unsize, + SolverTraitLangItem::BikeshedGuaranteedNoDrop => { + unimplemented!() + } + }; + lang_item + .resolve_trait(self.db(), self.krate.expect("Must have self.krate")) + .unwrap_or_else(|| panic!("Lang item {lang_item:?} required but not found.")) + .into() + } + + fn require_adt_lang_item(self, lang_item: SolverAdtLangItem) -> AdtIdWrapper { + let lang_item = match lang_item { + SolverAdtLangItem::Option => LangItem::Option, + SolverAdtLangItem::Poll => LangItem::Poll, + }; + lang_item + .resolve_adt(self.db(), self.krate.expect("Must have self.krate")) + .unwrap_or_else(|| panic!("Lang item {lang_item:?} required but not found.")) + .into() + } + + fn is_lang_item(self, def_id: Self::DefId, lang_item: SolverLangItem) -> bool { + self.as_lang_item(def_id) + .map_or(false, |l| std::mem::discriminant(&l) == std::mem::discriminant(&lang_item)) + } + + fn is_trait_lang_item(self, def_id: Self::TraitId, lang_item: SolverTraitLangItem) -> bool { + self.as_trait_lang_item(def_id) + .map_or(false, |l| std::mem::discriminant(&l) == std::mem::discriminant(&lang_item)) + } + + fn is_adt_lang_item(self, def_id: Self::AdtId, lang_item: SolverAdtLangItem) -> bool { + // FIXME: derive PartialEq on SolverTraitLangItem + self.as_adt_lang_item(def_id) + .map_or(false, |l| std::mem::discriminant(&l) == std::mem::discriminant(&lang_item)) + } + + fn as_lang_item(self, def_id: Self::DefId) -> Option { + let def_id: AttrDefId = match def_id { + SolverDefId::TraitId(id) => id.into(), + SolverDefId::TypeAliasId(id) => id.into(), + SolverDefId::AdtId(id) => id.into(), + _ => panic!("Unexpected SolverDefId in as_lang_item"), + }; + let lang_item = self.db().lang_attr(def_id)?; + as_lang_item!( + SolverLangItem, lang_item; + + ignore = { + AsyncFnKindUpvars, + } + + Metadata, + DynMetadata, + CoroutineReturn, + CoroutineYield, + FutureOutput, + AsyncFnOnceOutput, + CallRefFuture, + CallOnceFuture, + AsyncFnOnceOutput, + ) + } + + fn as_trait_lang_item(self, def_id: Self::TraitId) -> Option { + let def_id: AttrDefId = def_id.0.into(); + let lang_item = self.db().lang_attr(def_id)?; + as_lang_item!( + SolverTraitLangItem, lang_item; + + ignore = { + AsyncFnKindHelper, + AsyncIterator, + BikeshedGuaranteedNoDrop, + FusedIterator, + } + + Sized, + MetaSized, + PointeeSized, + Unsize, + Copy, + Clone, + DiscriminantKind, + PointeeTrait, + FnPtrTrait, + Drop, + Destruct, + TransmuteTrait, + Fn, + FnMut, + FnOnce, + Future, + Coroutine, + Unpin, + Tuple, + Iterator, + AsyncFn, + AsyncFnMut, + AsyncFnOnce, + AsyncFnOnceOutput, + AsyncFnOnceOutput, + ) + } + + fn as_adt_lang_item(self, def_id: Self::AdtId) -> Option { + let def_id: AttrDefId = def_id.0.into(); + let lang_item = self.db().lang_attr(def_id)?; + as_lang_item!( + SolverAdtLangItem, lang_item; + + ignore = {} + + Option, + Poll, + ) + } + + fn associated_type_def_ids(self, def_id: Self::DefId) -> impl IntoIterator { + let trait_ = match def_id { + SolverDefId::TraitId(id) => id, + _ => unreachable!(), + }; + 
trait_.trait_items(self.db()).associated_types().map(|id| id.into()) + } + + fn for_each_relevant_impl( + self, + trait_: Self::TraitId, + self_ty: Self::Ty, + mut f: impl FnMut(Self::ImplId), + ) { + let trait_ = trait_.0; + let self_ty_fp = TyFingerprint::for_trait_impl_ns(&self_ty); + let fps: &[TyFingerprint] = match self_ty.kind() { + TyKind::Infer(InferTy::IntVar(..)) => &ALL_INT_FPS, + TyKind::Infer(InferTy::FloatVar(..)) => &ALL_FLOAT_FPS, + _ => self_ty_fp.as_slice(), + }; + + if fps.is_empty() { + for_trait_impls( + self.db(), + self.krate.expect("Must have self.krate"), + self.block, + trait_, + self_ty_fp, + |impls| { + for i in impls.for_trait(trait_) { + use rustc_type_ir::TypeVisitable; + let contains_errors = self.db().impl_trait_ns(i).map_or(false, |b| { + b.skip_binder().visit_with(&mut ContainsTypeErrors).is_break() + }); + if contains_errors { + continue; + } + + f(i.into()); + } + ControlFlow::Continue(()) + }, + ); + } else { + for_trait_impls( + self.db(), + self.krate.expect("Must have self.krate"), + self.block, + trait_, + self_ty_fp, + |impls| { + for fp in fps { + for i in impls.for_trait_and_self_ty(trait_, *fp) { + use rustc_type_ir::TypeVisitable; + let contains_errors = self.db().impl_trait_ns(i).map_or(false, |b| { + b.skip_binder().visit_with(&mut ContainsTypeErrors).is_break() + }); + if contains_errors { + continue; + } + + f(i.into()); + } + } + ControlFlow::Continue(()) + }, + ); + } + } + + fn for_each_blanket_impl(self, trait_def_id: Self::TraitId, mut f: impl FnMut(Self::ImplId)) { + let Some(krate) = self.krate else { return }; + + for impls in self.db.trait_impls_in_deps(krate).iter() { + for impl_id in impls.for_trait(trait_def_id.0) { + let impl_data = self.db.impl_signature(impl_id); + let self_ty_ref = &impl_data.store[impl_data.self_ty]; + if matches!(self_ty_ref, hir_def::type_ref::TypeRef::TypeParam(_)) { + f(impl_id.into()); + } + } + } + } + + fn has_item_definition(self, def_id: Self::DefId) -> bool { + // FIXME(next-solver): should check if the associated item has a value. 
+ true + } + + fn impl_is_default(self, impl_def_id: Self::ImplId) -> bool { + // FIXME + false + } + + #[tracing::instrument(skip(self), ret)] + fn impl_trait_ref( + self, + impl_id: Self::ImplId, + ) -> EarlyBinder> { + let db = self.db(); + db.impl_trait_ns(impl_id.0) + // ImplIds for impls where the trait ref can't be resolved should never reach trait solving + .expect("invalid impl passed to trait solver") + } + + fn impl_polarity(self, impl_id: Self::ImplId) -> rustc_type_ir::ImplPolarity { + let impl_data = self.db().impl_signature(impl_id.0); + if impl_data.flags.contains(ImplFlags::NEGATIVE) { + ImplPolarity::Negative + } else { + ImplPolarity::Positive + } + } + + fn trait_is_auto(self, trait_: Self::TraitId) -> bool { + let trait_data = self.db().trait_signature(trait_.0); + trait_data.flags.contains(TraitFlags::AUTO) + } + + fn trait_is_alias(self, trait_: Self::TraitId) -> bool { + let trait_data = self.db().trait_signature(trait_.0); + trait_data.flags.contains(TraitFlags::ALIAS) + } + + fn trait_is_dyn_compatible(self, trait_: Self::TraitId) -> bool { + crate::dyn_compatibility::dyn_compatibility(self.db(), trait_.0).is_none() + } + + fn trait_is_fundamental(self, trait_: Self::TraitId) -> bool { + let trait_data = self.db().trait_signature(trait_.0); + trait_data.flags.contains(TraitFlags::FUNDAMENTAL) + } + + fn trait_may_be_implemented_via_object(self, trait_def_id: Self::TraitId) -> bool { + // FIXME(next-solver): should check the `TraitFlags` for + // the `#[rustc_do_not_implement_via_object]` flag + true + } + + fn is_impl_trait_in_trait(self, def_id: Self::DefId) -> bool { + // FIXME(next-solver) + false + } + + fn delay_bug(self, msg: impl ToString) -> Self::ErrorGuaranteed { + panic!("Bug encountered in next-trait-solver.") + } + + fn is_general_coroutine(self, coroutine_def_id: Self::CoroutineId) -> bool { + // FIXME(next-solver) + true + } + + fn coroutine_is_async(self, coroutine_def_id: Self::CoroutineId) -> bool { + // FIXME(next-solver) + true + } + + fn coroutine_is_gen(self, coroutine_def_id: Self::CoroutineId) -> bool { + // FIXME(next-solver) + false + } + + fn coroutine_is_async_gen(self, coroutine_def_id: Self::CoroutineId) -> bool { + // FIXME(next-solver) + false + } + + fn unsizing_params_for_adt(self, id: Self::AdtId) -> Self::UnsizingParams { + let def = AdtDef::new(id.0, self); + let num_params = self.generics_of(id.into()).count(); + + let maybe_unsizing_param_idx = |arg: GenericArg<'db>| match arg.kind() { + GenericArgKind::Type(ty) => match ty.kind() { + rustc_type_ir::TyKind::Param(p) => Some(p.index), + _ => None, + }, + GenericArgKind::Lifetime(_) => None, + GenericArgKind::Const(ct) => match ct.kind() { + rustc_type_ir::ConstKind::Param(p) => Some(p.index), + _ => None, + }, + }; + + // The last field of the structure has to exist and contain type/const parameters. + let variant = def.non_enum_variant(); + let fields = variant.fields(self.db()); + let Some((tail_field, prefix_fields)) = fields.split_last() else { + return UnsizingParams(DenseBitSet::new_empty(num_params)); + }; + + let field_types = self.db().field_types_ns(variant.id()); + let mut unsizing_params = DenseBitSet::new_empty(num_params); + let ty = field_types[tail_field.0]; + for arg in ty.instantiate_identity().walk() { + if let Some(i) = maybe_unsizing_param_idx(arg) { + unsizing_params.insert(i); + } + } + + // Ensure none of the other fields mention the parameters used + // in unsizing. 
+ for field in prefix_fields { + for arg in field_types[field.0].instantiate_identity().walk() { + if let Some(i) = maybe_unsizing_param_idx(arg) { + unsizing_params.remove(i); + } + } + } + + UnsizingParams(unsizing_params) + } + + fn anonymize_bound_vars>( + self, + value: rustc_type_ir::Binder, + ) -> rustc_type_ir::Binder { + struct Anonymize<'a, 'db> { + interner: DbInterner<'db>, + map: &'a mut FxIndexMap, + } + impl<'db> BoundVarReplacerDelegate<'db> for Anonymize<'_, 'db> { + fn replace_region(&mut self, br: BoundRegion) -> Region<'db> { + let entry = self.map.entry(br.var); + let index = entry.index(); + let var = BoundVar::from_usize(index); + let kind = (*entry.or_insert_with(|| BoundVarKind::Region(BoundRegionKind::Anon))) + .expect_region(); + let br = BoundRegion { var, kind }; + Region::new_bound(self.interner, DebruijnIndex::ZERO, br) + } + fn replace_ty(&mut self, bt: BoundTy) -> Ty<'db> { + let entry = self.map.entry(bt.var); + let index = entry.index(); + let var = BoundVar::from_usize(index); + let kind = + (*entry.or_insert_with(|| BoundVarKind::Ty(BoundTyKind::Anon))).expect_ty(); + Ty::new_bound(self.interner, DebruijnIndex::ZERO, BoundTy { var, kind }) + } + fn replace_const(&mut self, bv: BoundConst) -> Const<'db> { + let entry = self.map.entry(bv.var); + let index = entry.index(); + let var = BoundVar::from_usize(index); + let () = (*entry.or_insert_with(|| BoundVarKind::Const)).expect_const(); + Const::new_bound(self.interner, DebruijnIndex::ZERO, BoundConst { var }) + } + } + + let mut map = Default::default(); + let delegate = Anonymize { interner: self, map: &mut map }; + let inner = self.replace_escaping_bound_vars_uncached(value.skip_binder(), delegate); + let bound_vars = CollectAndApply::collect_and_apply(map.into_values(), |xs| { + BoundVarKinds::new_from_iter(self, xs.iter().cloned()) + }); + Binder::bind_with_vars(inner, bound_vars) + } + + fn opaque_types_defined_by(self, defining_anchor: Self::LocalDefId) -> Self::LocalDefIds { + // FIXME(next-solver) + SolverDefIds::new_from_iter(self, []) + } + + fn alias_has_const_conditions(self, def_id: Self::DefId) -> bool { + // FIXME(next-solver) + false + } + + fn explicit_implied_const_bounds( + self, + def_id: Self::DefId, + ) -> EarlyBinder< + Self, + impl IntoIterator>>, + > { + // FIXME(next-solver) + EarlyBinder::bind([]) + } + + fn fn_is_const(self, id: Self::FunctionId) -> bool { + let id = match id.0 { + CallableDefId::FunctionId(id) => id, + _ => return false, + }; + self.db().function_signature(id).flags.contains(FnFlags::CONST) + } + + fn impl_is_const(self, def_id: Self::ImplId) -> bool { + false + } + + fn opt_alias_variances( + self, + kind: impl Into, + def_id: Self::DefId, + ) -> Option { + None + } + + fn type_of_opaque_hir_typeck(self, def_id: Self::LocalDefId) -> EarlyBinder { + match def_id { + SolverDefId::InternedOpaqueTyId(opaque) => { + let impl_trait_id = self.db().lookup_intern_impl_trait_id(opaque); + match impl_trait_id { + crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => { + let infer = self.db().infer(func.into()); + EarlyBinder::bind(infer.type_of_rpit[idx].to_nextsolver(self)) + } + crate::ImplTraitId::TypeAliasImplTrait(..) 
+ | crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => { + // FIXME(next-solver) + EarlyBinder::bind(Ty::new_error(self, ErrorGuaranteed)) + } + } + } + _ => panic!("Unexpected SolverDefId in type_of_opaque_hir_typeck"), + } + } + + fn coroutine_hidden_types( + self, + def_id: Self::CoroutineId, + ) -> EarlyBinder>> + { + // FIXME(next-solver) + unimplemented!() + } + + fn is_default_trait(self, def_id: Self::TraitId) -> bool { + self.as_trait_lang_item(def_id).map_or(false, |l| matches!(l, SolverTraitLangItem::Sized)) + } + + fn trait_is_coinductive(self, trait_: Self::TraitId) -> bool { + self.db().trait_signature(trait_.0).flags.contains(TraitFlags::COINDUCTIVE) + } + + fn trait_is_unsafe(self, trait_: Self::TraitId) -> bool { + self.db().trait_signature(trait_.0).flags.contains(TraitFlags::UNSAFE) + } + + fn impl_self_is_guaranteed_unsized(self, def_id: Self::ImplId) -> bool { + false + } + + fn impl_specializes(self, impl_def_id: Self::ImplId, victim_def_id: Self::ImplId) -> bool { + false + } + + fn next_trait_solver_globally(self) -> bool { + true + } + + fn opaque_types_and_coroutines_defined_by( + self, + defining_anchor: Self::LocalDefId, + ) -> Self::LocalDefIds { + // FIXME(next-solver) + unimplemented!() + } + + type Probe = rustc_type_ir::solve::inspect::Probe>; + fn mk_probe(self, probe: rustc_type_ir::solve::inspect::Probe) -> Self::Probe { + probe + } + fn evaluate_root_goal_for_proof_tree_raw( + self, + canonical_goal: rustc_type_ir::solve::CanonicalInput, + ) -> (rustc_type_ir::solve::QueryResult, Self::Probe) { + rustc_next_trait_solver::solve::evaluate_root_goal_for_proof_tree_raw_provider::< + SolverContext<'db>, + Self, + >(self, canonical_goal) + } +} + +impl<'db> DbInterner<'db> { + pub fn shift_bound_var_indices(self, bound_vars: usize, value: T) -> T + where + T: rustc_type_ir::TypeFoldable, + { + let shift_bv = |bv: BoundVar| BoundVar::from_usize(bv.as_usize() + bound_vars); + self.replace_escaping_bound_vars_uncached( + value, + FnMutDelegate { + regions: &mut |r: BoundRegion| { + Region::new_bound( + self, + DebruijnIndex::ZERO, + BoundRegion { var: shift_bv(r.var), kind: r.kind }, + ) + }, + types: &mut |t: BoundTy| { + Ty::new_bound( + self, + DebruijnIndex::ZERO, + BoundTy { var: shift_bv(t.var), kind: t.kind }, + ) + }, + consts: &mut |c| { + Const::new_bound(self, DebruijnIndex::ZERO, BoundConst { var: shift_bv(c.var) }) + }, + }, + ) + } + + pub fn replace_escaping_bound_vars_uncached>>( + self, + value: T, + delegate: impl BoundVarReplacerDelegate<'db>, + ) -> T { + if !value.has_escaping_bound_vars() { + value + } else { + let mut replacer = BoundVarReplacer::new(self, delegate); + value.fold_with(&mut replacer) + } + } + + pub fn replace_bound_vars_uncached>>( + self, + value: Binder<'db, T>, + delegate: impl BoundVarReplacerDelegate<'db>, + ) -> T { + self.replace_escaping_bound_vars_uncached(value.skip_binder(), delegate) + } + + pub fn mk_fn_sig( + self, + inputs: I, + output: Ty<'db>, + c_variadic: bool, + safety: Safety, + abi: FnAbi, + ) -> FnSig<'db> + where + I: IntoIterator>, + { + FnSig { + inputs_and_output: Tys::new_from_iter( + self, + inputs.into_iter().chain(std::iter::once(output)), + ), + c_variadic, + safety, + abi, + } + } +} + +macro_rules! 
TrivialTypeTraversalImpls { + ($($ty:ty,)+) => { + $( + impl<'db> rustc_type_ir::TypeFoldable> for $ty { + fn try_fold_with>>( + self, + _: &mut F, + ) -> ::std::result::Result { + Ok(self) + } + + #[inline] + fn fold_with>>( + self, + _: &mut F, + ) -> Self { + self + } + } + + impl<'db> rustc_type_ir::TypeVisitable> for $ty { + #[inline] + fn visit_with>>( + &self, + _: &mut F) + -> F::Result + { + ::output() + } + } + )+ + }; +} + +TrivialTypeTraversalImpls! { + SolverDefId, + TraitIdWrapper, + TypeAliasIdWrapper, + CallableIdWrapper, + ClosureIdWrapper, + CoroutineIdWrapper, + AdtIdWrapper, + ImplIdWrapper, + Pattern<'db>, + Safety, + FnAbi, + Span, + ParamConst, + ParamTy, + BoundRegion, + BoundVar, + Placeholder, + Placeholder, + Placeholder, +} + +mod tls_cache { + use crate::db::HirDatabase; + + use super::DbInterner; + use base_db::Nonce; + use rustc_type_ir::search_graph::GlobalCache; + use salsa::Revision; + use std::cell::RefCell; + + struct Cache { + cache: GlobalCache>, + revision: Revision, + db_nonce: Nonce, + } + + thread_local! { + static GLOBAL_CACHE: RefCell> = const { RefCell::new(None) }; + } + + pub(super) fn with_cache<'db, T>( + db: &'db dyn HirDatabase, + f: impl FnOnce(&mut GlobalCache>) -> T, + ) -> T { + GLOBAL_CACHE.with_borrow_mut(|handle| { + let (db_nonce, revision) = db.nonce_and_revision(); + let handle = match handle { + Some(handle) => { + if handle.revision != revision || db_nonce != handle.db_nonce { + *handle = Cache { cache: GlobalCache::default(), revision, db_nonce }; + } + handle + } + None => handle.insert(Cache { cache: GlobalCache::default(), revision, db_nonce }), + }; + + // SAFETY: No idea + f(unsafe { + std::mem::transmute::< + &mut GlobalCache>, + &mut GlobalCache>, + >(&mut handle.cache) + }) + }) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ir_print.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ir_print.rs new file mode 100644 index 0000000000000..407e157564397 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ir_print.rs @@ -0,0 +1,261 @@ +//! Things related to IR printing in the next-trait-solver. 
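[editor's note — illustrative sketch, not part of the patch] The `IrPrint` impls in this file all follow one shape: `print` delegates to `print_debug`, which resolves human-readable names through `salsa::with_attached_database` and falls back to printing the raw ids when no database is attached. A minimal std-only sketch of that fallback pattern, with a thread-local standing in for the attached database — `NAMES`, `DefId`, and `PrettyDef` are purely illustrative names:

use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt;

#[derive(Clone, Copy, Debug)]
struct DefId(u32);

thread_local! {
    // Stand-in for the attached salsa database: name information that may or
    // may not be available at the point where the value is printed.
    static NAMES: RefCell<Option<HashMap<u32, String>>> = RefCell::new(None);
}

struct PrettyDef(DefId);

impl fmt::Debug for PrettyDef {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let DefId(raw) = self.0;
        NAMES.with_borrow(|names| match names.as_ref().and_then(|m| m.get(&raw)) {
            // Ambient context available: print the resolved name.
            Some(name) => write!(f, "TraitRef({name})"),
            // No context attached: degrade gracefully to the raw id.
            None => write!(f, "TraitRef(DefId({raw}))"),
        })
    }
}

fn main() {
    println!("{:?}", PrettyDef(DefId(7))); // TraitRef(DefId(7))
    NAMES.with_borrow_mut(|names| *names = Some(HashMap::from([(7, "Clone".to_string())])));
    println!("{:?}", PrettyDef(DefId(7))); // TraitRef(Clone)
}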
+ +use std::any::type_name_of_val; + +use rustc_type_ir::inherent::SliceLike; +use rustc_type_ir::{self as ty, ir_print::IrPrint}; + +use crate::db::HirDatabase; + +use super::SolverDefId; +use super::interner::DbInterner; + +impl<'db> IrPrint> for DbInterner<'db> { + fn print(t: &ty::AliasTy, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Self::print_debug(t, fmt) + } + + fn print_debug(t: &ty::AliasTy, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + salsa::with_attached_database(|db| match t.def_id { + SolverDefId::TypeAliasId(id) => fmt.write_str(&format!( + "AliasTy({:?}[{:?}])", + db.as_view::().type_alias_signature(id).name.as_str(), + t.args + )), + SolverDefId::InternedOpaqueTyId(id) => { + fmt.write_str(&format!("AliasTy({:?}[{:?}])", id, t.args)) + } + _ => panic!("Expected TypeAlias or OpaqueTy."), + }) + .unwrap_or_else(|| fmt.write_str(&format!("AliasTy({:?}[{:?}])", t.def_id, t.args))) + } +} + +impl<'db> IrPrint> for DbInterner<'db> { + fn print(t: &ty::AliasTerm, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Self::print_debug(t, fmt) + } + + fn print_debug(t: &ty::AliasTerm, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + salsa::with_attached_database(|db| match t.def_id { + SolverDefId::TypeAliasId(id) => fmt.write_str(&format!( + "AliasTerm({:?}[{:?}])", + db.as_view::().type_alias_signature(id).name.as_str(), + t.args + )), + SolverDefId::InternedOpaqueTyId(id) => { + fmt.write_str(&format!("AliasTerm({:?}[{:?}])", id, t.args)) + } + _ => panic!("Expected TypeAlias or OpaqueTy."), + }) + .unwrap_or_else(|| fmt.write_str(&format!("AliasTerm({:?}[{:?}])", t.def_id, t.args))) + } +} +impl<'db> IrPrint> for DbInterner<'db> { + fn print(t: &ty::TraitRef, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Self::print_debug(t, fmt) + } + + fn print_debug(t: &ty::TraitRef, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + salsa::with_attached_database(|db| { + let trait_ = t.def_id.0; + let self_ty = &t.args.as_slice()[0]; + let trait_args = &t.args.as_slice()[1..]; + if trait_args.is_empty() { + fmt.write_str(&format!( + "{:?}: {}", + self_ty, + db.as_view::().trait_signature(trait_).name.as_str() + )) + } else { + fmt.write_str(&format!( + "{:?}: {}<{:?}>", + self_ty, + db.as_view::().trait_signature(trait_).name.as_str(), + trait_args + )) + } + }) + .unwrap_or_else(|| fmt.write_str(&format!("TraitRef({:?}[{:?}])", t.def_id, t.args))) + } +} +impl<'db> IrPrint> for DbInterner<'db> { + fn print(t: &ty::TraitPredicate, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Self::print_debug(t, fmt) + } + + fn print_debug( + t: &ty::TraitPredicate, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + fmt.write_str(&format!("TODO: {:?}", type_name_of_val(t))) + } +} +impl<'db> IrPrint> for DbInterner<'db> { + fn print( + t: &rustc_type_ir::HostEffectPredicate, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + Self::print_debug(t, fmt) + } + + fn print_debug( + t: &rustc_type_ir::HostEffectPredicate, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + fmt.write_str(&format!("TODO: {:?}", type_name_of_val(t))) + } +} +impl<'db> IrPrint> for DbInterner<'db> { + fn print( + t: &ty::ExistentialTraitRef, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + Self::print_debug(t, fmt) + } + + fn print_debug( + t: &ty::ExistentialTraitRef, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + salsa::with_attached_database(|db| { + let trait_ = t.def_id.0; + 
fmt.write_str(&format!( + "ExistentialTraitRef({:?}[{:?}])", + db.as_view::().trait_signature(trait_).name.as_str(), + t.args + )) + }) + .unwrap_or_else(|| { + fmt.write_str(&format!("ExistentialTraitRef({:?}[{:?}])", t.def_id, t.args)) + }) + } +} +impl<'db> IrPrint> for DbInterner<'db> { + fn print( + t: &ty::ExistentialProjection, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + Self::print_debug(t, fmt) + } + + fn print_debug( + t: &ty::ExistentialProjection, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + salsa::with_attached_database(|db| { + let id = match t.def_id { + SolverDefId::TypeAliasId(id) => id, + _ => panic!("Expected trait."), + }; + fmt.write_str(&format!( + "ExistentialProjection(({:?}[{:?}]) -> {:?})", + db.as_view::().type_alias_signature(id).name.as_str(), + t.args, + t.term + )) + }) + .unwrap_or_else(|| { + fmt.write_str(&format!( + "ExistentialProjection(({:?}[{:?}]) -> {:?})", + t.def_id, t.args, t.term + )) + }) + } +} +impl<'db> IrPrint> for DbInterner<'db> { + fn print( + t: &ty::ProjectionPredicate, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + Self::print_debug(t, fmt) + } + + fn print_debug( + t: &ty::ProjectionPredicate, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + salsa::with_attached_database(|db| { + let id = match t.projection_term.def_id { + SolverDefId::TypeAliasId(id) => id, + _ => panic!("Expected trait."), + }; + fmt.write_str(&format!( + "ProjectionPredicate(({:?}[{:?}]) -> {:?})", + db.as_view::().type_alias_signature(id).name.as_str(), + t.projection_term.args, + t.term + )) + }) + .unwrap_or_else(|| { + fmt.write_str(&format!( + "ProjectionPredicate(({:?}[{:?}]) -> {:?})", + t.projection_term.def_id, t.projection_term.args, t.term + )) + }) + } +} +impl<'db> IrPrint> for DbInterner<'db> { + fn print(t: &ty::NormalizesTo, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Self::print_debug(t, fmt) + } + + fn print_debug( + t: &ty::NormalizesTo, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + fmt.write_str(&format!("TODO: {:?}", type_name_of_val(t))) + } +} +impl<'db> IrPrint> for DbInterner<'db> { + fn print( + t: &ty::SubtypePredicate, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + Self::print_debug(t, fmt) + } + + fn print_debug( + t: &ty::SubtypePredicate, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + fmt.write_str(&format!("TODO: {:?}", type_name_of_val(t))) + } +} +impl<'db> IrPrint> for DbInterner<'db> { + fn print(t: &ty::CoercePredicate, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Self::print_debug(t, fmt) + } + + fn print_debug( + t: &ty::CoercePredicate, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + fmt.write_str(&format!("TODO: {:?}", type_name_of_val(t))) + } +} +impl<'db> IrPrint> for DbInterner<'db> { + fn print(t: &ty::FnSig, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Self::print_debug(t, fmt) + } + + fn print_debug(t: &ty::FnSig, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + fmt.write_str(&format!("TODO: {:?}", type_name_of_val(t))) + } +} + +impl<'db> IrPrint>> for DbInterner<'db> { + fn print( + t: &rustc_type_ir::PatternKind>, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + Self::print_debug(t, fmt) + } + + fn print_debug( + t: &rustc_type_ir::PatternKind>, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + fmt.write_str(&format!("TODO: {:?}", type_name_of_val(t))) + } +} diff --git 
a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/mapping.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/mapping.rs new file mode 100644 index 0000000000000..b24b996b0927c --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/mapping.rs @@ -0,0 +1,1692 @@ +//! Things useful for mapping to/from Chalk and next-trait-solver types. + +use base_db::Crate; +use chalk_ir::{ + CanonicalVarKind, CanonicalVarKinds, FnPointer, InferenceVar, Substitution, TyVariableKind, + WellFormed, cast::Cast, fold::Shift, interner::HasInterner, +}; +use hir_def::{ + CallableDefId, ConstParamId, FunctionId, GeneralConstId, LifetimeParamId, TypeAliasId, + TypeOrConstParamId, TypeParamId, signatures::TraitFlags, +}; +use hir_def::{GenericDefId, GenericParamId}; +use intern::sym; +use rustc_type_ir::{ + AliasTerm, BoundVar, DebruijnIndex, ExistentialProjection, ExistentialTraitRef, Interner as _, + OutlivesPredicate, ProjectionPredicate, TypeFoldable, TypeSuperFoldable, TypeVisitable, + TypeVisitableExt, UniverseIndex, elaborate, + inherent::{BoundVarLike, Clause as _, IntoKind, PlaceholderLike, SliceLike, Ty as _}, + shift_vars, + solve::Goal, +}; +use salsa::plumbing::FromId; +use salsa::{Id, plumbing::AsId}; + +use crate::next_solver::BoundConst; +use crate::{ + ConstScalar, ImplTraitId, Interner, MemoryMap, + db::{ + HirDatabase, InternedClosureId, InternedCoroutineId, InternedLifetimeParamId, + InternedOpaqueTyId, InternedTypeOrConstParamId, + }, + from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, + mapping::ToChalk, + next_solver::{ + Binder, ClauseKind, ConstBytes, TraitPredicate, UnevaluatedConst, + interner::{AdtDef, BoundVarKind, BoundVarKinds, DbInterner}, + }, + to_assoc_type_id, to_chalk_trait_id, to_foreign_def_id, +}; +use crate::{ + from_placeholder_idx, lt_from_placeholder_idx, lt_to_placeholder_idx, to_placeholder_idx, +}; + +use super::{ + BoundExistentialPredicate, BoundExistentialPredicates, BoundRegion, BoundRegionKind, BoundTy, + BoundTyKind, Canonical, CanonicalVars, Clause, Clauses, Const, Ctor, EarlyParamRegion, + ErrorGuaranteed, ExistentialPredicate, GenericArg, GenericArgs, ParamConst, ParamEnv, ParamTy, + Placeholder, PlaceholderConst, PlaceholderRegion, PlaceholderTy, Predicate, PredicateKind, + Region, SolverDefId, SubtypePredicate, Term, TraitRef, Ty, Tys, ValueConst, VariancesOf, +}; + +// FIXME: This should urgently go (as soon as we finish the migration off Chalk, that is). 
+pub fn convert_binder_to_early_binder<'db, T: rustc_type_ir::TypeFoldable>>( + interner: DbInterner<'db>, + def: GenericDefId, + binder: rustc_type_ir::Binder, T>, +) -> rustc_type_ir::EarlyBinder, T> { + let mut folder = BinderToEarlyBinder { + interner, + debruijn: rustc_type_ir::DebruijnIndex::ZERO, + params: crate::generics::generics(interner.db, def).iter_id().collect(), + }; + rustc_type_ir::EarlyBinder::bind(binder.skip_binder().fold_with(&mut folder)) +} + +struct BinderToEarlyBinder<'db> { + interner: DbInterner<'db>, + debruijn: rustc_type_ir::DebruijnIndex, + params: Vec, +} + +impl<'db> rustc_type_ir::TypeFolder> for BinderToEarlyBinder<'db> { + fn cx(&self) -> DbInterner<'db> { + self.interner + } + + fn fold_binder( + &mut self, + t: rustc_type_ir::Binder, T>, + ) -> rustc_type_ir::Binder, T> + where + T: TypeFoldable>, + { + self.debruijn.shift_in(1); + let result = t.super_fold_with(self); + self.debruijn.shift_out(1); + result + } + + fn fold_ty(&mut self, t: Ty<'db>) -> Ty<'db> { + match t.kind() { + rustc_type_ir::TyKind::Bound(debruijn, bound_ty) if self.debruijn == debruijn => { + let var: rustc_type_ir::BoundVar = bound_ty.var(); + let GenericParamId::TypeParamId(id) = self.params[bound_ty.var.as_usize()] else { + unreachable!() + }; + Ty::new( + self.cx(), + rustc_type_ir::TyKind::Param(ParamTy { index: var.as_u32(), id }), + ) + } + _ => t.super_fold_with(self), + } + } + + fn fold_region(&mut self, r: Region<'db>) -> Region<'db> { + match r.kind() { + rustc_type_ir::ReBound(debruijn, bound_region) if self.debruijn == debruijn => { + let var: rustc_type_ir::BoundVar = bound_region.var(); + let GenericParamId::LifetimeParamId(id) = self.params[bound_region.var.as_usize()] + else { + unreachable!() + }; + Region::new( + self.cx(), + rustc_type_ir::RegionKind::ReEarlyParam(EarlyParamRegion { + index: var.as_u32(), + id, + }), + ) + } + _ => r, + } + } + + fn fold_const(&mut self, c: Const<'db>) -> Const<'db> { + match c.kind() { + rustc_type_ir::ConstKind::Bound(debruijn, var) if self.debruijn == debruijn => { + let GenericParamId::ConstParamId(id) = self.params[var.var.as_usize()] else { + unreachable!() + }; + Const::new( + self.cx(), + rustc_type_ir::ConstKind::Param(ParamConst { index: var.var.as_u32(), id }), + ) + } + _ => c.super_fold_with(self), + } + } +} + +pub trait ChalkToNextSolver<'db, Out> { + fn to_nextsolver(&self, interner: DbInterner<'db>) -> Out; +} + +pub trait NextSolverToChalk<'db, Out> { + fn to_chalk(self, interner: DbInterner<'db>) -> Out; +} + +impl NextSolverToChalk<'_, chalk_ir::Mutability> for rustc_ast_ir::Mutability { + fn to_chalk(self, interner: DbInterner<'_>) -> chalk_ir::Mutability { + match self { + rustc_ast_ir::Mutability::Not => chalk_ir::Mutability::Not, + rustc_ast_ir::Mutability::Mut => chalk_ir::Mutability::Mut, + } + } +} + +impl NextSolverToChalk<'_, chalk_ir::Safety> for crate::next_solver::abi::Safety { + fn to_chalk(self, interner: DbInterner<'_>) -> chalk_ir::Safety { + match self { + crate::next_solver::abi::Safety::Unsafe => chalk_ir::Safety::Unsafe, + crate::next_solver::abi::Safety::Safe => chalk_ir::Safety::Safe, + } + } +} + +impl<'db> ChalkToNextSolver<'db, Ty<'db>> for chalk_ir::Ty { + fn to_nextsolver(&self, interner: DbInterner<'db>) -> Ty<'db> { + Ty::new( + interner, + match self.kind(Interner) { + chalk_ir::TyKind::Adt(adt_id, substitution) => { + let def = AdtDef::new(adt_id.0, interner); + let args = substitution.to_nextsolver(interner); + rustc_type_ir::TyKind::Adt(def, args) + } + 
chalk_ir::TyKind::AssociatedType(assoc_type_id, substitution) => { + let def_id = SolverDefId::TypeAliasId(from_assoc_type_id(*assoc_type_id)); + let args: GenericArgs<'db> = substitution.to_nextsolver(interner); + let alias_ty = rustc_type_ir::AliasTy::new(interner, def_id, args.iter()); + rustc_type_ir::TyKind::Alias(rustc_type_ir::AliasTyKind::Projection, alias_ty) + } + chalk_ir::TyKind::Scalar(scalar) => match scalar { + chalk_ir::Scalar::Bool => rustc_type_ir::TyKind::Bool, + chalk_ir::Scalar::Char => rustc_type_ir::TyKind::Char, + chalk_ir::Scalar::Int(chalk_ir::IntTy::Isize) => { + rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::Isize) + } + chalk_ir::Scalar::Int(chalk_ir::IntTy::I8) => { + rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::I8) + } + chalk_ir::Scalar::Int(chalk_ir::IntTy::I16) => { + rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::I16) + } + chalk_ir::Scalar::Int(chalk_ir::IntTy::I32) => { + rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::I32) + } + chalk_ir::Scalar::Int(chalk_ir::IntTy::I64) => { + rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::I64) + } + chalk_ir::Scalar::Int(chalk_ir::IntTy::I128) => { + rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::I128) + } + chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize) => { + rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::Usize) + } + chalk_ir::Scalar::Uint(chalk_ir::UintTy::U8) => { + rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::U8) + } + chalk_ir::Scalar::Uint(chalk_ir::UintTy::U16) => { + rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::U16) + } + chalk_ir::Scalar::Uint(chalk_ir::UintTy::U32) => { + rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::U32) + } + chalk_ir::Scalar::Uint(chalk_ir::UintTy::U64) => { + rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::U64) + } + chalk_ir::Scalar::Uint(chalk_ir::UintTy::U128) => { + rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::U128) + } + chalk_ir::Scalar::Float(chalk_ir::FloatTy::F16) => { + rustc_type_ir::TyKind::Float(rustc_type_ir::FloatTy::F16) + } + chalk_ir::Scalar::Float(chalk_ir::FloatTy::F32) => { + rustc_type_ir::TyKind::Float(rustc_type_ir::FloatTy::F32) + } + chalk_ir::Scalar::Float(chalk_ir::FloatTy::F64) => { + rustc_type_ir::TyKind::Float(rustc_type_ir::FloatTy::F64) + } + chalk_ir::Scalar::Float(chalk_ir::FloatTy::F128) => { + rustc_type_ir::TyKind::Float(rustc_type_ir::FloatTy::F128) + } + }, + chalk_ir::TyKind::Tuple(_, substitution) => { + let args = substitution.to_nextsolver(interner); + rustc_type_ir::TyKind::Tuple(args) + } + chalk_ir::TyKind::Array(ty, len) => rustc_type_ir::TyKind::Array( + ty.to_nextsolver(interner), + len.to_nextsolver(interner), + ), + chalk_ir::TyKind::Slice(ty) => { + rustc_type_ir::TyKind::Slice(ty.to_nextsolver(interner)) + } + chalk_ir::TyKind::Raw(mutability, ty) => rustc_type_ir::RawPtr( + ty.to_nextsolver(interner), + mutability.to_nextsolver(interner), + ), + chalk_ir::TyKind::Ref(mutability, lifetime, ty) => rustc_type_ir::TyKind::Ref( + lifetime.to_nextsolver(interner), + ty.to_nextsolver(interner), + mutability.to_nextsolver(interner), + ), + chalk_ir::TyKind::OpaqueType(def_id, substitution) => { + let id: InternedOpaqueTyId = (*def_id).into(); + let args: GenericArgs<'db> = substitution.to_nextsolver(interner); + let alias_ty = rustc_type_ir::AliasTy::new(interner, id.into(), args); + rustc_type_ir::TyKind::Alias(rustc_type_ir::AliasTyKind::Opaque, alias_ty) + } + chalk_ir::TyKind::FnDef(fn_def_id, substitution) => { + let def_id = CallableDefId::from_chalk(interner.db(), *fn_def_id); + 
rustc_type_ir::TyKind::FnDef( + def_id.into(), + substitution.to_nextsolver(interner), + ) + } + chalk_ir::TyKind::Str => rustc_type_ir::TyKind::Str, + chalk_ir::TyKind::Never => rustc_type_ir::TyKind::Never, + chalk_ir::TyKind::Closure(closure_id, substitution) => { + let id: InternedClosureId = (*closure_id).into(); + rustc_type_ir::TyKind::Closure(id.into(), substitution.to_nextsolver(interner)) + } + chalk_ir::TyKind::Coroutine(coroutine_id, substitution) => { + let id: InternedCoroutineId = (*coroutine_id).into(); + rustc_type_ir::TyKind::Coroutine( + id.into(), + substitution.to_nextsolver(interner), + ) + } + chalk_ir::TyKind::CoroutineWitness(coroutine_id, substitution) => { + let id: InternedCoroutineId = (*coroutine_id).into(); + rustc_type_ir::TyKind::CoroutineWitness( + id.into(), + substitution.to_nextsolver(interner), + ) + } + chalk_ir::TyKind::Foreign(foreign_def_id) => rustc_type_ir::TyKind::Foreign( + crate::from_foreign_def_id(*foreign_def_id).into(), + ), + chalk_ir::TyKind::Error => rustc_type_ir::TyKind::Error(ErrorGuaranteed), + chalk_ir::TyKind::Dyn(dyn_ty) => { + // exists { for<...> ^1.0: ... } + let bounds = BoundExistentialPredicates::new_from_iter( + interner, + dyn_ty.bounds.skip_binders().iter(Interner).filter_map(|pred| { + // for<...> ^1.0: ... + let (val, binders) = pred.clone().into_value_and_skipped_binders(); + let bound_vars = binders.to_nextsolver(interner); + let clause = match val { + chalk_ir::WhereClause::Implemented(trait_ref) => { + let trait_id = from_chalk_trait_id(trait_ref.trait_id); + if interner + .db() + .trait_signature(trait_id) + .flags + .contains(TraitFlags::AUTO) + { + ExistentialPredicate::AutoTrait(trait_id.into()) + } else { + let args = GenericArgs::new_from_iter( + interner, + trait_ref + .substitution + .iter(Interner) + .skip(1) + .map(|a| a.clone().shifted_out(Interner).unwrap()) + .map(|a| a.to_nextsolver(interner)), + ); + let trait_ref = ExistentialTraitRef::new_from_args( + interner, trait_id.into(), args, + ); + ExistentialPredicate::Trait(trait_ref) + } + } + chalk_ir::WhereClause::AliasEq(alias_eq) => { + let (def_id, args) = match &alias_eq.alias { + chalk_ir::AliasTy::Projection(projection) => { + let id = + from_assoc_type_id(projection.associated_ty_id); + let def_id = SolverDefId::TypeAliasId(id); + let generics = interner.generics_of(def_id); + let parent_len = generics.parent_count; + let substs = projection.substitution.iter(Interner).skip(1); + + let args = GenericArgs::new_from_iter( + interner, + substs + .map(|a| { + a.clone().shifted_out(Interner).unwrap() + }) + .map(|a| a.to_nextsolver(interner)), + ); + (def_id, args) + } + chalk_ir::AliasTy::Opaque(opaque_ty) => { + panic!("Invalid ExistentialPredicate (opaques can't be named)."); + } + }; + let term = alias_eq + .ty + .clone() + .shifted_out(Interner) + .unwrap() + .to_nextsolver(interner) + .into(); + let projection = ExistentialProjection::new_from_args( + interner, def_id, args, term, + ); + ExistentialPredicate::Projection(projection) + } + chalk_ir::WhereClause::LifetimeOutlives(lifetime_outlives) => { + return None; + } + chalk_ir::WhereClause::TypeOutlives(type_outlives) => return None, + }; + + Some(Binder::bind_with_vars(clause, bound_vars)) + }), + ); + let region = dyn_ty.lifetime.to_nextsolver(interner); + rustc_type_ir::TyKind::Dynamic(bounds, region) + } + chalk_ir::TyKind::Alias(alias_ty) => match alias_ty { + chalk_ir::AliasTy::Projection(projection_ty) => { + let def_id = SolverDefId::TypeAliasId(from_assoc_type_id( + 
projection_ty.associated_ty_id, + )); + let alias_ty = rustc_type_ir::AliasTy::new_from_args( + interner, + def_id, + projection_ty.substitution.to_nextsolver(interner), + ); + rustc_type_ir::TyKind::Alias( + rustc_type_ir::AliasTyKind::Projection, + alias_ty, + ) + } + chalk_ir::AliasTy::Opaque(opaque_ty) => { + let id: InternedOpaqueTyId = opaque_ty.opaque_ty_id.into(); + let def_id = SolverDefId::InternedOpaqueTyId(id); + let alias_ty = rustc_type_ir::AliasTy::new_from_args( + interner, + def_id, + opaque_ty.substitution.to_nextsolver(interner), + ); + rustc_type_ir::TyKind::Alias(rustc_type_ir::AliasTyKind::Opaque, alias_ty) + } + }, + chalk_ir::TyKind::Function(fn_pointer) => { + let sig_tys = fn_pointer.clone().into_binders(Interner).to_nextsolver(interner); + let header = rustc_type_ir::FnHeader { + abi: fn_pointer.sig.abi, + c_variadic: fn_pointer.sig.variadic, + safety: match fn_pointer.sig.safety { + chalk_ir::Safety::Safe => super::abi::Safety::Safe, + chalk_ir::Safety::Unsafe => super::abi::Safety::Unsafe, + }, + }; + + rustc_type_ir::TyKind::FnPtr(sig_tys, header) + } + // The schema here is quite confusing. + // The new solver, like rustc, uses `Param` and `EarlyBinder` for generic params. It uses `BoundVar` + // and `Placeholder` together with `Binder` for HRTB, which we mostly don't handle. + // Chalk uses `Placeholder` for generic params and `BoundVar` quite liberally, and this is quite a + // problem. `chalk_ir::TyKind::BoundVar` can represent either HRTB or generic params, depending on the + // context. When returned from signature queries, the outer `Binders` represent the generic params. + // But there are also inner `Binders` for HRTB. + // AFAIK there is no way to tell which of the meanings is relevant, so we just use `rustc_type_ir::Bound` + // here, and hope for the best. If you are working with new solver types, therefore, use the new solver + // lower queries. + // Hopefully sooner than later Chalk will be ripped from the codebase and we can avoid that problem. + // For details about the rustc setup, read: https://rustc-dev-guide.rust-lang.org/generic_parameters_summary.html + // and the following chapters. 
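// An illustrative example of the mismatch (a sketch, not an exhaustive description): for
// `fn foo<'a, T>(_: &'a T)`, the Chalk-side signature query hands back `Binders` whose
// outermost variables stand for the generic params `'a` and `T`, and any `for<'b> ...`
// inside the signature introduces *further* `Binders` using the same `BoundVar`
// machinery. The next-solver representation of the same signature would instead use
// `Param`/`ReEarlyParam` for `T` and `'a` (under an `EarlyBinder`), keeping
// `Bound`/`Binder` only for the `for<'b>` part.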
+ chalk_ir::TyKind::Placeholder(placeholder_index) => { + let (id, index) = from_placeholder_idx(interner.db, *placeholder_index); + rustc_type_ir::TyKind::Param(ParamTy { + id: TypeParamId::from_unchecked(id), + index, + }) + } + chalk_ir::TyKind::BoundVar(bound_var) => rustc_type_ir::TyKind::Bound( + bound_var.debruijn.to_nextsolver(interner), + BoundTy { + var: rustc_type_ir::BoundVar::from_usize(bound_var.index), + kind: BoundTyKind::Anon, + }, + ), + chalk_ir::TyKind::InferenceVar(inference_var, ty_variable_kind) => { + rustc_type_ir::TyKind::Infer( + (*inference_var, *ty_variable_kind).to_nextsolver(interner), + ) + } + }, + ) + } +} + +impl<'db> NextSolverToChalk<'db, chalk_ir::Ty> for Ty<'db> { + fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::Ty { + convert_ty_for_result(interner, self) + } +} + +impl<'db> ChalkToNextSolver<'db, Region<'db>> for chalk_ir::Lifetime { + fn to_nextsolver(&self, interner: DbInterner<'db>) -> Region<'db> { + Region::new( + interner, + match self.data(Interner) { + chalk_ir::LifetimeData::BoundVar(bound_var) => rustc_type_ir::RegionKind::ReBound( + bound_var.debruijn.to_nextsolver(interner), + BoundRegion { + var: rustc_type_ir::BoundVar::from_u32(bound_var.index as u32), + kind: BoundRegionKind::Anon, + }, + ), + chalk_ir::LifetimeData::InferenceVar(inference_var) => { + rustc_type_ir::RegionKind::ReVar(rustc_type_ir::RegionVid::from_u32( + inference_var.index(), + )) + } + chalk_ir::LifetimeData::Placeholder(placeholder_index) => { + let (id, index) = lt_from_placeholder_idx(interner.db, *placeholder_index); + rustc_type_ir::RegionKind::ReEarlyParam(EarlyParamRegion { id, index }) + } + chalk_ir::LifetimeData::Static => rustc_type_ir::RegionKind::ReStatic, + chalk_ir::LifetimeData::Erased => rustc_type_ir::RegionKind::ReErased, + chalk_ir::LifetimeData::Phantom(_, _) => { + unreachable!() + } + chalk_ir::LifetimeData::Error => { + rustc_type_ir::RegionKind::ReError(ErrorGuaranteed) + } + }, + ) + } +} + +impl<'db> NextSolverToChalk<'db, chalk_ir::Lifetime> for Region<'db> { + fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::Lifetime { + convert_region_for_result(interner, self) + } +} + +impl<'db> ChalkToNextSolver<'db, Const<'db>> for chalk_ir::Const { + fn to_nextsolver(&self, interner: DbInterner<'db>) -> Const<'db> { + let data = self.data(Interner); + Const::new( + interner, + match &data.value { + chalk_ir::ConstValue::BoundVar(bound_var) => rustc_type_ir::ConstKind::Bound( + bound_var.debruijn.to_nextsolver(interner), + BoundConst { var: rustc_type_ir::BoundVar::from_usize(bound_var.index) }, + ), + chalk_ir::ConstValue::InferenceVar(inference_var) => { + rustc_type_ir::ConstKind::Infer(rustc_type_ir::InferConst::Var( + rustc_type_ir::ConstVid::from_u32(inference_var.index()), + )) + } + chalk_ir::ConstValue::Placeholder(placeholder_index) => { + let (id, index) = from_placeholder_idx(interner.db, *placeholder_index); + rustc_type_ir::ConstKind::Param(ParamConst { + id: ConstParamId::from_unchecked(id), + index, + }) + } + chalk_ir::ConstValue::Concrete(concrete_const) => match &concrete_const.interned { + ConstScalar::Bytes(bytes, memory) => { + rustc_type_ir::ConstKind::Value(ValueConst::new( + data.ty.to_nextsolver(interner), + ConstBytes(bytes.clone(), memory.clone()), + )) + } + ConstScalar::UnevaluatedConst(c, subst) => { + let def = match *c { + GeneralConstId::ConstId(id) => SolverDefId::ConstId(id), + GeneralConstId::StaticId(id) => SolverDefId::StaticId(id), + }; + let args = subst.to_nextsolver(interner); + 
rustc_type_ir::ConstKind::Unevaluated(UnevaluatedConst::new(def, args)) + } + ConstScalar::Unknown => rustc_type_ir::ConstKind::Error(ErrorGuaranteed), + }, + }, + ) + } +} + +impl<'db> NextSolverToChalk<'db, chalk_ir::Const> for Const<'db> { + fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::Const { + convert_const_for_result(interner, self) + } +} + +impl<'db> ChalkToNextSolver<'db, rustc_type_ir::FnSigTys>> + for chalk_ir::FnSubst +{ + fn to_nextsolver(&self, interner: DbInterner<'db>) -> rustc_type_ir::FnSigTys> { + rustc_type_ir::FnSigTys { + inputs_and_output: Tys::new_from_iter( + interner, + self.0.iter(Interner).map(|g| g.assert_ty_ref(Interner).to_nextsolver(interner)), + ), + } + } +} + +impl< + 'db, + U: TypeVisitable>, + T: Clone + ChalkToNextSolver<'db, U> + HasInterner, +> ChalkToNextSolver<'db, rustc_type_ir::Binder, U>> for chalk_ir::Binders +{ + fn to_nextsolver( + &self, + interner: DbInterner<'db>, + ) -> rustc_type_ir::Binder, U> { + let (val, binders) = self.clone().into_value_and_skipped_binders(); + rustc_type_ir::Binder::bind_with_vars( + val.to_nextsolver(interner), + binders.to_nextsolver(interner), + ) + } +} + +impl<'db> ChalkToNextSolver<'db, BoundVarKinds> for chalk_ir::VariableKinds { + fn to_nextsolver(&self, interner: DbInterner<'db>) -> BoundVarKinds { + BoundVarKinds::new_from_iter( + interner, + self.iter(Interner).map(|v| v.to_nextsolver(interner)), + ) + } +} + +impl<'db> ChalkToNextSolver<'db, BoundVarKind> for chalk_ir::VariableKind { + fn to_nextsolver(&self, interner: DbInterner<'db>) -> BoundVarKind { + match self { + chalk_ir::VariableKind::Ty(_ty_variable_kind) => BoundVarKind::Ty(BoundTyKind::Anon), + chalk_ir::VariableKind::Lifetime => BoundVarKind::Region(BoundRegionKind::Anon), + chalk_ir::VariableKind::Const(_ty) => BoundVarKind::Const, + } + } +} + +impl<'db> ChalkToNextSolver<'db, GenericArg<'db>> for chalk_ir::GenericArg { + fn to_nextsolver(&self, interner: DbInterner<'db>) -> GenericArg<'db> { + match self.data(Interner) { + chalk_ir::GenericArgData::Ty(ty) => ty.to_nextsolver(interner).into(), + chalk_ir::GenericArgData::Lifetime(lifetime) => lifetime.to_nextsolver(interner).into(), + chalk_ir::GenericArgData::Const(const_) => const_.to_nextsolver(interner).into(), + } + } +} + +impl<'db> ChalkToNextSolver<'db, GenericArgs<'db>> for chalk_ir::Substitution { + fn to_nextsolver(&self, interner: DbInterner<'db>) -> GenericArgs<'db> { + GenericArgs::new_from_iter( + interner, + self.iter(Interner).map(|arg| -> GenericArg<'db> { arg.to_nextsolver(interner) }), + ) + } +} + +impl<'db> NextSolverToChalk<'db, chalk_ir::Substitution> for GenericArgs<'db> { + fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::Substitution { + convert_args_for_result(interner, self.as_slice()) + } +} + +impl<'db> ChalkToNextSolver<'db, Tys<'db>> for chalk_ir::Substitution { + fn to_nextsolver(&self, interner: DbInterner<'db>) -> Tys<'db> { + Tys::new_from_iter( + interner, + self.iter(Interner).map(|arg| -> Ty<'db> { + match arg.data(Interner) { + chalk_ir::GenericArgData::Ty(ty) => ty.to_nextsolver(interner), + chalk_ir::GenericArgData::Lifetime(_) => unreachable!(), + chalk_ir::GenericArgData::Const(_) => unreachable!(), + } + }), + ) + } +} + +impl<'db> NextSolverToChalk<'db, crate::Substitution> for Tys<'db> { + fn to_chalk(self, interner: DbInterner<'db>) -> crate::Substitution { + Substitution::from_iter( + Interner, + self.inner().iter().map(|ty| ty.to_chalk(interner).cast(Interner)), + ) + } +} + +impl<'db> ChalkToNextSolver<'db, 
rustc_type_ir::DebruijnIndex> for chalk_ir::DebruijnIndex { + fn to_nextsolver(&self, _interner: DbInterner<'db>) -> rustc_type_ir::DebruijnIndex { + rustc_type_ir::DebruijnIndex::from_u32(self.depth()) + } +} + +impl<'db> NextSolverToChalk<'db, chalk_ir::DebruijnIndex> for rustc_type_ir::DebruijnIndex { + fn to_chalk(self, _interner: DbInterner<'db>) -> chalk_ir::DebruijnIndex { + chalk_ir::DebruijnIndex::new(self.index() as u32) + } +} + +impl<'db> ChalkToNextSolver<'db, rustc_type_ir::UniverseIndex> for chalk_ir::UniverseIndex { + fn to_nextsolver(&self, _interner: DbInterner<'db>) -> rustc_type_ir::UniverseIndex { + rustc_type_ir::UniverseIndex::from_u32(self.counter as u32) + } +} + +impl<'db> NextSolverToChalk<'db, chalk_ir::UniverseIndex> for rustc_type_ir::UniverseIndex { + fn to_chalk(self, _interner: DbInterner<'db>) -> chalk_ir::UniverseIndex { + chalk_ir::UniverseIndex { counter: self.index() } + } +} + +impl<'db> ChalkToNextSolver<'db, rustc_type_ir::InferTy> + for (chalk_ir::InferenceVar, chalk_ir::TyVariableKind) +{ + fn to_nextsolver(&self, interner: DbInterner<'db>) -> rustc_type_ir::InferTy { + match self.1 { + chalk_ir::TyVariableKind::General => { + rustc_type_ir::InferTy::TyVar(rustc_type_ir::TyVid::from_u32(self.0.index())) + } + chalk_ir::TyVariableKind::Integer => { + rustc_type_ir::InferTy::IntVar(rustc_type_ir::IntVid::from_u32(self.0.index())) + } + chalk_ir::TyVariableKind::Float => { + rustc_type_ir::InferTy::FloatVar(rustc_type_ir::FloatVid::from_u32(self.0.index())) + } + } + } +} + +impl<'db> ChalkToNextSolver<'db, rustc_ast_ir::Mutability> for chalk_ir::Mutability { + fn to_nextsolver(&self, interner: DbInterner<'db>) -> rustc_ast_ir::Mutability { + match self { + chalk_ir::Mutability::Mut => rustc_ast_ir::Mutability::Mut, + chalk_ir::Mutability::Not => rustc_ast_ir::Mutability::Not, + } + } +} + +impl<'db> ChalkToNextSolver<'db, rustc_type_ir::Variance> for crate::Variance { + fn to_nextsolver(&self, interner: DbInterner<'db>) -> rustc_type_ir::Variance { + match self { + crate::Variance::Covariant => rustc_type_ir::Variance::Covariant, + crate::Variance::Invariant => rustc_type_ir::Variance::Invariant, + crate::Variance::Contravariant => rustc_type_ir::Variance::Contravariant, + crate::Variance::Bivariant => rustc_type_ir::Variance::Bivariant, + } + } +} + +impl<'db> ChalkToNextSolver<'db, rustc_type_ir::Variance> for chalk_ir::Variance { + fn to_nextsolver(&self, interner: DbInterner<'db>) -> rustc_type_ir::Variance { + match self { + chalk_ir::Variance::Covariant => rustc_type_ir::Variance::Covariant, + chalk_ir::Variance::Invariant => rustc_type_ir::Variance::Invariant, + chalk_ir::Variance::Contravariant => rustc_type_ir::Variance::Contravariant, + } + } +} + +impl<'db> ChalkToNextSolver<'db, VariancesOf> for chalk_ir::Variances { + fn to_nextsolver(&self, interner: DbInterner<'db>) -> VariancesOf { + VariancesOf::new_from_iter( + interner, + self.as_slice(Interner).iter().map(|v| v.to_nextsolver(interner)), + ) + } +} + +impl<'db> ChalkToNextSolver<'db, Goal, Predicate<'db>>> + for chalk_ir::InEnvironment> +{ + fn to_nextsolver(&self, interner: DbInterner<'db>) -> Goal, Predicate<'db>> { + Goal::new( + interner, + self.environment.to_nextsolver(interner), + self.goal.to_nextsolver(interner), + ) + } +} + +impl<'db> NextSolverToChalk<'db, chalk_ir::InEnvironment>> + for Goal, Predicate<'db>> +{ + fn to_chalk( + self, + interner: DbInterner<'db>, + ) -> chalk_ir::InEnvironment> { + chalk_ir::InEnvironment { + environment: 
self.param_env.to_chalk(interner), + goal: self.predicate.to_chalk(interner), + } + } +} + +impl<'db, T: HasInterner + ChalkToNextSolver<'db, U>, U> + ChalkToNextSolver<'db, Canonical<'db, U>> for chalk_ir::Canonical +{ + fn to_nextsolver(&self, interner: DbInterner<'db>) -> Canonical<'db, U> { + let variables = CanonicalVars::new_from_iter( + interner, + self.binders.iter(Interner).map(|k| match &k.kind { + chalk_ir::VariableKind::Ty(ty_variable_kind) => match ty_variable_kind { + // FIXME(next-solver): the info is incorrect, but we have no way to store the information in Chalk. + TyVariableKind::General => rustc_type_ir::CanonicalVarKind::Ty { + ui: UniverseIndex::ROOT, + sub_root: BoundVar::from_u32(0), + }, + TyVariableKind::Integer => rustc_type_ir::CanonicalVarKind::Int, + TyVariableKind::Float => rustc_type_ir::CanonicalVarKind::Float, + }, + chalk_ir::VariableKind::Lifetime => { + rustc_type_ir::CanonicalVarKind::Region(UniverseIndex::ROOT) + } + chalk_ir::VariableKind::Const(ty) => { + rustc_type_ir::CanonicalVarKind::Const(UniverseIndex::ROOT) + } + }), + ); + Canonical { + max_universe: UniverseIndex::ROOT, + value: self.value.to_nextsolver(interner), + variables, + } + } +} + +impl<'db, T: NextSolverToChalk<'db, U>, U: HasInterner> + NextSolverToChalk<'db, chalk_ir::Canonical> for Canonical<'db, T> +{ + fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::Canonical { + let binders = chalk_ir::CanonicalVarKinds::from_iter( + Interner, + self.variables.iter().map(|v| match v { + rustc_type_ir::CanonicalVarKind::Ty { ui, sub_root: _ } => { + chalk_ir::CanonicalVarKind::new( + chalk_ir::VariableKind::Ty(TyVariableKind::General), + chalk_ir::UniverseIndex { counter: ui.as_usize() }, + ) + } + rustc_type_ir::CanonicalVarKind::Int => chalk_ir::CanonicalVarKind::new( + chalk_ir::VariableKind::Ty(TyVariableKind::Integer), + chalk_ir::UniverseIndex::root(), + ), + rustc_type_ir::CanonicalVarKind::Float => chalk_ir::CanonicalVarKind::new( + chalk_ir::VariableKind::Ty(TyVariableKind::Float), + chalk_ir::UniverseIndex::root(), + ), + rustc_type_ir::CanonicalVarKind::Region(ui) => chalk_ir::CanonicalVarKind::new( + chalk_ir::VariableKind::Lifetime, + chalk_ir::UniverseIndex { counter: ui.as_usize() }, + ), + rustc_type_ir::CanonicalVarKind::Const(ui) => chalk_ir::CanonicalVarKind::new( + chalk_ir::VariableKind::Const(chalk_ir::TyKind::Error.intern(Interner)), + chalk_ir::UniverseIndex { counter: ui.as_usize() }, + ), + rustc_type_ir::CanonicalVarKind::PlaceholderTy(_) => unimplemented!(), + rustc_type_ir::CanonicalVarKind::PlaceholderRegion(_) => unimplemented!(), + rustc_type_ir::CanonicalVarKind::PlaceholderConst(_) => unimplemented!(), + }), + ); + let value = self.value.to_chalk(interner); + chalk_ir::Canonical { binders, value } + } +} + +impl<'db> ChalkToNextSolver<'db, Predicate<'db>> for chalk_ir::Goal { + fn to_nextsolver(&self, interner: DbInterner<'db>) -> Predicate<'db> { + match self.data(Interner) { + chalk_ir::GoalData::Quantified(quantifier_kind, binders) => { + if !binders.binders.is_empty(Interner) { + panic!("Should not be constructed."); + } + let (val, _) = binders.clone().into_value_and_skipped_binders(); + val.shifted_out(Interner).unwrap().to_nextsolver(interner) + } + chalk_ir::GoalData::Implies(program_clauses, goal) => { + panic!("Should not be constructed.") + } + chalk_ir::GoalData::All(goals) => panic!("Should not be constructed."), + chalk_ir::GoalData::Not(goal) => panic!("Should not be constructed."), + chalk_ir::GoalData::EqGoal(eq_goal) => { + let 
arg_to_term = |g: &chalk_ir::GenericArg| match g.data(Interner) { + chalk_ir::GenericArgData::Ty(ty) => Term::Ty(ty.to_nextsolver(interner)), + chalk_ir::GenericArgData::Const(const_) => { + Term::Const(const_.to_nextsolver(interner)) + } + chalk_ir::GenericArgData::Lifetime(lifetime) => unreachable!(), + }; + let pred_kind = PredicateKind::AliasRelate( + arg_to_term(&eq_goal.a), + arg_to_term(&eq_goal.b), + rustc_type_ir::AliasRelationDirection::Equate, + ); + let pred_kind = + Binder::bind_with_vars(pred_kind, BoundVarKinds::new_from_iter(interner, [])); + Predicate::new(interner, pred_kind) + } + chalk_ir::GoalData::SubtypeGoal(subtype_goal) => { + let subtype_predicate = SubtypePredicate { + a: subtype_goal.a.to_nextsolver(interner), + b: subtype_goal.b.to_nextsolver(interner), + a_is_expected: true, + }; + let pred_kind = PredicateKind::Subtype(subtype_predicate); + let pred_kind = Binder::bind_with_vars( + shift_vars(interner, pred_kind, 1), + BoundVarKinds::new_from_iter(interner, []), + ); + Predicate::new(interner, pred_kind) + } + chalk_ir::GoalData::DomainGoal(domain_goal) => { + let pred_kind = domain_goal.to_nextsolver(interner); + let pred_kind = Binder::bind_with_vars( + shift_vars(interner, pred_kind, 1), + BoundVarKinds::new_from_iter(interner, []), + ); + Predicate::new(interner, pred_kind) + } + chalk_ir::GoalData::CannotProve => panic!("Should not be constructed."), + } + } +} + +impl<'db> NextSolverToChalk<'db, chalk_ir::Goal> for Predicate<'db> { + fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::Goal { + chalk_ir::Goal::new(Interner, self.kind().skip_binder().to_chalk(interner)) + } +} + +impl<'db> NextSolverToChalk<'db, crate::ProjectionTy> for crate::next_solver::AliasTy<'db> { + fn to_chalk(self, interner: DbInterner<'db>) -> crate::ProjectionTy { + let SolverDefId::TypeAliasId(assoc_id) = self.def_id else { unreachable!() }; + crate::ProjectionTy { + associated_ty_id: to_assoc_type_id(assoc_id), + substitution: self.args.to_chalk(interner), + } + } +} + +impl<'db> ChalkToNextSolver<'db, ParamEnv<'db>> for chalk_ir::Environment { + fn to_nextsolver(&self, interner: DbInterner<'db>) -> ParamEnv<'db> { + let clauses = Clauses::new_from_iter( + interner, + self.clauses.iter(Interner).map(|c| c.to_nextsolver(interner)), + ); + let clauses = + Clauses::new_from_iter(interner, elaborate::elaborate(interner, clauses.iter())); + ParamEnv { clauses } + } +} + +impl<'db> NextSolverToChalk<'db, chalk_ir::Environment> for ParamEnv<'db> { + fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::Environment { + let clauses = chalk_ir::ProgramClauses::from_iter( + Interner, + self.clauses.iter().filter_map(|c| -> Option> { + c.to_chalk(interner) + }), + ); + chalk_ir::Environment { clauses } + } +} + +impl<'db> ChalkToNextSolver<'db, Clause<'db>> for chalk_ir::ProgramClause { + fn to_nextsolver(&self, interner: DbInterner<'db>) -> Clause<'db> { + Clause(Predicate::new(interner, self.data(Interner).0.to_nextsolver(interner))) + } +} + +impl<'db> NextSolverToChalk<'db, Option>> for Clause<'db> { + fn to_chalk(self, interner: DbInterner<'db>) -> Option> { + let value: chalk_ir::ProgramClauseImplication = + as NextSolverToChalk< + 'db, + Option>, + >>::to_chalk(self.0.kind().skip_binder(), interner)?; + Some(chalk_ir::ProgramClause::new( + Interner, + chalk_ir::ProgramClauseData(chalk_ir::Binders::empty(Interner, value)), + )) + } +} + +impl<'db> ChalkToNextSolver<'db, PredicateKind<'db>> + for chalk_ir::ProgramClauseImplication +{ + fn to_nextsolver(&self, interner: 
DbInterner<'db>) -> PredicateKind<'db> { + assert!(self.conditions.is_empty(Interner)); + assert!(self.constraints.is_empty(Interner)); + self.consequence.to_nextsolver(interner) + } +} + +impl<'db> NextSolverToChalk<'db, Option>> + for PredicateKind<'db> +{ + fn to_chalk( + self, + interner: DbInterner<'db>, + ) -> Option> { + let chalk_ir::GoalData::DomainGoal(consequence) = self.to_chalk(interner) else { + return None; + }; + + Some(chalk_ir::ProgramClauseImplication { + consequence, + conditions: chalk_ir::Goals::empty(Interner), + constraints: chalk_ir::Constraints::empty(Interner), + priority: chalk_ir::ClausePriority::High, + }) + } +} + +impl<'db> ChalkToNextSolver<'db, PredicateKind<'db>> for chalk_ir::DomainGoal { + fn to_nextsolver(&self, interner: DbInterner<'db>) -> PredicateKind<'db> { + match self { + chalk_ir::DomainGoal::Holds(where_clause) => match where_clause { + chalk_ir::WhereClause::Implemented(trait_ref) => { + let predicate = TraitPredicate { + trait_ref: trait_ref.to_nextsolver(interner), + polarity: rustc_type_ir::PredicatePolarity::Positive, + }; + PredicateKind::Clause(ClauseKind::Trait(predicate)) + } + chalk_ir::WhereClause::AliasEq(alias_eq) => match &alias_eq.alias { + chalk_ir::AliasTy::Projection(p) => { + let def_id = + SolverDefId::TypeAliasId(from_assoc_type_id(p.associated_ty_id)); + let args = p.substitution.to_nextsolver(interner); + let term: Ty<'db> = alias_eq.ty.to_nextsolver(interner); + let term: Term<'db> = term.into(); + let predicate = ProjectionPredicate { + projection_term: AliasTerm::new_from_args(interner, def_id, args), + term, + }; + PredicateKind::Clause(ClauseKind::Projection(predicate)) + } + chalk_ir::AliasTy::Opaque(opaque) => { + let id: InternedOpaqueTyId = opaque.opaque_ty_id.into(); + let def_id = SolverDefId::InternedOpaqueTyId(id); + let args = opaque.substitution.to_nextsolver(interner); + let term: Ty<'db> = alias_eq.ty.to_nextsolver(interner); + let term: Term<'db> = term.into(); + let opaque_ty = Ty::new( + interner, + rustc_type_ir::TyKind::Alias( + rustc_type_ir::AliasTyKind::Opaque, + rustc_type_ir::AliasTy::new_from_args(interner, def_id, args), + ), + ) + .into(); + PredicateKind::AliasRelate( + opaque_ty, + term, + rustc_type_ir::AliasRelationDirection::Equate, + ) + } + }, + chalk_ir::WhereClause::LifetimeOutlives(lifetime_outlives) => { + let predicate = OutlivesPredicate( + lifetime_outlives.a.to_nextsolver(interner), + lifetime_outlives.b.to_nextsolver(interner), + ); + PredicateKind::Clause(ClauseKind::RegionOutlives(predicate)) + } + chalk_ir::WhereClause::TypeOutlives(type_outlives) => { + let predicate = OutlivesPredicate( + type_outlives.ty.to_nextsolver(interner), + type_outlives.lifetime.to_nextsolver(interner), + ); + PredicateKind::Clause(ClauseKind::TypeOutlives(predicate)) + } + }, + chalk_ir::DomainGoal::Normalize(normalize) => { + let proj_ty = match &normalize.alias { + chalk_ir::AliasTy::Projection(proj) => proj, + _ => unimplemented!(), + }; + let args: GenericArgs<'db> = proj_ty.substitution.to_nextsolver(interner); + let alias = Ty::new( + interner, + rustc_type_ir::TyKind::Alias( + rustc_type_ir::AliasTyKind::Projection, + rustc_type_ir::AliasTy::new( + interner, + from_assoc_type_id(proj_ty.associated_ty_id).into(), + args, + ), + ), + ) + .into(); + let term = normalize.ty.to_nextsolver(interner).into(); + PredicateKind::AliasRelate( + alias, + term, + rustc_type_ir::AliasRelationDirection::Equate, + ) + } + chalk_ir::DomainGoal::WellFormed(well_formed) => { + let term = match well_formed { 
+ WellFormed::Trait(_) => panic!("Should not be constructed."), + WellFormed::Ty(ty) => Term::Ty(ty.to_nextsolver(interner)), + }; + PredicateKind::Clause(rustc_type_ir::ClauseKind::WellFormed(term)) + } + chalk_ir::DomainGoal::FromEnv(from_env) => match from_env { + chalk_ir::FromEnv::Trait(trait_ref) => { + let predicate = TraitPredicate { + trait_ref: trait_ref.to_nextsolver(interner), + polarity: rustc_type_ir::PredicatePolarity::Positive, + }; + PredicateKind::Clause(ClauseKind::Trait(predicate)) + } + chalk_ir::FromEnv::Ty(ty) => PredicateKind::Clause(ClauseKind::WellFormed( + Term::Ty(ty.to_nextsolver(interner)), + )), + }, + chalk_ir::DomainGoal::IsLocal(ty) => panic!("Should not be constructed."), + chalk_ir::DomainGoal::IsUpstream(ty) => panic!("Should not be constructed."), + chalk_ir::DomainGoal::IsFullyVisible(ty) => panic!("Should not be constructed."), + chalk_ir::DomainGoal::LocalImplAllowed(trait_ref) => { + panic!("Should not be constructed.") + } + chalk_ir::DomainGoal::Compatible => panic!("Should not be constructed."), + chalk_ir::DomainGoal::DownstreamType(ty) => panic!("Should not be constructed."), + chalk_ir::DomainGoal::Reveal => panic!("Should not be constructed."), + chalk_ir::DomainGoal::ObjectSafe(trait_id) => panic!("Should not be constructed."), + } + } +} + +impl<'db> NextSolverToChalk<'db, chalk_ir::GoalData> for PredicateKind<'db> { + fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::GoalData { + match self { + rustc_type_ir::PredicateKind::Clause(rustc_type_ir::ClauseKind::Trait(trait_pred)) => { + let trait_ref = trait_pred.trait_ref.to_chalk(interner); + let where_clause = chalk_ir::WhereClause::Implemented(trait_ref); + chalk_ir::GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(where_clause)) + } + rustc_type_ir::PredicateKind::Clause(rustc_type_ir::ClauseKind::Projection( + proj_predicate, + )) => { + let associated_ty_id = match proj_predicate.def_id() { + SolverDefId::TypeAliasId(id) => to_assoc_type_id(id), + _ => unreachable!(), + }; + let substitution = proj_predicate.projection_term.args.to_chalk(interner); + let alias = chalk_ir::AliasTy::Projection(chalk_ir::ProjectionTy { + associated_ty_id, + substitution, + }); + let ty = match proj_predicate.term.kind() { + rustc_type_ir::TermKind::Ty(ty) => ty, + rustc_type_ir::TermKind::Const(_) => unimplemented!(), + }; + let ty = ty.to_chalk(interner); + let alias_eq = chalk_ir::AliasEq { alias, ty }; + let where_clause = chalk_ir::WhereClause::AliasEq(alias_eq); + chalk_ir::GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(where_clause)) + } + rustc_type_ir::PredicateKind::Clause(rustc_type_ir::ClauseKind::TypeOutlives( + outlives, + )) => { + let lifetime = outlives.1.to_chalk(interner); + let ty = outlives.0.to_chalk(interner); + let where_clause = + chalk_ir::WhereClause::TypeOutlives(chalk_ir::TypeOutlives { lifetime, ty }); + chalk_ir::GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(where_clause)) + } + rustc_type_ir::PredicateKind::Clause(rustc_type_ir::ClauseKind::RegionOutlives( + outlives, + )) => { + let a = outlives.0.to_chalk(interner); + let b = outlives.1.to_chalk(interner); + let where_clause = + chalk_ir::WhereClause::LifetimeOutlives(chalk_ir::LifetimeOutlives { a, b }); + chalk_ir::GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(where_clause)) + } + rustc_type_ir::PredicateKind::AliasRelate( + alias_term, + target_term, + alias_relation_direction, + ) => { + let term_to_generic_arg = |term: Term<'db>| match term { + Term::Ty(ty) => chalk_ir::GenericArg::new( + Interner, + 
chalk_ir::GenericArgData::Ty(ty.to_chalk(interner)), + ), + Term::Const(const_) => chalk_ir::GenericArg::new( + Interner, + chalk_ir::GenericArgData::Const(const_.to_chalk(interner)), + ), + }; + + chalk_ir::GoalData::EqGoal(chalk_ir::EqGoal { + a: term_to_generic_arg(alias_term), + b: term_to_generic_arg(target_term), + }) + } + rustc_type_ir::PredicateKind::Clause(_) => unimplemented!(), + rustc_type_ir::PredicateKind::DynCompatible(_) => unimplemented!(), + rustc_type_ir::PredicateKind::Subtype(_) => unimplemented!(), + rustc_type_ir::PredicateKind::Coerce(_) => unimplemented!(), + rustc_type_ir::PredicateKind::ConstEquate(_, _) => unimplemented!(), + rustc_type_ir::PredicateKind::Ambiguous => unimplemented!(), + rustc_type_ir::PredicateKind::NormalizesTo(_) => unimplemented!(), + } + } +} + +impl<'db> ChalkToNextSolver<'db, TraitRef<'db>> for chalk_ir::TraitRef { + fn to_nextsolver(&self, interner: DbInterner<'db>) -> TraitRef<'db> { + let args = self.substitution.to_nextsolver(interner); + TraitRef::new_from_args(interner, from_chalk_trait_id(self.trait_id).into(), args) + } +} + +impl<'db> NextSolverToChalk<'db, chalk_ir::TraitRef> for TraitRef<'db> { + fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::TraitRef { + let trait_id = to_chalk_trait_id(self.def_id.0); + let substitution = self.args.to_chalk(interner); + chalk_ir::TraitRef { trait_id, substitution } + } +} + +impl<'db> ChalkToNextSolver<'db, PredicateKind<'db>> for chalk_ir::WhereClause { + fn to_nextsolver(&self, interner: DbInterner<'db>) -> PredicateKind<'db> { + match self { + chalk_ir::WhereClause::Implemented(trait_ref) => { + let predicate = TraitPredicate { + trait_ref: trait_ref.to_nextsolver(interner), + polarity: rustc_type_ir::PredicatePolarity::Positive, + }; + PredicateKind::Clause(ClauseKind::Trait(predicate)) + } + chalk_ir::WhereClause::AliasEq(alias_eq) => { + let projection = match &alias_eq.alias { + chalk_ir::AliasTy::Projection(p) => p, + _ => unimplemented!(), + }; + let def_id = + SolverDefId::TypeAliasId(from_assoc_type_id(projection.associated_ty_id)); + let args = projection.substitution.to_nextsolver(interner); + let term: Ty<'db> = alias_eq.ty.to_nextsolver(interner); + let term: Term<'db> = term.into(); + let predicate = ProjectionPredicate { + projection_term: AliasTerm::new_from_args(interner, def_id, args), + term, + }; + PredicateKind::Clause(ClauseKind::Projection(predicate)) + } + chalk_ir::WhereClause::TypeOutlives(type_outlives) => { + let ty = type_outlives.ty.to_nextsolver(interner); + let r = type_outlives.lifetime.to_nextsolver(interner); + PredicateKind::Clause(ClauseKind::TypeOutlives(OutlivesPredicate(ty, r))) + } + chalk_ir::WhereClause::LifetimeOutlives(lifetime_outlives) => { + let a = lifetime_outlives.a.to_nextsolver(interner); + let b = lifetime_outlives.b.to_nextsolver(interner); + PredicateKind::Clause(ClauseKind::RegionOutlives(OutlivesPredicate(a, b))) + } + } + } +} + +impl<'db, I> NextSolverToChalk<'db, chalk_ir::ConstrainedSubst> for I +where + I: IntoIterator>, +{ + fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::ConstrainedSubst { + chalk_ir::ConstrainedSubst { + constraints: chalk_ir::Constraints::empty(Interner), + subst: GenericArgs::new_from_iter(interner, self).to_chalk(interner), + } + } +} + +pub fn convert_canonical_args_for_result<'db>( + interner: DbInterner<'db>, + args: Canonical<'db, Vec>>, +) -> chalk_ir::Canonical> { + args.to_chalk(interner) +} + +pub fn convert_args_for_result<'db>( + interner: DbInterner<'db>, + args: 
&[GenericArg<'db>], +) -> crate::Substitution { + let mut substs = Vec::with_capacity(args.len()); + for arg in args { + match (*arg).kind() { + rustc_type_ir::GenericArgKind::Type(ty) => { + let ty = convert_ty_for_result(interner, ty); + substs.push(chalk_ir::GenericArgData::Ty(ty).intern(Interner)); + } + rustc_type_ir::GenericArgKind::Lifetime(region) => { + let lifetime = convert_region_for_result(interner, region); + substs.push(chalk_ir::GenericArgData::Lifetime(lifetime).intern(Interner)); + } + rustc_type_ir::GenericArgKind::Const(const_) => { + substs.push( + chalk_ir::GenericArgData::Const(convert_const_for_result(interner, const_)) + .intern(Interner), + ); + } + } + } + Substitution::from_iter(Interner, substs) +} + +pub(crate) fn convert_ty_for_result<'db>(interner: DbInterner<'db>, ty: Ty<'db>) -> crate::Ty { + use crate::{Scalar, TyKind}; + use chalk_ir::{FloatTy, IntTy, UintTy}; + match ty.kind() { + rustc_type_ir::TyKind::Bool => TyKind::Scalar(Scalar::Bool), + rustc_type_ir::TyKind::Char => TyKind::Scalar(Scalar::Char), + rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::I8) => { + TyKind::Scalar(Scalar::Int(IntTy::I8)) + } + rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::I16) => { + TyKind::Scalar(Scalar::Int(IntTy::I16)) + } + rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::I32) => { + TyKind::Scalar(Scalar::Int(IntTy::I32)) + } + rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::I64) => { + TyKind::Scalar(Scalar::Int(IntTy::I64)) + } + rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::I128) => { + TyKind::Scalar(Scalar::Int(IntTy::I128)) + } + rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::Isize) => { + TyKind::Scalar(Scalar::Int(IntTy::Isize)) + } + rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::U8) => { + TyKind::Scalar(Scalar::Uint(UintTy::U8)) + } + rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::U16) => { + TyKind::Scalar(Scalar::Uint(UintTy::U16)) + } + rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::U32) => { + TyKind::Scalar(Scalar::Uint(UintTy::U32)) + } + rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::U64) => { + TyKind::Scalar(Scalar::Uint(UintTy::U64)) + } + rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::U128) => { + TyKind::Scalar(Scalar::Uint(UintTy::U128)) + } + rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::Usize) => { + TyKind::Scalar(Scalar::Uint(UintTy::Usize)) + } + rustc_type_ir::TyKind::Float(rustc_type_ir::FloatTy::F16) => { + TyKind::Scalar(Scalar::Float(FloatTy::F16)) + } + rustc_type_ir::TyKind::Float(rustc_type_ir::FloatTy::F32) => { + TyKind::Scalar(Scalar::Float(FloatTy::F32)) + } + rustc_type_ir::TyKind::Float(rustc_type_ir::FloatTy::F64) => { + TyKind::Scalar(Scalar::Float(FloatTy::F64)) + } + rustc_type_ir::TyKind::Float(rustc_type_ir::FloatTy::F128) => { + TyKind::Scalar(Scalar::Float(FloatTy::F128)) + } + rustc_type_ir::TyKind::Str => TyKind::Str, + rustc_type_ir::TyKind::Error(_) => TyKind::Error, + rustc_type_ir::TyKind::Never => TyKind::Never, + + rustc_type_ir::TyKind::Adt(def, args) => { + let adt_id = def.inner().id; + let subst = convert_args_for_result(interner, args.as_slice()); + TyKind::Adt(chalk_ir::AdtId(adt_id), subst) + } + + rustc_type_ir::TyKind::Infer(infer_ty) => { + let (var, kind) = match infer_ty { + rustc_type_ir::InferTy::TyVar(var) => { + (InferenceVar::from(var.as_u32()), TyVariableKind::General) + } + rustc_type_ir::InferTy::IntVar(var) => { + (InferenceVar::from(var.as_u32()), TyVariableKind::Integer) + } + rustc_type_ir::InferTy::FloatVar(var) => { + 
(InferenceVar::from(var.as_u32()), TyVariableKind::Float) + } + rustc_type_ir::InferTy::FreshFloatTy(..) + | rustc_type_ir::InferTy::FreshIntTy(..) + | rustc_type_ir::InferTy::FreshTy(..) => { + panic!("Freshening shouldn't happen.") + } + }; + TyKind::InferenceVar(var, kind) + } + + rustc_type_ir::TyKind::Ref(r, ty, mutability) => { + let mutability = match mutability { + rustc_ast_ir::Mutability::Mut => chalk_ir::Mutability::Mut, + rustc_ast_ir::Mutability::Not => chalk_ir::Mutability::Not, + }; + let r = convert_region_for_result(interner, r); + let ty = convert_ty_for_result(interner, ty); + TyKind::Ref(mutability, r, ty) + } + + rustc_type_ir::TyKind::Tuple(tys) => { + let size = tys.len(); + let subst = Substitution::from_iter( + Interner, + tys.iter().map(|ty| { + chalk_ir::GenericArgData::Ty(convert_ty_for_result(interner, ty)) + .intern(Interner) + }), + ); + TyKind::Tuple(size, subst) + } + + rustc_type_ir::TyKind::Array(ty, const_) => { + let ty = convert_ty_for_result(interner, ty); + let const_ = convert_const_for_result(interner, const_); + TyKind::Array(ty, const_) + } + + rustc_type_ir::TyKind::Alias(alias_ty_kind, alias_ty) => match alias_ty_kind { + rustc_type_ir::AliasTyKind::Projection => { + let assoc_ty_id = match alias_ty.def_id { + SolverDefId::TypeAliasId(id) => id, + _ => unreachable!(), + }; + let associated_ty_id = to_assoc_type_id(assoc_ty_id); + let substitution = convert_args_for_result(interner, alias_ty.args.as_slice()); + TyKind::Alias(crate::AliasTy::Projection(crate::ProjectionTy { + associated_ty_id, + substitution, + })) + } + rustc_type_ir::AliasTyKind::Opaque => { + let opaque_ty_id = match alias_ty.def_id { + SolverDefId::InternedOpaqueTyId(id) => id, + _ => unreachable!(), + }; + let substitution = convert_args_for_result(interner, alias_ty.args.as_slice()); + TyKind::Alias(chalk_ir::AliasTy::Opaque(chalk_ir::OpaqueTy { + opaque_ty_id: opaque_ty_id.into(), + substitution, + })) + } + rustc_type_ir::AliasTyKind::Inherent => unimplemented!(), + rustc_type_ir::AliasTyKind::Free => unimplemented!(), + }, + + // For `Placeholder`, `Bound` and `Param`, see the comment on the reverse conversion. + rustc_type_ir::TyKind::Placeholder(placeholder) => { + unimplemented!( + "A `rustc_type_ir::TyKind::Placeholder` doesn't have a direct \ + correspondence in Chalk, as it represents a universally instantiated `Bound`.\n\ + It therefore feels safer to leave it panicking, but if you hit this panic \ + feel free to do the same as in `rustc_type_ir::TyKind::Bound` here." 
+ ) + } + rustc_type_ir::TyKind::Bound(debruijn_index, ty) => TyKind::BoundVar(chalk_ir::BoundVar { + debruijn: chalk_ir::DebruijnIndex::new(debruijn_index.as_u32()), + index: ty.var.as_usize(), + }), + rustc_type_ir::TyKind::Param(param) => { + let placeholder = to_placeholder_idx(interner.db, param.id.into(), param.index); + TyKind::Placeholder(placeholder) + } + + rustc_type_ir::TyKind::FnPtr(bound_sig, fn_header) => { + let num_binders = bound_sig.bound_vars().len(); + let sig = chalk_ir::FnSig { + abi: fn_header.abi, + safety: match fn_header.safety { + crate::next_solver::abi::Safety::Safe => chalk_ir::Safety::Safe, + crate::next_solver::abi::Safety::Unsafe => chalk_ir::Safety::Unsafe, + }, + variadic: fn_header.c_variadic, + }; + let args = GenericArgs::new_from_iter( + interner, + bound_sig.skip_binder().inputs_and_output.iter().map(|a| a.into()), + ); + let substitution = convert_args_for_result(interner, args.as_slice()); + let substitution = chalk_ir::FnSubst(substitution); + let fnptr = chalk_ir::FnPointer { num_binders, sig, substitution }; + TyKind::Function(fnptr) + } + + rustc_type_ir::TyKind::Dynamic(preds, region) => { + let self_ty = Ty::new_bound( + interner, + DebruijnIndex::from_u32(1), + BoundTy { kind: BoundTyKind::Anon, var: BoundVar::from_u32(0) }, + ); + let bounds = chalk_ir::QuantifiedWhereClauses::from_iter( + Interner, + preds.iter().map(|p| { + let binders = chalk_ir::VariableKinds::from_iter( + Interner, + p.bound_vars().iter().map(|b| match b { + BoundVarKind::Ty(kind) => { + chalk_ir::VariableKind::Ty(TyVariableKind::General) + } + BoundVarKind::Region(kind) => chalk_ir::VariableKind::Lifetime, + BoundVarKind::Const => { + chalk_ir::VariableKind::Const(crate::TyKind::Error.intern(Interner)) + } + }), + ); + + // Rust and chalk have slightly different + // representation for trait objects. + // + // Chalk uses `for for<'a> T0: Trait<'a>` while rustc + // uses `ExistentialPredicate`s, which do not have a self ty. + // We need to shift escaping bound vars by 1 to accommodate + // the newly introduced `for` binder. 
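// Illustrative sketch of the difference: `dyn Trait<Assoc = u32>` reaches this branch as
// the predicate list `[ExistentialPredicate::Trait(Trait), ExistentialPredicate::Projection(Assoc = u32)]`
// with the self type implicit, while the Chalk `DynTy` built here binds a fresh self
// type `T0` in its `bounds`, roughly `exists<T0> { T0: Trait, <T0 as Trait>::Assoc == u32 }`.
// `self_ty` above is that `T0` (a bound var one binder out, hence debruijn index 1),
// which is why any escaping bound vars in `p` must be shifted by one below.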
+ let p = shift_vars(interner, p, 1); + + let where_clause = match p.skip_binder() { + rustc_type_ir::ExistentialPredicate::Trait(trait_ref) => { + let trait_ref = TraitRef::new( + interner, + trait_ref.def_id, + [self_ty.into()].into_iter().chain(trait_ref.args.iter()), + ); + let trait_id = to_chalk_trait_id(trait_ref.def_id.0); + let substitution = + convert_args_for_result(interner, trait_ref.args.as_slice()); + let trait_ref = chalk_ir::TraitRef { trait_id, substitution }; + chalk_ir::WhereClause::Implemented(trait_ref) + } + rustc_type_ir::ExistentialPredicate::AutoTrait(trait_) => { + let trait_id = to_chalk_trait_id(trait_.0); + let substitution = chalk_ir::Substitution::from1( + Interner, + convert_ty_for_result(interner, self_ty), + ); + let trait_ref = chalk_ir::TraitRef { trait_id, substitution }; + chalk_ir::WhereClause::Implemented(trait_ref) + } + rustc_type_ir::ExistentialPredicate::Projection(existential_projection) => { + let projection = ProjectionPredicate { + projection_term: AliasTerm::new( + interner, + existential_projection.def_id, + [self_ty.into()] + .iter() + .chain(existential_projection.args.iter()), + ), + term: existential_projection.term, + }; + let associated_ty_id = match projection.projection_term.def_id { + SolverDefId::TypeAliasId(id) => to_assoc_type_id(id), + _ => unreachable!(), + }; + let substitution = convert_args_for_result( + interner, + projection.projection_term.args.as_slice(), + ); + let alias = chalk_ir::AliasTy::Projection(chalk_ir::ProjectionTy { + associated_ty_id, + substitution, + }); + let ty = match projection.term { + Term::Ty(ty) => ty, + _ => unreachable!(), + }; + let ty = convert_ty_for_result(interner, ty); + let alias_eq = chalk_ir::AliasEq { alias, ty }; + chalk_ir::WhereClause::AliasEq(alias_eq) + } + }; + chalk_ir::Binders::new(binders, where_clause) + }), + ); + let binders = chalk_ir::VariableKinds::from1( + Interner, + chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General), + ); + let bounds = chalk_ir::Binders::new(binders, bounds); + let dyn_ty = + chalk_ir::DynTy { bounds, lifetime: convert_region_for_result(interner, region) }; + TyKind::Dyn(dyn_ty) + } + + rustc_type_ir::TyKind::Slice(ty) => { + let ty = convert_ty_for_result(interner, ty); + TyKind::Slice(ty) + } + + rustc_type_ir::TyKind::Foreign(foreign) => TyKind::Foreign(to_foreign_def_id(foreign.0)), + rustc_type_ir::TyKind::Pat(_, _) => unimplemented!(), + rustc_type_ir::TyKind::RawPtr(ty, mutability) => { + let mutability = match mutability { + rustc_ast_ir::Mutability::Mut => chalk_ir::Mutability::Mut, + rustc_ast_ir::Mutability::Not => chalk_ir::Mutability::Not, + }; + let ty = convert_ty_for_result(interner, ty); + TyKind::Raw(mutability, ty) + } + rustc_type_ir::TyKind::FnDef(def_id, args) => { + let subst = convert_args_for_result(interner, args.as_slice()); + TyKind::FnDef(def_id.0.to_chalk(interner.db()), subst) + } + + rustc_type_ir::TyKind::Closure(def_id, args) => { + let subst = convert_args_for_result(interner, args.as_slice()); + TyKind::Closure(def_id.0.into(), subst) + } + rustc_type_ir::TyKind::CoroutineClosure(_, _) => unimplemented!(), + rustc_type_ir::TyKind::Coroutine(def_id, args) => { + let subst = convert_args_for_result(interner, args.as_slice()); + TyKind::Coroutine(def_id.0.into(), subst) + } + rustc_type_ir::TyKind::CoroutineWitness(def_id, args) => { + let subst = convert_args_for_result(interner, args.as_slice()); + TyKind::CoroutineWitness(def_id.0.into(), subst) + } + + rustc_type_ir::TyKind::UnsafeBinder(_) => 
unimplemented!(), + } + .intern(Interner) +} + +pub fn convert_const_for_result<'db>( + interner: DbInterner<'db>, + const_: Const<'db>, +) -> crate::Const { + let value: chalk_ir::ConstValue = match const_.kind() { + rustc_type_ir::ConstKind::Infer(rustc_type_ir::InferConst::Var(var)) => { + chalk_ir::ConstValue::InferenceVar(chalk_ir::InferenceVar::from(var.as_u32())) + } + rustc_type_ir::ConstKind::Infer(rustc_type_ir::InferConst::Fresh(fresh)) => { + panic!("Vars should not be freshened.") + } + rustc_type_ir::ConstKind::Param(param) => { + let placeholder = to_placeholder_idx(interner.db, param.id.into(), param.index); + chalk_ir::ConstValue::Placeholder(placeholder) + } + rustc_type_ir::ConstKind::Bound(debruijn_index, var) => { + chalk_ir::ConstValue::BoundVar(chalk_ir::BoundVar::new( + chalk_ir::DebruijnIndex::new(debruijn_index.as_u32()), + var.var.index(), + )) + } + rustc_type_ir::ConstKind::Placeholder(placeholder_const) => { + unimplemented!( + "A `rustc_type_ir::ConstKind::Placeholder` doesn't have a direct \ + correspondence in Chalk, as it represents a universally instantiated `Bound`.\n\ + It therefore feels safer to leave it panicking, but if you hit this panic \ + feel free to do the same as in `rustc_type_ir::ConstKind::Bound` here." + ) + } + rustc_type_ir::ConstKind::Unevaluated(unevaluated_const) => { + let id = match unevaluated_const.def { + SolverDefId::ConstId(id) => GeneralConstId::ConstId(id), + SolverDefId::StaticId(id) => GeneralConstId::StaticId(id), + _ => unreachable!(), + }; + let subst = convert_args_for_result(interner, unevaluated_const.args.as_slice()); + chalk_ir::ConstValue::Concrete(chalk_ir::ConcreteConst { + interned: ConstScalar::UnevaluatedConst(id, subst), + }) + } + rustc_type_ir::ConstKind::Value(value_const) => { + let bytes = value_const.value.inner(); + let value = chalk_ir::ConstValue::Concrete(chalk_ir::ConcreteConst { + // SAFETY: we will never actually use this without a database + interned: ConstScalar::Bytes(bytes.0.clone(), unsafe { + std::mem::transmute::, MemoryMap<'static>>(bytes.1.clone()) + }), + }); + return chalk_ir::ConstData { + ty: convert_ty_for_result(interner, value_const.ty), + value, + } + .intern(Interner); + } + rustc_type_ir::ConstKind::Error(_) => { + chalk_ir::ConstValue::Concrete(chalk_ir::ConcreteConst { + interned: ConstScalar::Unknown, + }) + } + rustc_type_ir::ConstKind::Expr(_) => unimplemented!(), + }; + chalk_ir::ConstData { ty: crate::TyKind::Error.intern(Interner), value }.intern(Interner) +} + +pub fn convert_region_for_result<'db>( + interner: DbInterner<'db>, + region: Region<'db>, +) -> crate::Lifetime { + let lifetime = match region.kind() { + rustc_type_ir::RegionKind::ReEarlyParam(early) => { + let placeholder = lt_to_placeholder_idx(interner.db, early.id, early.index); + chalk_ir::LifetimeData::Placeholder(placeholder) + } + rustc_type_ir::RegionKind::ReBound(db, bound) => { + chalk_ir::LifetimeData::BoundVar(chalk_ir::BoundVar::new( + chalk_ir::DebruijnIndex::new(db.as_u32()), + bound.var.as_usize(), + )) + } + rustc_type_ir::RegionKind::RePlaceholder(placeholder) => unimplemented!( + "A `rustc_type_ir::RegionKind::RePlaceholder` doesn't have a direct \ + correspondence in Chalk, as it represents a universally instantiated `Bound`.\n\ + It therefore feels safer to leave it panicking, but if you hit this panic \ + feel free to do the same as in `rustc_type_ir::RegionKind::ReBound` here." 
+ ), + rustc_type_ir::RegionKind::ReLateParam(_) => unimplemented!(), + rustc_type_ir::RegionKind::ReStatic => chalk_ir::LifetimeData::Static, + rustc_type_ir::RegionKind::ReVar(vid) => { + chalk_ir::LifetimeData::InferenceVar(chalk_ir::InferenceVar::from(vid.as_u32())) + } + rustc_type_ir::RegionKind::ReErased => chalk_ir::LifetimeData::Erased, + rustc_type_ir::RegionKind::ReError(_) => chalk_ir::LifetimeData::Error, + }; + chalk_ir::Lifetime::new(Interner, lifetime) +} + +pub trait InferenceVarExt { + fn to_vid(self) -> rustc_type_ir::TyVid; + fn from_vid(vid: rustc_type_ir::TyVid) -> InferenceVar; +} + +impl InferenceVarExt for InferenceVar { + fn to_vid(self) -> rustc_type_ir::TyVid { + rustc_type_ir::TyVid::from_u32(self.index()) + } + fn from_vid(vid: rustc_type_ir::TyVid) -> InferenceVar { + InferenceVar::from(vid.as_u32()) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/normalize.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/normalize.rs new file mode 100644 index 0000000000000..41cb4884404f1 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/normalize.rs @@ -0,0 +1,281 @@ +use rustc_next_trait_solver::placeholder::BoundVarReplacer; +use rustc_type_ir::{ + AliasRelationDirection, FallibleTypeFolder, Flags, Interner, TermKind, TypeFoldable, + TypeFolder, TypeSuperFoldable, TypeVisitableExt, UniverseIndex, + inherent::{IntoKind, Term as _}, +}; + +use crate::next_solver::SolverDefId; +use crate::next_solver::{ + Binder, Const, ConstKind, DbInterner, Goal, ParamEnv, Predicate, PredicateKind, Term, Ty, + TyKind, + fulfill::{FulfillmentCtxt, NextSolverError}, + infer::{ + InferCtxt, + at::At, + traits::{Obligation, ObligationCause}, + }, + util::PlaceholderReplacer, +}; + +/// Deeply normalize all aliases in `value`. This does not handle inference and expects +/// its input to be already fully resolved. +pub fn deeply_normalize<'db, T>(at: At<'_, 'db>, value: T) -> Result>> +where + T: TypeFoldable>, +{ + assert!(!value.has_escaping_bound_vars()); + deeply_normalize_with_skipped_universes(at, value, vec![]) +} + +/// Deeply normalize all aliases in `value`. This does not handle inference and expects +/// its input to be already fully resolved. +/// +/// Additionally takes a list of universes which represents the binders which have been +/// entered before passing `value` to the function. This is currently needed for +/// `normalize_erasing_regions`, which skips binders as it walks through a type. +pub fn deeply_normalize_with_skipped_universes<'db, T>( + at: At<'_, 'db>, + value: T, + universes: Vec>, +) -> Result>> +where + T: TypeFoldable>, +{ + let (value, coroutine_goals) = + deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals( + at, value, universes, + )?; + assert_eq!(coroutine_goals, vec![]); + + Ok(value) +} + +/// Deeply normalize all aliases in `value`. This does not handle inference and expects +/// its input to be already fully resolved. +/// +/// Additionally takes a list of universes which represents the binders which have been +/// entered before passing `value` to the function. This is currently needed for +/// `normalize_erasing_regions`, which skips binders as it walks through a type. +/// +/// This returns a set of stalled obligations involving coroutines if the typing mode of +/// the underlying infcx has any stalled coroutine def ids. 
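// Usage sketch (illustrative; `infcx`, `param_env` and `ty` are hypothetical bindings,
// not defined in this file). Callers normally go through `deeply_normalize` above,
// which bottoms out in the function below:
//
//     let at = infcx.at(&ObligationCause::dummy(), param_env);
//     let normalized = deeply_normalize(at, ty)?;
//
// returning either the value with all aliases deeply normalized or the accumulated
// `NextSolverError`s from the fulfillment context.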
+pub fn deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals<'db, T>( + at: At<'_, 'db>, + value: T, + universes: Vec>, +) -> Result<(T, Vec>>), Vec>> +where + T: TypeFoldable>, +{ + let fulfill_cx = FulfillmentCtxt::new(at.infcx); + let mut folder = NormalizationFolder { + at, + fulfill_cx, + depth: 0, + universes, + stalled_coroutine_goals: vec![], + }; + let value = value.try_fold_with(&mut folder)?; + let errors = folder.fulfill_cx.select_all_or_error(at.infcx); + if errors.is_empty() { Ok((value, folder.stalled_coroutine_goals)) } else { Err(errors) } +} + +struct NormalizationFolder<'me, 'db> { + at: At<'me, 'db>, + fulfill_cx: FulfillmentCtxt<'db>, + depth: usize, + universes: Vec>, + stalled_coroutine_goals: Vec>>, +} + +impl<'db> NormalizationFolder<'_, 'db> { + fn normalize_alias_term( + &mut self, + alias_term: Term<'db>, + ) -> Result, Vec>> { + let infcx = self.at.infcx; + let interner = infcx.interner; + let recursion_limit = interner.recursion_limit(); + + self.depth += 1; + + let infer_term = infcx.next_term_var_of_kind(alias_term); + let obligation = Obligation::new( + interner, + self.at.cause.clone(), + self.at.param_env, + PredicateKind::AliasRelate(alias_term, infer_term, AliasRelationDirection::Equate), + ); + + if self.depth > recursion_limit { + // let term = alias_term.to_alias_term().unwrap(); + // self.at.infcx.err_ctxt().report_overflow_error( + // OverflowCause::DeeplyNormalize(term), + // self.at.cause.span, + // true, + // |_| {}, + // ); + return Err(vec![NextSolverError::Overflow(obligation)]); + } + + self.fulfill_cx.register_predicate_obligation(infcx, obligation); + self.select_all_and_stall_coroutine_predicates()?; + + // Alias is guaranteed to be fully structurally resolved, + // so we can super fold here. + let term = infcx.resolve_vars_if_possible(infer_term); + // super-folding the `term` will directly fold the `Ty` or `Const` so + // we have to match on the term and super-fold them manually. + let result = match term.kind() { + TermKind::Ty(ty) => ty.try_super_fold_with(self)?.into(), + TermKind::Const(ct) => ct.try_super_fold_with(self)?.into(), + }; + self.depth -= 1; + Ok(result) + } + + fn select_all_and_stall_coroutine_predicates( + &mut self, + ) -> Result<(), Vec>> { + let errors = self.fulfill_cx.select_where_possible(self.at.infcx); + if !errors.is_empty() { + return Err(errors); + } + + self.stalled_coroutine_goals.extend( + self.fulfill_cx + .drain_stalled_obligations_for_coroutines(self.at.infcx) + .into_iter() + .map(|obl| obl.as_goal()), + ); + + let errors = self.fulfill_cx.collect_remaining_errors(self.at.infcx); + if !errors.is_empty() { + return Err(errors); + } + + Ok(()) + } +} + +impl<'db> FallibleTypeFolder> for NormalizationFolder<'_, 'db> { + type Error = Vec>; + + fn cx(&self) -> DbInterner<'db> { + self.at.infcx.interner + } + + fn try_fold_binder>>( + &mut self, + t: Binder<'db, T>, + ) -> Result, Self::Error> { + self.universes.push(None); + let t = t.try_super_fold_with(self)?; + self.universes.pop(); + Ok(t) + } + + fn try_fold_ty(&mut self, ty: Ty<'db>) -> Result, Self::Error> { + let infcx = self.at.infcx; + debug_assert_eq!(ty, infcx.shallow_resolve(ty)); + if !ty.has_aliases() { + return Ok(ty); + } + + let TyKind::Alias(..) 
= ty.kind() else { return ty.try_super_fold_with(self) }; + + if ty.has_escaping_bound_vars() { + let (ty, mapped_regions, mapped_types, mapped_consts) = + BoundVarReplacer::replace_bound_vars(infcx, &mut self.universes, ty); + let result = self.normalize_alias_term(ty.into())?.expect_type(); + Ok(PlaceholderReplacer::replace_placeholders( + infcx, + mapped_regions, + mapped_types, + mapped_consts, + &self.universes, + result, + )) + } else { + Ok(self.normalize_alias_term(ty.into())?.expect_type()) + } + } + + fn try_fold_const(&mut self, ct: Const<'db>) -> Result, Self::Error> { + let infcx = self.at.infcx; + debug_assert_eq!(ct, infcx.shallow_resolve_const(ct)); + if !ct.has_aliases() { + return Ok(ct); + } + + let ConstKind::Unevaluated(..) = ct.kind() else { return ct.try_super_fold_with(self) }; + + if ct.has_escaping_bound_vars() { + let (ct, mapped_regions, mapped_types, mapped_consts) = + BoundVarReplacer::replace_bound_vars(infcx, &mut self.universes, ct); + let result = self.normalize_alias_term(ct.into())?.expect_const(); + Ok(PlaceholderReplacer::replace_placeholders( + infcx, + mapped_regions, + mapped_types, + mapped_consts, + &self.universes, + result, + )) + } else { + Ok(self.normalize_alias_term(ct.into())?.expect_const()) + } + } +} + +// Deeply normalize a value and return it +pub(crate) fn deeply_normalize_for_diagnostics<'db, T: TypeFoldable>>( + infcx: &InferCtxt<'db>, + param_env: ParamEnv<'db>, + t: T, +) -> T { + t.fold_with(&mut DeeplyNormalizeForDiagnosticsFolder { + at: infcx.at(&ObligationCause::dummy(), param_env), + }) +} + +struct DeeplyNormalizeForDiagnosticsFolder<'a, 'tcx> { + at: At<'a, 'tcx>, +} + +impl<'db> TypeFolder> for DeeplyNormalizeForDiagnosticsFolder<'_, 'db> { + fn cx(&self) -> DbInterner<'db> { + self.at.infcx.interner + } + + fn fold_ty(&mut self, ty: Ty<'db>) -> Ty<'db> { + let infcx = self.at.infcx; + let result: Result<_, Vec>> = infcx.commit_if_ok(|_| { + deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals( + self.at, + ty, + vec![None; ty.outer_exclusive_binder().as_usize()], + ) + }); + match result { + Ok((ty, _)) => ty, + Err(_) => ty.super_fold_with(self), + } + } + + fn fold_const(&mut self, ct: Const<'db>) -> Const<'db> { + let infcx = self.at.infcx; + let result: Result<_, Vec>> = infcx.commit_if_ok(|_| { + deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals( + self.at, + ct, + vec![None; ct.outer_exclusive_binder().as_usize()], + ) + }); + match result { + Ok((ct, _)) => ct, + Err(_) => ct.super_fold_with(self), + } + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/obligation_ctxt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/obligation_ctxt.rs new file mode 100644 index 0000000000000..8e2dc0dec4ed3 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/obligation_ctxt.rs @@ -0,0 +1,203 @@ +use hir_def::TraitId; +use rustc_type_ir::relate::Relate; +use rustc_type_ir::{TypeFoldable, Upcast, Variance}; + +use crate::next_solver::fulfill::{FulfillmentCtxt, NextSolverError}; +use crate::next_solver::infer::at::ToTrace; +use crate::next_solver::infer::traits::{ + Obligation, ObligationCause, PredicateObligation, PredicateObligations, +}; +use crate::next_solver::infer::{DefineOpaqueTypes, InferCtxt, InferOk, TypeTrace}; +use crate::next_solver::{Const, DbInterner, ParamEnv, Term, TraitRef, Ty, TypeError}; + +/// Used if you want to have pleasant experience when dealing +/// with obligations outside of hir or mir typeck. 
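// Illustrative sketch (not part of this patch): the intended register-then-select flow
// for `ObligationCtxt`. `infcx`, `cause`, `env`, `ty`, `trait_id`, `expected` and
// `actual` are hypothetical values supplied by the caller.
//
//     let mut ocx = ObligationCtxt::new(&infcx);
//     ocx.register_bound(cause.clone(), env, ty, trait_id); // require `ty: Trait`
//     ocx.eq(&cause, env, expected, actual)?;               // unify two values
//     let errors = ocx.select_all_or_error();
//     if errors.is_empty() { /* all registered obligations hold */ }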
+pub struct ObligationCtxt<'a, 'db> { + pub infcx: &'a InferCtxt<'db>, + engine: FulfillmentCtxt<'db>, +} + +impl<'a, 'db> ObligationCtxt<'a, 'db> { + pub fn new(infcx: &'a InferCtxt<'db>) -> Self { + Self { infcx, engine: FulfillmentCtxt::new(infcx) } + } +} + +impl<'a, 'db> ObligationCtxt<'a, 'db> { + pub fn register_obligation(&mut self, obligation: PredicateObligation<'db>) { + self.engine.register_predicate_obligation(self.infcx, obligation); + } + + pub fn register_obligations( + &mut self, + obligations: impl IntoIterator>, + ) { + self.engine.register_predicate_obligations(self.infcx, obligations); + } + + pub fn register_infer_ok_obligations(&mut self, infer_ok: InferOk<'db, T>) -> T { + let InferOk { value, obligations } = infer_ok; + self.register_obligations(obligations); + value + } + + /// Requires that `ty` must implement the trait with `def_id` in + /// the given environment. This trait must not have any type + /// parameters (except for `Self`). + pub fn register_bound( + &mut self, + cause: ObligationCause, + param_env: ParamEnv<'db>, + ty: Ty<'db>, + def_id: TraitId, + ) { + let trait_ref = TraitRef::new(self.infcx.interner, def_id.into(), [ty]); + self.register_obligation(Obligation { + cause, + recursion_depth: 0, + param_env, + predicate: trait_ref.upcast(self.infcx.interner), + }); + } + + pub fn eq>( + &mut self, + cause: &ObligationCause, + param_env: ParamEnv<'db>, + expected: T, + actual: T, + ) -> Result<(), TypeError<'db>> { + self.infcx + .at(cause, param_env) + .eq(DefineOpaqueTypes::Yes, expected, actual) + .map(|infer_ok| self.register_infer_ok_obligations(infer_ok)) + } + + pub fn eq_trace>>( + &mut self, + cause: &ObligationCause, + param_env: ParamEnv<'db>, + trace: TypeTrace<'db>, + expected: T, + actual: T, + ) -> Result<(), TypeError<'db>> { + self.infcx + .at(cause, param_env) + .eq_trace(DefineOpaqueTypes::Yes, trace, expected, actual) + .map(|infer_ok| self.register_infer_ok_obligations(infer_ok)) + } + + /// Checks whether `expected` is a subtype of `actual`: `expected <: actual`. + pub fn sub>( + &mut self, + cause: &ObligationCause, + param_env: ParamEnv<'db>, + expected: T, + actual: T, + ) -> Result<(), TypeError<'db>> { + self.infcx + .at(cause, param_env) + .sub(DefineOpaqueTypes::Yes, expected, actual) + .map(|infer_ok| self.register_infer_ok_obligations(infer_ok)) + } + + pub fn relate>( + &mut self, + cause: &ObligationCause, + param_env: ParamEnv<'db>, + variance: Variance, + expected: T, + actual: T, + ) -> Result<(), TypeError<'db>> { + self.infcx + .at(cause, param_env) + .relate(DefineOpaqueTypes::Yes, expected, variance, actual) + .map(|infer_ok| self.register_infer_ok_obligations(infer_ok)) + } + + /// Checks whether `expected` is a supertype of `actual`: `expected :> actual`. + pub fn sup>( + &mut self, + cause: &ObligationCause, + param_env: ParamEnv<'db>, + expected: T, + actual: T, + ) -> Result<(), TypeError<'db>> { + self.infcx + .at(cause, param_env) + .sup(DefineOpaqueTypes::Yes, expected, actual) + .map(|infer_ok| self.register_infer_ok_obligations(infer_ok)) + } + + /// Computes the least-upper-bound, or mutual supertype, of two values. 
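// Illustrative note (not part of this patch): the least upper bound of two types is
// their most specific common supertype; for references that means the shorter lifetime
// wins, so the LUB of `&'static str` and `&'a str` is `&'a str`. A hedged usage sketch
// with hypothetical inputs:
//
//     let common = ocx.lub(&cause, env, ty_a, ty_b)?; // supertype of both inputs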
+ pub fn lub>( + &mut self, + cause: &ObligationCause, + param_env: ParamEnv<'db>, + expected: T, + actual: T, + ) -> Result> { + self.infcx + .at(cause, param_env) + .lub(expected, actual) + .map(|infer_ok| self.register_infer_ok_obligations(infer_ok)) + } + + #[must_use] + pub fn select_where_possible(&mut self) -> Vec> { + self.engine.select_where_possible(self.infcx) + } + + #[must_use] + pub fn select_all_or_error(&mut self) -> Vec> { + self.engine.select_all_or_error(self.infcx) + } + + /// Returns the not-yet-processed and stalled obligations from the + /// `ObligationCtxt`. + /// + /// Takes ownership of the context as doing operations such as + /// [`ObligationCtxt::eq`] afterwards will result in other obligations + /// getting ignored. You can make a new `ObligationCtxt` if this + /// needs to be done in a loop, for example. + #[must_use] + pub fn into_pending_obligations(self) -> PredicateObligations<'db> { + self.engine.pending_obligations() + } + + pub fn deeply_normalize>>( + &self, + cause: &ObligationCause, + param_env: ParamEnv<'db>, + value: T, + ) -> Result>> { + self.infcx.at(cause, param_env).deeply_normalize(value) + } + + pub fn structurally_normalize_ty( + &mut self, + cause: &ObligationCause, + param_env: ParamEnv<'db>, + value: Ty<'db>, + ) -> Result, Vec>> { + self.infcx.at(cause, param_env).structurally_normalize_ty(value, &mut self.engine) + } + + pub fn structurally_normalize_const( + &mut self, + cause: &ObligationCause, + param_env: ParamEnv<'db>, + value: Const<'db>, + ) -> Result, Vec>> { + self.infcx.at(cause, param_env).structurally_normalize_const(value, &mut self.engine) + } + + pub fn structurally_normalize_term( + &mut self, + cause: &ObligationCause, + param_env: ParamEnv<'db>, + value: Term<'db>, + ) -> Result, Vec>> { + self.infcx.at(cause, param_env).structurally_normalize_term(value, &mut self.engine) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/opaques.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/opaques.rs new file mode 100644 index 0000000000000..43589ab2ef139 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/opaques.rs @@ -0,0 +1,167 @@ +//! Things related to opaques in the next-trait-solver. 
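// Illustrative sketch (not part of this patch): the two wrappers in this file follow the
// same pattern -- the plain `rustc_type_ir` data is interned in salsa and read back via
// `inner()`/`Deref`. With hypothetical `interner` and `data` values:
//
//     let opaques = PredefinedOpaques::new(interner, data); // interned, cheap handle
//     let count = opaques.opaque_types.len();               // field access via Deref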
+ +use intern::Interned; +use rustc_ast_ir::try_visit; + +use crate::next_solver::SolverDefId; + +use super::{CanonicalVarKind, DbInterner, interned_vec_nolifetime_salsa}; + +pub type OpaqueTypeKey<'db> = rustc_type_ir::OpaqueTypeKey>; +pub type PredefinedOpaquesData<'db> = rustc_type_ir::solve::PredefinedOpaquesData>; +pub type ExternalConstraintsData<'db> = + rustc_type_ir::solve::ExternalConstraintsData>; + +#[salsa::interned(constructor = new_, debug)] +pub struct PredefinedOpaques<'db> { + #[returns(ref)] + kind_: rustc_type_ir::solve::PredefinedOpaquesData>, +} + +impl<'db> PredefinedOpaques<'db> { + pub fn new(interner: DbInterner<'db>, data: PredefinedOpaquesData<'db>) -> Self { + PredefinedOpaques::new_(interner.db(), data) + } + + pub fn inner(&self) -> &PredefinedOpaquesData<'db> { + salsa::with_attached_database(|db| { + let inner = self.kind_(db); + // SAFETY: ¯\_(ツ)_/¯ + unsafe { std::mem::transmute(inner) } + }) + .unwrap() + } +} + +impl<'db> rustc_type_ir::TypeVisitable> for PredefinedOpaques<'db> { + fn visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + self.opaque_types.visit_with(visitor) + } +} + +impl<'db> rustc_type_ir::TypeFoldable> for PredefinedOpaques<'db> { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + Ok(PredefinedOpaques::new( + folder.cx(), + PredefinedOpaquesData { + opaque_types: self + .opaque_types + .iter() + .cloned() + .map(|opaque| opaque.try_fold_with(folder)) + .collect::>()?, + }, + )) + } + fn fold_with>>(self, folder: &mut F) -> Self { + PredefinedOpaques::new( + folder.cx(), + PredefinedOpaquesData { + opaque_types: self + .opaque_types + .iter() + .cloned() + .map(|opaque| opaque.fold_with(folder)) + .collect(), + }, + ) + } +} + +impl<'db> std::ops::Deref for PredefinedOpaques<'db> { + type Target = PredefinedOpaquesData<'db>; + + fn deref(&self) -> &Self::Target { + self.inner() + } +} + +interned_vec_nolifetime_salsa!(SolverDefIds, SolverDefId); + +#[salsa::interned(constructor = new_, debug)] +pub struct ExternalConstraints<'db> { + #[returns(ref)] + kind_: rustc_type_ir::solve::ExternalConstraintsData>, +} + +impl<'db> ExternalConstraints<'db> { + pub fn new(interner: DbInterner<'db>, data: ExternalConstraintsData<'db>) -> Self { + ExternalConstraints::new_(interner.db(), data) + } + + pub fn inner(&self) -> &ExternalConstraintsData<'db> { + salsa::with_attached_database(|db| { + let inner = self.kind_(db); + // SAFETY: ¯\_(ツ)_/¯ + unsafe { std::mem::transmute(inner) } + }) + .unwrap() + } +} + +impl<'db> std::ops::Deref for ExternalConstraints<'db> { + type Target = ExternalConstraintsData<'db>; + + fn deref(&self) -> &Self::Target { + self.inner() + } +} + +impl<'db> rustc_type_ir::TypeVisitable> for ExternalConstraints<'db> { + fn visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + try_visit!(self.region_constraints.visit_with(visitor)); + try_visit!(self.opaque_types.visit_with(visitor)); + self.normalization_nested_goals.visit_with(visitor) + } +} + +impl<'db> rustc_type_ir::TypeFoldable> for ExternalConstraints<'db> { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + Ok(ExternalConstraints::new( + folder.cx(), + ExternalConstraintsData { + region_constraints: self.region_constraints.clone().try_fold_with(folder)?, + opaque_types: self + .opaque_types + .iter() + .cloned() + .map(|opaque| opaque.try_fold_with(folder)) + .collect::>()?, + normalization_nested_goals: self + .normalization_nested_goals + .clone() + .try_fold_with(folder)?, + }, + )) + } + fn fold_with>>(self, 
folder: &mut F) -> Self { + ExternalConstraints::new( + folder.cx(), + ExternalConstraintsData { + region_constraints: self.region_constraints.clone().fold_with(folder), + opaque_types: self + .opaque_types + .iter() + .cloned() + .map(|opaque| opaque.fold_with(folder)) + .collect(), + normalization_nested_goals: self + .normalization_nested_goals + .clone() + .fold_with(folder), + }, + ) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs new file mode 100644 index 0000000000000..86545415009a0 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs @@ -0,0 +1,904 @@ +//! Things related to predicates. + +use std::cmp::Ordering; + +use intern::Interned; +use rustc_ast_ir::try_visit; +use rustc_type_ir::{ + self as ty, CollectAndApply, DebruijnIndex, EarlyBinder, FlagComputation, Flags, + PredicatePolarity, TypeFlags, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, + TypeVisitable, Upcast, UpcastFrom, VisitorResult, WithCachedTypeInfo, + elaborate::Elaboratable, + error::{ExpectedFound, TypeError}, + inherent::{IntoKind, SliceLike}, + relate::Relate, +}; +use smallvec::{SmallVec, smallvec}; + +use crate::next_solver::TraitIdWrapper; + +use super::{Binder, BoundVarKinds, DbInterner, Region, Ty, interned_vec_db}; + +pub type BoundExistentialPredicate<'db> = Binder<'db, ExistentialPredicate<'db>>; + +pub type TraitRef<'db> = ty::TraitRef>; +pub type AliasTerm<'db> = ty::AliasTerm>; +pub type ProjectionPredicate<'db> = ty::ProjectionPredicate>; +pub type ExistentialPredicate<'db> = ty::ExistentialPredicate>; +pub type ExistentialTraitRef<'db> = ty::ExistentialTraitRef>; +pub type ExistentialProjection<'db> = ty::ExistentialProjection>; +pub type TraitPredicate<'db> = ty::TraitPredicate>; +pub type ClauseKind<'db> = ty::ClauseKind>; +pub type PredicateKind<'db> = ty::PredicateKind>; +pub type NormalizesTo<'db> = ty::NormalizesTo>; +pub type CoercePredicate<'db> = ty::CoercePredicate>; +pub type SubtypePredicate<'db> = ty::SubtypePredicate>; +pub type OutlivesPredicate<'db, T> = ty::OutlivesPredicate, T>; +pub type RegionOutlivesPredicate<'db> = OutlivesPredicate<'db, Region<'db>>; +pub type TypeOutlivesPredicate<'db> = OutlivesPredicate<'db, Ty<'db>>; +pub type PolyTraitPredicate<'db> = Binder<'db, TraitPredicate<'db>>; +pub type PolyRegionOutlivesPredicate<'db> = Binder<'db, RegionOutlivesPredicate<'db>>; +pub type PolyTypeOutlivesPredicate<'db> = Binder<'db, TypeOutlivesPredicate<'db>>; +pub type PolySubtypePredicate<'db> = Binder<'db, SubtypePredicate<'db>>; +pub type PolyCoercePredicate<'db> = Binder<'db, CoercePredicate<'db>>; +pub type PolyProjectionPredicate<'db> = Binder<'db, ProjectionPredicate<'db>>; +pub type PolyTraitRef<'db> = Binder<'db, TraitRef<'db>>; +pub type PolyExistentialTraitRef<'db> = Binder<'db, ExistentialTraitRef<'db>>; +pub type PolyExistentialProjection<'db> = Binder<'db, ExistentialProjection<'db>>; + +/// Compares via an ordering that will not change if modules are reordered or other changes are +/// made to the tree. In particular, this ordering is preserved across incremental compilations. 
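// Illustrative note (not part of this patch): this ordering puts the principal trait
// first, then projection bounds, then auto traits. A hypothetical
// `dyn Iterator<Item = u32> + Send` thus sorts as
//
//     [Trait(Iterator), Projection(Item = u32), AutoTrait(Send)]
//
// while ties inside each group currently compare as `Ordering::Equal` (see the FIXME in
// the function body below).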
+fn stable_cmp_existential_predicate<'db>( + a: &ExistentialPredicate<'db>, + b: &ExistentialPredicate<'db>, +) -> Ordering { + // FIXME: this is actual unstable - see impl in predicate.rs in `rustc_middle` + match (a, b) { + (ExistentialPredicate::Trait(_), ExistentialPredicate::Trait(_)) => Ordering::Equal, + (ExistentialPredicate::Projection(a), ExistentialPredicate::Projection(b)) => { + // Should sort by def path hash + Ordering::Equal + } + (ExistentialPredicate::AutoTrait(a), ExistentialPredicate::AutoTrait(b)) => { + // Should sort by def path hash + Ordering::Equal + } + (ExistentialPredicate::Trait(_), _) => Ordering::Less, + (ExistentialPredicate::Projection(_), ExistentialPredicate::Trait(_)) => Ordering::Greater, + (ExistentialPredicate::Projection(_), _) => Ordering::Less, + (ExistentialPredicate::AutoTrait(_), _) => Ordering::Greater, + } +} +interned_vec_db!(BoundExistentialPredicates, BoundExistentialPredicate); + +impl<'db> rustc_type_ir::inherent::BoundExistentialPredicates> + for BoundExistentialPredicates<'db> +{ + fn principal_def_id(self) -> Option { + self.principal().map(|trait_ref| trait_ref.skip_binder().def_id) + } + + fn principal( + self, + ) -> Option< + rustc_type_ir::Binder, rustc_type_ir::ExistentialTraitRef>>, + > { + self.inner()[0] + .map_bound(|this| match this { + ExistentialPredicate::Trait(tr) => Some(tr), + _ => None, + }) + .transpose() + } + + fn auto_traits(self) -> impl IntoIterator { + self.iter().filter_map(|predicate| match predicate.skip_binder() { + ExistentialPredicate::AutoTrait(did) => Some(did), + _ => None, + }) + } + + fn projection_bounds( + self, + ) -> impl IntoIterator< + Item = rustc_type_ir::Binder< + DbInterner<'db>, + rustc_type_ir::ExistentialProjection>, + >, + > { + self.iter().filter_map(|predicate| { + predicate + .map_bound(|pred| match pred { + ExistentialPredicate::Projection(projection) => Some(projection), + _ => None, + }) + .transpose() + }) + } +} + +impl<'db> rustc_type_ir::relate::Relate> for BoundExistentialPredicates<'db> { + fn relate>>( + relation: &mut R, + a: Self, + b: Self, + ) -> rustc_type_ir::relate::RelateResult, Self> { + let interner = relation.cx(); + + // We need to perform this deduplication as we sometimes generate duplicate projections in `a`. 
+ let mut a_v: Vec<_> = a.into_iter().collect(); + let mut b_v: Vec<_> = b.into_iter().collect(); + // `skip_binder` here is okay because `stable_cmp` doesn't look at binders + a_v.sort_by(|a, b| { + stable_cmp_existential_predicate(a.as_ref().skip_binder(), b.as_ref().skip_binder()) + }); + a_v.dedup(); + b_v.sort_by(|a, b| { + stable_cmp_existential_predicate(a.as_ref().skip_binder(), b.as_ref().skip_binder()) + }); + b_v.dedup(); + if a_v.len() != b_v.len() { + return Err(TypeError::ExistentialMismatch(ExpectedFound::new(a, b))); + } + + let v = std::iter::zip(a_v, b_v).map( + |(ep_a, ep_b): ( + Binder<'_, ty::ExistentialPredicate<_>>, + Binder<'_, ty::ExistentialPredicate<_>>, + )| { + match (ep_a.skip_binder(), ep_b.skip_binder()) { + (ty::ExistentialPredicate::Trait(a), ty::ExistentialPredicate::Trait(b)) => { + Ok(ep_a.rebind(ty::ExistentialPredicate::Trait( + relation.relate(ep_a.rebind(a), ep_b.rebind(b))?.skip_binder(), + ))) + } + ( + ty::ExistentialPredicate::Projection(a), + ty::ExistentialPredicate::Projection(b), + ) => Ok(ep_a.rebind(ty::ExistentialPredicate::Projection( + relation.relate(ep_a.rebind(a), ep_b.rebind(b))?.skip_binder(), + ))), + ( + ty::ExistentialPredicate::AutoTrait(a), + ty::ExistentialPredicate::AutoTrait(b), + ) if a == b => Ok(ep_a.rebind(ty::ExistentialPredicate::AutoTrait(a))), + _ => Err(TypeError::ExistentialMismatch(ExpectedFound::new(a, b))), + } + }, + ); + + CollectAndApply::collect_and_apply(v, |g| { + BoundExistentialPredicates::new_from_iter(interner, g.iter().cloned()) + }) + } +} + +#[derive(PartialEq, Eq, Hash, PartialOrd, Ord, Clone)] +pub struct InternedWrapperNoDebug(pub(crate) T); + +#[salsa::interned(constructor = new_)] +pub struct Predicate<'db> { + #[returns(ref)] + kind_: InternedWrapperNoDebug>>>, +} + +impl<'db> std::fmt::Debug for Predicate<'db> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.inner().internee.fmt(f) + } +} + +impl<'db> std::fmt::Debug + for InternedWrapperNoDebug>>> +{ + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Binder<")?; + match self.0.internee.skip_binder() { + rustc_type_ir::PredicateKind::Clause(clause_kind) => { + write!(f, "{clause_kind:?}") + } + rustc_type_ir::PredicateKind::DynCompatible(trait_def_id) => { + write!(f, "the trait `{trait_def_id:?}` is dyn-compatible") + } + rustc_type_ir::PredicateKind::Subtype(subtype_predicate) => { + write!(f, "{subtype_predicate:?}") + } + rustc_type_ir::PredicateKind::Coerce(coerce_predicate) => { + write!(f, "{coerce_predicate:?}") + } + rustc_type_ir::PredicateKind::ConstEquate(c1, c2) => { + write!(f, "the constant `{c1:?}` equals `{c2:?}`") + } + rustc_type_ir::PredicateKind::Ambiguous => write!(f, "ambiguous"), + rustc_type_ir::PredicateKind::NormalizesTo(data) => write!(f, "{data:?}"), + rustc_type_ir::PredicateKind::AliasRelate(t1, t2, dir) => { + write!(f, "{t1:?} {dir:?} {t2:?}") + } + }?; + write!(f, ", [{:?}]>", self.0.internee.bound_vars())?; + Ok(()) + } +} + +impl<'db> Predicate<'db> { + pub fn new(interner: DbInterner<'db>, kind: Binder<'db, PredicateKind<'db>>) -> Self { + let flags = FlagComputation::for_predicate(kind); + let cached = WithCachedTypeInfo { + internee: kind, + flags: flags.flags, + outer_exclusive_binder: flags.outer_exclusive_binder, + #[cfg(feature = "in-rust-tree")] + stable_hash: ena::fingerprint::Fingerprint::ZERO, + }; + Predicate::new_(interner.db(), InternedWrapperNoDebug(cached)) + } + + pub fn inner(&self) -> &WithCachedTypeInfo>> { + 
salsa::with_attached_database(|db| { + let inner = &self.kind_(db).0; + // SAFETY: The caller already has access to a `Predicate<'db>`, so borrowchecking will + // make sure that our returned value is valid for the lifetime `'db`. + unsafe { std::mem::transmute(inner) } + }) + .unwrap() + } + + /// Flips the polarity of a Predicate. + /// + /// Given `T: Trait` predicate it returns `T: !Trait` and given `T: !Trait` returns `T: Trait`. + pub fn flip_polarity(self) -> Option> { + let kind = self + .kind() + .map_bound(|kind| match kind { + PredicateKind::Clause(ClauseKind::Trait(TraitPredicate { + trait_ref, + polarity, + })) => Some(PredicateKind::Clause(ClauseKind::Trait(TraitPredicate { + trait_ref, + polarity: polarity.flip(), + }))), + + _ => None, + }) + .transpose()?; + + Some(Predicate::new(DbInterner::conjure(), kind)) + } +} + +// FIXME: should make a "header" in interned_vec + +#[derive(Debug, Clone)] +pub struct InternedClausesWrapper<'db>(SmallVec<[Clause<'db>; 2]>, TypeFlags, DebruijnIndex); + +impl<'db> PartialEq for InternedClausesWrapper<'db> { + fn eq(&self, other: &Self) -> bool { + self.0.eq(&other.0) + } +} + +impl<'db> Eq for InternedClausesWrapper<'db> {} + +impl<'db> std::hash::Hash for InternedClausesWrapper<'db> { + fn hash(&self, state: &mut H) { + self.0.hash(state) + } +} + +type InternedClauses<'db> = Interned>; + +#[salsa::interned(constructor = new_)] +pub struct Clauses<'db> { + #[returns(ref)] + inner_: InternedClausesWrapper<'db>, +} + +impl<'db> Clauses<'db> { + pub fn new_from_iter( + interner: DbInterner<'db>, + data: impl IntoIterator>, + ) -> Self { + let clauses: SmallVec<_> = data.into_iter().collect(); + let flags = FlagComputation::>::for_clauses(&clauses); + let wrapper = InternedClausesWrapper(clauses, flags.flags, flags.outer_exclusive_binder); + Clauses::new_(interner.db(), wrapper) + } + + pub fn inner(&self) -> &InternedClausesWrapper<'db> { + salsa::with_attached_database(|db| { + let inner = self.inner_(db); + // SAFETY: The caller already has access to a `Clauses<'db>`, so borrowchecking will + // make sure that our returned value is valid for the lifetime `'db`. 
+ unsafe { std::mem::transmute(inner) } + }) + .unwrap() + } +} + +impl<'db> std::fmt::Debug for Clauses<'db> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.inner().0.fmt(f) + } +} + +impl<'db> rustc_type_ir::inherent::Clauses> for Clauses<'db> {} + +impl<'db> rustc_type_ir::inherent::SliceLike for Clauses<'db> { + type Item = Clause<'db>; + + type IntoIter = ; 2]> as IntoIterator>::IntoIter; + + fn iter(self) -> Self::IntoIter { + self.inner().0.clone().into_iter() + } + + fn as_slice(&self) -> &[Self::Item] { + self.inner().0.as_slice() + } +} + +impl<'db> IntoIterator for Clauses<'db> { + type Item = Clause<'db>; + type IntoIter = ::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + rustc_type_ir::inherent::SliceLike::iter(self) + } +} + +impl<'db> Default for Clauses<'db> { + fn default() -> Self { + Clauses::new_from_iter(DbInterner::conjure(), []) + } +} + +impl<'db> rustc_type_ir::TypeSuperFoldable> for Clauses<'db> { + fn try_super_fold_with>>( + self, + folder: &mut F, + ) -> Result { + let mut clauses: SmallVec<[_; 2]> = SmallVec::with_capacity(self.inner().0.len()); + for c in self { + clauses.push(c.try_fold_with(folder)?); + } + Ok(Clauses::new_from_iter(folder.cx(), clauses)) + } + + fn super_fold_with>>( + self, + folder: &mut F, + ) -> Self { + let mut clauses: SmallVec<[_; 2]> = SmallVec::with_capacity(self.inner().0.len()); + for c in self { + clauses.push(c.fold_with(folder)); + } + Clauses::new_from_iter(folder.cx(), clauses) + } +} + +impl<'db> rustc_type_ir::TypeFoldable> for Clauses<'db> { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + use rustc_type_ir::inherent::SliceLike as _; + let inner: smallvec::SmallVec<[_; 2]> = + self.iter().map(|v| v.try_fold_with(folder)).collect::>()?; + Ok(Clauses::new_from_iter(folder.cx(), inner)) + } + fn fold_with>>(self, folder: &mut F) -> Self { + use rustc_type_ir::inherent::SliceLike as _; + let inner: smallvec::SmallVec<[_; 2]> = self.iter().map(|v| v.fold_with(folder)).collect(); + Clauses::new_from_iter(folder.cx(), inner) + } +} + +impl<'db> rustc_type_ir::TypeVisitable> for Clauses<'db> { + fn visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + use rustc_ast_ir::visit::VisitorResult; + use rustc_type_ir::inherent::SliceLike as _; + rustc_ast_ir::walk_visitable_list!(visitor, self.as_slice().iter()); + V::Result::output() + } +} + +impl<'db> rustc_type_ir::Flags for Clauses<'db> { + fn flags(&self) -> rustc_type_ir::TypeFlags { + self.inner().1 + } + + fn outer_exclusive_binder(&self) -> rustc_type_ir::DebruijnIndex { + self.inner().2 + } +} + +impl<'db> rustc_type_ir::TypeSuperVisitable> for Clauses<'db> { + fn super_visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + self.as_slice().visit_with(visitor) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] // TODO implement Debug by hand +pub struct Clause<'db>(pub(crate) Predicate<'db>); + +// We could cram the reveal into the clauses like rustc does, probably +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +pub struct ParamEnv<'db> { + pub(crate) clauses: Clauses<'db>, +} + +impl<'db> ParamEnv<'db> { + pub fn empty() -> Self { + ParamEnv { clauses: Clauses::new_from_iter(DbInterner::conjure(), []) } + } +} + +impl<'db> TypeVisitable> for ParamEnv<'db> { + fn visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + try_visit!(self.clauses.visit_with(visitor)); + V::Result::output() + } +} + +impl<'db> TypeFoldable> for ParamEnv<'db> { + fn try_fold_with>>( + self, + folder: &mut 
F, + ) -> Result { + Ok(ParamEnv { clauses: self.clauses.try_fold_with(folder)? }) + } + fn fold_with>>(self, folder: &mut F) -> Self { + ParamEnv { clauses: self.clauses.fold_with(folder) } + } +} + +impl<'db> rustc_type_ir::inherent::ParamEnv> for ParamEnv<'db> { + fn caller_bounds(self) -> impl rustc_type_ir::inherent::SliceLike> { + self.clauses + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct ParamEnvAnd<'db, T> { + pub param_env: ParamEnv<'db>, + pub value: T, +} + +impl<'db, T> ParamEnvAnd<'db, T> { + pub fn into_parts(self) -> (ParamEnv<'db>, T) { + (self.param_env, self.value) + } +} + +impl<'db> TypeVisitable> for Predicate<'db> { + fn visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + visitor.visit_predicate(*self) + } +} + +impl<'db> TypeSuperVisitable> for Predicate<'db> { + fn super_visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + (*self).kind().visit_with(visitor) + } +} + +impl<'db> TypeFoldable> for Predicate<'db> { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + folder.try_fold_predicate(self) + } + fn fold_with>>(self, folder: &mut F) -> Self { + folder.fold_predicate(self) + } +} + +impl<'db> TypeSuperFoldable> for Predicate<'db> { + fn try_super_fold_with>>( + self, + folder: &mut F, + ) -> Result { + let new = self.kind().try_fold_with(folder)?; + Ok(Predicate::new(folder.cx(), new)) + } + fn super_fold_with>>( + self, + folder: &mut F, + ) -> Self { + let new = self.kind().fold_with(folder); + Predicate::new(folder.cx(), new) + } +} + +impl<'db> Elaboratable> for Predicate<'db> { + fn predicate(&self) -> as rustc_type_ir::Interner>::Predicate { + *self + } + + fn child(&self, clause: as rustc_type_ir::Interner>::Clause) -> Self { + clause.as_predicate() + } + + fn child_with_derived_cause( + &self, + clause: as rustc_type_ir::Interner>::Clause, + _span: as rustc_type_ir::Interner>::Span, + _parent_trait_pred: rustc_type_ir::Binder< + DbInterner<'db>, + rustc_type_ir::TraitPredicate>, + >, + _index: usize, + ) -> Self { + clause.as_predicate() + } +} + +impl<'db> Flags for Predicate<'db> { + fn flags(&self) -> rustc_type_ir::TypeFlags { + self.inner().flags + } + + fn outer_exclusive_binder(&self) -> rustc_type_ir::DebruijnIndex { + self.inner().outer_exclusive_binder + } +} + +impl<'db> IntoKind for Predicate<'db> { + type Kind = Binder<'db, PredicateKind<'db>>; + + fn kind(self) -> Self::Kind { + self.inner().internee + } +} + +impl<'db> UpcastFrom, ty::PredicateKind>> for Predicate<'db> { + fn upcast_from(from: ty::PredicateKind>, interner: DbInterner<'db>) -> Self { + Binder::dummy(from).upcast(interner) + } +} +impl<'db> + UpcastFrom, ty::Binder, ty::PredicateKind>>> + for Predicate<'db> +{ + fn upcast_from( + from: ty::Binder, ty::PredicateKind>>, + interner: DbInterner<'db>, + ) -> Self { + Predicate::new(interner, from) + } +} +impl<'db> UpcastFrom, ty::ClauseKind>> for Predicate<'db> { + fn upcast_from(from: ty::ClauseKind>, interner: DbInterner<'db>) -> Self { + Binder::dummy(PredicateKind::Clause(from)).upcast(interner) + } +} +impl<'db> UpcastFrom, ty::Binder, ty::ClauseKind>>> + for Predicate<'db> +{ + fn upcast_from( + from: ty::Binder, ty::ClauseKind>>, + interner: DbInterner<'db>, + ) -> Self { + from.map_bound(PredicateKind::Clause).upcast(interner) + } +} +impl<'db> UpcastFrom, Clause<'db>> for Predicate<'db> { + fn upcast_from(from: Clause<'db>, _interner: DbInterner<'db>) -> Self { + from.0 + } +} +impl<'db> UpcastFrom, ty::NormalizesTo>> for Predicate<'db> { + fn upcast_from(from: 
ty::NormalizesTo>, interner: DbInterner<'db>) -> Self { + PredicateKind::NormalizesTo(from).upcast(interner) + } +} +impl<'db> UpcastFrom, ty::TraitRef>> for Predicate<'db> { + fn upcast_from(from: ty::TraitRef>, interner: DbInterner<'db>) -> Self { + Binder::dummy(from).upcast(interner) + } +} +impl<'db> UpcastFrom, ty::Binder, ty::TraitRef>>> + for Predicate<'db> +{ + fn upcast_from( + from: ty::Binder, ty::TraitRef>>, + interner: DbInterner<'db>, + ) -> Self { + from.map_bound(|trait_ref| TraitPredicate { + trait_ref, + polarity: PredicatePolarity::Positive, + }) + .upcast(interner) + } +} +impl<'db> UpcastFrom, Binder<'db, ty::TraitPredicate>>> + for Predicate<'db> +{ + fn upcast_from( + from: Binder<'db, ty::TraitPredicate>>, + interner: DbInterner<'db>, + ) -> Self { + from.map_bound(|it| PredicateKind::Clause(ClauseKind::Trait(it))).upcast(interner) + } +} +impl<'db> UpcastFrom, Binder<'db, ProjectionPredicate<'db>>> for Predicate<'db> { + fn upcast_from(from: Binder<'db, ProjectionPredicate<'db>>, interner: DbInterner<'db>) -> Self { + from.map_bound(|it| PredicateKind::Clause(ClauseKind::Projection(it))).upcast(interner) + } +} +impl<'db> UpcastFrom, ProjectionPredicate<'db>> for Predicate<'db> { + fn upcast_from(from: ProjectionPredicate<'db>, interner: DbInterner<'db>) -> Self { + PredicateKind::Clause(ClauseKind::Projection(from)).upcast(interner) + } +} +impl<'db> UpcastFrom, ty::TraitPredicate>> for Predicate<'db> { + fn upcast_from(from: ty::TraitPredicate>, interner: DbInterner<'db>) -> Self { + PredicateKind::Clause(ClauseKind::Trait(from)).upcast(interner) + } +} +impl<'db> UpcastFrom, ty::OutlivesPredicate, Ty<'db>>> + for Predicate<'db> +{ + fn upcast_from( + from: ty::OutlivesPredicate, Ty<'db>>, + interner: DbInterner<'db>, + ) -> Self { + PredicateKind::Clause(ClauseKind::TypeOutlives(from)).upcast(interner) + } +} +impl<'db> UpcastFrom, ty::OutlivesPredicate, Region<'db>>> + for Predicate<'db> +{ + fn upcast_from( + from: ty::OutlivesPredicate, Region<'db>>, + interner: DbInterner<'db>, + ) -> Self { + PredicateKind::Clause(ClauseKind::RegionOutlives(from)).upcast(interner) + } +} + +impl<'db> UpcastFrom, PolyRegionOutlivesPredicate<'db>> for Predicate<'db> { + fn upcast_from(from: PolyRegionOutlivesPredicate<'db>, tcx: DbInterner<'db>) -> Self { + from.map_bound(|p| PredicateKind::Clause(ClauseKind::RegionOutlives(p))).upcast(tcx) + } +} + +impl<'db> rustc_type_ir::inherent::Predicate> for Predicate<'db> { + fn as_clause(self) -> Option< as rustc_type_ir::Interner>::Clause> { + match self.kind().skip_binder() { + PredicateKind::Clause(..) => Some(self.expect_clause()), + _ => None, + } + } + + /// Whether this projection can be soundly normalized. + /// + /// Wf predicates must not be normalized, as normalization + /// can remove required bounds which would cause us to + /// unsoundly accept some programs. See #91068. + fn allow_normalization(self) -> bool { + // TODO: this should probably live in rustc_type_ir + match self.inner().as_ref().skip_binder() { + PredicateKind::Clause(ClauseKind::WellFormed(_)) + | PredicateKind::AliasRelate(..) + | PredicateKind::NormalizesTo(..) 
=> false, + PredicateKind::Clause(ClauseKind::Trait(_)) + | PredicateKind::Clause(ClauseKind::RegionOutlives(_)) + | PredicateKind::Clause(ClauseKind::TypeOutlives(_)) + | PredicateKind::Clause(ClauseKind::Projection(_)) + | PredicateKind::Clause(ClauseKind::ConstArgHasType(..)) + | PredicateKind::Clause(ClauseKind::HostEffect(..)) + | PredicateKind::Clause(ClauseKind::UnstableFeature(_)) + | PredicateKind::DynCompatible(_) + | PredicateKind::Subtype(_) + | PredicateKind::Coerce(_) + | PredicateKind::Clause(ClauseKind::ConstEvaluatable(_)) + | PredicateKind::ConstEquate(_, _) + | PredicateKind::Ambiguous => true, + } + } +} + +impl<'db> Predicate<'db> { + pub fn as_trait_clause(self) -> Option> { + let predicate = self.kind(); + match predicate.skip_binder() { + PredicateKind::Clause(ClauseKind::Trait(t)) => Some(predicate.rebind(t)), + _ => None, + } + } + + pub fn as_projection_clause(self) -> Option> { + let predicate = self.kind(); + match predicate.skip_binder() { + PredicateKind::Clause(ClauseKind::Projection(t)) => Some(predicate.rebind(t)), + _ => None, + } + } + + /// Matches a `PredicateKind::Clause` and turns it into a `Clause`, otherwise returns `None`. + pub fn as_clause(self) -> Option> { + match self.kind().skip_binder() { + PredicateKind::Clause(..) => Some(self.expect_clause()), + _ => None, + } + } + + /// Assert that the predicate is a clause. + pub fn expect_clause(self) -> Clause<'db> { + match self.kind().skip_binder() { + PredicateKind::Clause(..) => Clause(self), + _ => panic!("{self:?} is not a clause"), + } + } +} + +impl<'db> TypeVisitable> for Clause<'db> { + fn visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + visitor.visit_predicate((*self).as_predicate()) + } +} + +impl<'db> TypeFoldable> for Clause<'db> { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + Ok(folder.try_fold_predicate(self.as_predicate())?.expect_clause()) + } + fn fold_with>>(self, folder: &mut F) -> Self { + folder.fold_predicate(self.as_predicate()).expect_clause() + } +} + +impl<'db> IntoKind for Clause<'db> { + type Kind = Binder<'db, ClauseKind<'db>>; + + fn kind(self) -> Self::Kind { + self.0.kind().map_bound(|pk| match pk { + PredicateKind::Clause(kind) => kind, + _ => unreachable!(), + }) + } +} + +impl<'db> Clause<'db> { + pub fn as_predicate(self) -> Predicate<'db> { + self.0 + } +} + +impl<'db> Elaboratable> for Clause<'db> { + fn predicate(&self) -> as rustc_type_ir::Interner>::Predicate { + self.0 + } + + fn child(&self, clause: as rustc_type_ir::Interner>::Clause) -> Self { + clause + } + + fn child_with_derived_cause( + &self, + clause: as rustc_type_ir::Interner>::Clause, + _span: as rustc_type_ir::Interner>::Span, + _parent_trait_pred: rustc_type_ir::Binder< + DbInterner<'db>, + rustc_type_ir::TraitPredicate>, + >, + _index: usize, + ) -> Self { + clause + } +} + +impl<'db> UpcastFrom, ty::Binder, ty::ClauseKind>>> + for Clause<'db> +{ + fn upcast_from( + from: ty::Binder, ty::ClauseKind>>, + interner: DbInterner<'db>, + ) -> Self { + Clause(from.map_bound(PredicateKind::Clause).upcast(interner)) + } +} +impl<'db> UpcastFrom, ty::TraitRef>> for Clause<'db> { + fn upcast_from(from: ty::TraitRef>, interner: DbInterner<'db>) -> Self { + Clause(from.upcast(interner)) + } +} +impl<'db> UpcastFrom, ty::Binder, ty::TraitRef>>> + for Clause<'db> +{ + fn upcast_from( + from: ty::Binder, ty::TraitRef>>, + interner: DbInterner<'db>, + ) -> Self { + Clause(from.upcast(interner)) + } +} +impl<'db> UpcastFrom, ty::TraitPredicate>> for Clause<'db> { + fn 
upcast_from(from: ty::TraitPredicate>, interner: DbInterner<'db>) -> Self { + Clause(from.upcast(interner)) + } +} +impl<'db> + UpcastFrom, ty::Binder, ty::TraitPredicate>>> + for Clause<'db> +{ + fn upcast_from( + from: ty::Binder, ty::TraitPredicate>>, + interner: DbInterner<'db>, + ) -> Self { + Clause(from.upcast(interner)) + } +} +impl<'db> UpcastFrom, ty::ProjectionPredicate>> for Clause<'db> { + fn upcast_from( + from: ty::ProjectionPredicate>, + interner: DbInterner<'db>, + ) -> Self { + Clause(from.upcast(interner)) + } +} +impl<'db> + UpcastFrom< + DbInterner<'db>, + ty::Binder, ty::ProjectionPredicate>>, + > for Clause<'db> +{ + fn upcast_from( + from: ty::Binder, ty::ProjectionPredicate>>, + interner: DbInterner<'db>, + ) -> Self { + Clause(from.upcast(interner)) + } +} + +impl<'db> rustc_type_ir::inherent::Clause> for Clause<'db> { + fn as_predicate(self) -> as rustc_type_ir::Interner>::Predicate { + self.0 + } + + fn instantiate_supertrait( + self, + cx: DbInterner<'db>, + trait_ref: rustc_type_ir::Binder, rustc_type_ir::TraitRef>>, + ) -> Self { + tracing::debug!(?self, ?trait_ref); + // See the rustc impl for a long comment + let bound_pred = self.kind(); + let pred_bound_vars = bound_pred.bound_vars(); + let trait_bound_vars = trait_ref.bound_vars(); + // 1) Self: Bar1<'a, '^0.0> -> Self: Bar1<'a, '^0.1> + let shifted_pred = + cx.shift_bound_var_indices(trait_bound_vars.len(), bound_pred.skip_binder()); + // 2) Self: Bar1<'a, '^0.1> -> T: Bar1<'^0.0, '^0.1> + let new = EarlyBinder::bind(shifted_pred).instantiate(cx, trait_ref.skip_binder().args); + // 3) ['x] + ['b] -> ['x, 'b] + let bound_vars = + BoundVarKinds::new_from_iter(cx, trait_bound_vars.iter().chain(pred_bound_vars.iter())); + + let predicate: Predicate<'db> = + ty::Binder::bind_with_vars(PredicateKind::Clause(new), bound_vars).upcast(cx); + predicate.expect_clause() + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/region.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/region.rs new file mode 100644 index 0000000000000..d6214d991560a --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/region.rs @@ -0,0 +1,335 @@ +//! Things related to regions. + +use hir_def::LifetimeParamId; +use intern::{Interned, Symbol}; +use rustc_type_ir::{ + BoundVar, Flags, INNERMOST, RegionVid, TypeFlags, TypeFoldable, TypeVisitable, VisitorResult, + inherent::{IntoKind, PlaceholderLike, SliceLike}, + relate::Relate, +}; + +use crate::next_solver::{GenericArg, OutlivesPredicate}; + +use super::{ + ErrorGuaranteed, SolverDefId, interned_vec_db, + interner::{BoundVarKind, DbInterner, Placeholder}, +}; + +type RegionKind<'db> = rustc_type_ir::RegionKind>; + +#[salsa::interned(constructor = new_, debug)] +pub struct Region<'db> { + #[returns(ref)] + kind_: RegionKind<'db>, +} + +impl<'db> Region<'db> { + pub fn new(interner: DbInterner<'db>, kind: RegionKind<'db>) -> Self { + Region::new_(interner.db(), kind) + } + + pub fn inner(&self) -> &RegionKind<'db> { + salsa::with_attached_database(|db| { + let inner = self.kind_(db); + // SAFETY: The caller already has access to a `Region<'db>`, so borrowchecking will + // make sure that our returned value is valid for the lifetime `'db`. 
+ unsafe { std::mem::transmute::<&RegionKind<'_>, &RegionKind<'db>>(inner) } + }) + .unwrap() + } + + pub fn new_early_param( + interner: DbInterner<'db>, + early_bound_region: EarlyParamRegion, + ) -> Self { + Region::new(interner, RegionKind::ReEarlyParam(early_bound_region)) + } + + pub fn new_placeholder(interner: DbInterner<'db>, placeholder: PlaceholderRegion) -> Self { + Region::new(interner, RegionKind::RePlaceholder(placeholder)) + } + + pub fn new_var(interner: DbInterner<'db>, v: RegionVid) -> Region<'db> { + Region::new(interner, RegionKind::ReVar(v)) + } + + pub fn is_placeholder(&self) -> bool { + matches!(self.inner(), RegionKind::RePlaceholder(..)) + } + + pub fn is_static(&self) -> bool { + matches!(self.inner(), RegionKind::ReStatic) + } + + pub fn error(interner: DbInterner<'db>) -> Self { + Region::new(interner, RegionKind::ReError(ErrorGuaranteed)) + } + + pub fn type_flags(&self) -> TypeFlags { + let mut flags = TypeFlags::empty(); + + match &self.inner() { + RegionKind::ReVar(..) => { + flags |= TypeFlags::HAS_FREE_REGIONS; + flags |= TypeFlags::HAS_FREE_LOCAL_REGIONS; + flags |= TypeFlags::HAS_RE_INFER; + } + RegionKind::RePlaceholder(..) => { + flags |= TypeFlags::HAS_FREE_REGIONS; + flags |= TypeFlags::HAS_FREE_LOCAL_REGIONS; + flags |= TypeFlags::HAS_RE_PLACEHOLDER; + } + RegionKind::ReEarlyParam(..) => { + flags |= TypeFlags::HAS_FREE_REGIONS; + flags |= TypeFlags::HAS_FREE_LOCAL_REGIONS; + flags |= TypeFlags::HAS_RE_PARAM; + } + RegionKind::ReLateParam(..) => { + flags |= TypeFlags::HAS_FREE_REGIONS; + flags |= TypeFlags::HAS_FREE_LOCAL_REGIONS; + } + RegionKind::ReStatic => { + flags |= TypeFlags::HAS_FREE_REGIONS; + } + RegionKind::ReBound(..) => { + flags |= TypeFlags::HAS_RE_BOUND; + } + RegionKind::ReErased => { + flags |= TypeFlags::HAS_RE_ERASED; + } + RegionKind::ReError(..) => { + flags |= TypeFlags::HAS_FREE_REGIONS; + flags |= TypeFlags::HAS_ERROR; + } + } + + flags + } +} + +pub type PlaceholderRegion = Placeholder; + +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct EarlyParamRegion { + // FIXME: See `ParamTy`. + pub id: LifetimeParamId, + pub index: u32, +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +/// The parameter representation of late-bound function parameters, "some region +/// at least as big as the scope `fr.scope`". +/// +/// Similar to a placeholder region as we create `LateParam` regions when entering a binder +/// except they are always in the root universe and instead of using a boundvar to distinguish +/// between others we use the `DefId` of the parameter. For this reason the `bound_region` field +/// should basically always be `BoundRegionKind::Named` as otherwise there is no way of telling +/// different parameters apart. +pub struct LateParamRegion { + pub scope: SolverDefId, + pub bound_region: BoundRegionKind, +} + +impl std::fmt::Debug for LateParamRegion { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "ReLateParam({:?}, {:?})", self.scope, self.bound_region) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub enum BoundRegionKind { + /// An anonymous region parameter for a given fn (&T) + Anon, + + /// Named region parameters for functions (a in &'a T) + /// + /// The `DefId` is needed to distinguish free regions in + /// the event of shadowing. 
+ Named(SolverDefId), + + /// Anonymous region for the implicit env pointer parameter + /// to a closure + ClosureEnv, +} + +impl std::fmt::Debug for BoundRegionKind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match *self { + BoundRegionKind::Anon => write!(f, "BrAnon"), + BoundRegionKind::Named(did) => { + write!(f, "BrNamed({did:?})") + } + BoundRegionKind::ClosureEnv => write!(f, "BrEnv"), + } + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct BoundRegion { + pub var: BoundVar, + pub kind: BoundRegionKind, +} + +impl rustc_type_ir::inherent::ParamLike for EarlyParamRegion { + fn index(self) -> u32 { + self.index + } +} + +impl std::fmt::Debug for EarlyParamRegion { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "#{}", self.index) + // write!(f, "{}/#{}", self.name, self.index) + } +} + +impl<'db> rustc_type_ir::inherent::BoundVarLike> for BoundRegion { + fn var(self) -> BoundVar { + self.var + } + + fn assert_eq(self, var: BoundVarKind) { + assert_eq!(self.kind, var.expect_region()) + } +} + +impl core::fmt::Debug for BoundRegion { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match &self.kind { + BoundRegionKind::Anon => write!(f, "{:?}", self.var), + BoundRegionKind::ClosureEnv => write!(f, "{:?}.Env", self.var), + BoundRegionKind::Named(def) => { + write!(f, "{:?}.Named({:?})", self.var, def) + } + } + } +} + +impl BoundRegionKind { + pub fn is_named(&self) -> bool { + matches!(self, BoundRegionKind::Named(_)) + } + + pub fn get_name(&self) -> Option { + None + } + + pub fn get_id(&self) -> Option { + match self { + BoundRegionKind::Named(id) => Some(*id), + _ => None, + } + } +} + +impl<'db> IntoKind for Region<'db> { + type Kind = RegionKind<'db>; + + fn kind(self) -> Self::Kind { + *self.inner() + } +} + +impl<'db> TypeVisitable> for Region<'db> { + fn visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + visitor.visit_region(*self) + } +} + +impl<'db> TypeFoldable> for Region<'db> { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + folder.try_fold_region(self) + } + fn fold_with>>(self, folder: &mut F) -> Self { + folder.fold_region(self) + } +} + +impl<'db> Relate> for Region<'db> { + fn relate>>( + relation: &mut R, + a: Self, + b: Self, + ) -> rustc_type_ir::relate::RelateResult, Self> { + relation.regions(a, b) + } +} + +impl<'db> Flags for Region<'db> { + fn flags(&self) -> rustc_type_ir::TypeFlags { + self.type_flags() + } + + fn outer_exclusive_binder(&self) -> rustc_type_ir::DebruijnIndex { + match &self.inner() { + RegionKind::ReBound(debruijn, _) => debruijn.shifted_in(1), + _ => INNERMOST, + } + } +} + +impl<'db> rustc_type_ir::inherent::Region> for Region<'db> { + fn new_bound( + interner: DbInterner<'db>, + debruijn: rustc_type_ir::DebruijnIndex, + var: BoundRegion, + ) -> Self { + Region::new(interner, RegionKind::ReBound(debruijn, var)) + } + + fn new_anon_bound( + interner: DbInterner<'db>, + debruijn: rustc_type_ir::DebruijnIndex, + var: rustc_type_ir::BoundVar, + ) -> Self { + Region::new( + interner, + RegionKind::ReBound(debruijn, BoundRegion { var, kind: BoundRegionKind::Anon }), + ) + } + + fn new_static(interner: DbInterner<'db>) -> Self { + Region::new(interner, RegionKind::ReStatic) + } + + fn new_placeholder( + interner: DbInterner<'db>, + var: as rustc_type_ir::Interner>::PlaceholderRegion, + ) -> Self { + Region::new(interner, RegionKind::RePlaceholder(var)) + } +} + +impl<'db> PlaceholderLike> for PlaceholderRegion { + 
type Bound = BoundRegion; + + fn universe(self) -> rustc_type_ir::UniverseIndex { + self.universe + } + + fn var(self) -> rustc_type_ir::BoundVar { + self.bound.var + } + + fn with_updated_universe(self, ui: rustc_type_ir::UniverseIndex) -> Self { + Placeholder { universe: ui, bound: self.bound } + } + + fn new(ui: rustc_type_ir::UniverseIndex, bound: Self::Bound) -> Self { + Placeholder { universe: ui, bound } + } + + fn new_anon(ui: rustc_type_ir::UniverseIndex, var: rustc_type_ir::BoundVar) -> Self { + Placeholder { universe: ui, bound: BoundRegion { var, kind: BoundRegionKind::Anon } } + } +} + +type GenericArgOutlivesPredicate<'db> = OutlivesPredicate<'db, GenericArg<'db>>; + +interned_vec_db!(RegionAssumptions, GenericArgOutlivesPredicate); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/solver.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/solver.rs new file mode 100644 index 0000000000000..946e57e6cb741 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/solver.rs @@ -0,0 +1,289 @@ +//! Defining `SolverContext` for next-trait-solver. + +use hir_def::{AssocItemId, GeneralConstId, TypeAliasId}; +use rustc_next_trait_solver::delegate::SolverDelegate; +use rustc_type_ir::GenericArgKind; +use rustc_type_ir::lang_items::SolverTraitLangItem; +use rustc_type_ir::{ + InferCtxtLike, Interner, PredicatePolarity, TypeFlags, TypeVisitableExt, UniverseIndex, + inherent::{IntoKind, SliceLike, Span as _, Term as _, Ty as _}, + solve::{Certainty, NoSolution}, +}; + +use crate::next_solver::mapping::NextSolverToChalk; +use crate::next_solver::{CanonicalVarKind, ImplIdWrapper}; +use crate::{ + TraitRefExt, + db::HirDatabase, + next_solver::{ + ClauseKind, CoercePredicate, PredicateKind, SubtypePredicate, mapping::ChalkToNextSolver, + util::sizedness_fast_path, + }, +}; + +use super::{ + Canonical, CanonicalVarValues, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, + ParamEnv, Predicate, SolverDefId, Span, Ty, UnevaluatedConst, + infer::{DbInternerInferExt, InferCtxt, canonical::instantiate::CanonicalExt}, +}; + +pub type Goal<'db, P> = rustc_type_ir::solve::Goal, P>; + +#[repr(transparent)] +pub(crate) struct SolverContext<'db>(pub(crate) InferCtxt<'db>); + +impl<'a, 'db> From<&'a InferCtxt<'db>> for &'a SolverContext<'db> { + fn from(infcx: &'a InferCtxt<'db>) -> Self { + // SAFETY: `repr(transparent)` + unsafe { std::mem::transmute(infcx) } + } +} + +impl<'db> std::ops::Deref for SolverContext<'db> { + type Target = InferCtxt<'db>; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl<'db> SolverDelegate for SolverContext<'db> { + type Interner = DbInterner<'db>; + type Infcx = InferCtxt<'db>; + + fn cx(&self) -> Self::Interner { + self.0.interner + } + + fn build_with_canonical( + cx: Self::Interner, + canonical: &rustc_type_ir::CanonicalQueryInput, + ) -> (Self, V, rustc_type_ir::CanonicalVarValues) + where + V: rustc_type_ir::TypeFoldable, + { + let (infcx, value, vars) = cx.infer_ctxt().build_with_canonical(canonical); + (SolverContext(infcx), value, vars) + } + + fn fresh_var_for_kind_with_span(&self, arg: GenericArg<'db>, span: Span) -> GenericArg<'db> { + match arg.kind() { + GenericArgKind::Lifetime(_) => self.next_region_var().into(), + GenericArgKind::Type(_) => self.next_ty_var().into(), + GenericArgKind::Const(_) => self.next_const_var().into(), + } + } + + fn leak_check( + &self, + max_input_universe: rustc_type_ir::UniverseIndex, + ) -> Result<(), NoSolution> { + Ok(()) + } + + fn well_formed_goals( + &self, + 
param_env: ::ParamEnv, + arg: ::Term, + ) -> Option< + Vec< + rustc_type_ir::solve::Goal< + Self::Interner, + ::Predicate, + >, + >, + > { + // FIXME(next-solver): + None + } + + fn make_deduplicated_outlives_constraints( + &self, + ) -> Vec< + rustc_type_ir::OutlivesPredicate< + Self::Interner, + ::GenericArg, + >, + > { + // FIXME: add if we care about regions + vec![] + } + + fn instantiate_canonical( + &self, + canonical: rustc_type_ir::Canonical, + values: rustc_type_ir::CanonicalVarValues, + ) -> V + where + V: rustc_type_ir::TypeFoldable, + { + canonical.instantiate(self.cx(), &values) + } + + fn instantiate_canonical_var( + &self, + kind: CanonicalVarKind<'db>, + span: ::Span, + var_values: &[GenericArg<'db>], + universe_map: impl Fn(rustc_type_ir::UniverseIndex) -> rustc_type_ir::UniverseIndex, + ) -> GenericArg<'db> { + self.0.instantiate_canonical_var(kind, var_values, universe_map) + } + + fn add_item_bounds_for_hidden_type( + &self, + def_id: ::DefId, + args: ::GenericArgs, + param_env: ::ParamEnv, + hidden_ty: ::Ty, + goals: &mut Vec< + rustc_type_ir::solve::Goal< + Self::Interner, + ::Predicate, + >, + >, + ) { + unimplemented!() + } + + fn fetch_eligible_assoc_item( + &self, + goal_trait_ref: rustc_type_ir::TraitRef, + trait_assoc_def_id: ::DefId, + impl_id: ImplIdWrapper, + ) -> Result::DefId>, ErrorGuaranteed> { + let trait_assoc_id = match trait_assoc_def_id { + SolverDefId::TypeAliasId(id) => id, + _ => panic!("Unexpected SolverDefId"), + }; + let trait_ref = self + .0 + .interner + .db() + .impl_trait(impl_id.0) + // ImplIds for impls where the trait ref can't be resolved should never reach solver + .expect("invalid impl passed to next-solver") + .into_value_and_skipped_binders() + .0; + let trait_ = trait_ref.hir_trait_id(); + let trait_data = trait_.trait_items(self.0.interner.db()); + let id = + impl_id.0.impl_items(self.0.interner.db()).items.iter().find_map(|item| -> Option<_> { + match item { + (_, AssocItemId::TypeAliasId(type_alias)) => { + let name = &self.0.interner.db().type_alias_signature(*type_alias).name; + let found_trait_assoc_id = trait_data.associated_type_by_name(name)?; + (found_trait_assoc_id == trait_assoc_id).then_some(*type_alias) + } + _ => None, + } + }); + Ok(id.map(SolverDefId::TypeAliasId)) + } + + fn is_transmutable( + &self, + dst: ::Ty, + src: ::Ty, + assume: ::Const, + ) -> Result { + unimplemented!() + } + + fn evaluate_const( + &self, + param_env: ::ParamEnv, + uv: rustc_type_ir::UnevaluatedConst, + ) -> Option<::Const> { + let c = match uv.def { + SolverDefId::ConstId(c) => GeneralConstId::ConstId(c), + SolverDefId::StaticId(c) => GeneralConstId::StaticId(c), + _ => unreachable!(), + }; + let subst = uv.args.to_chalk(self.interner); + let ec = self.cx().db.const_eval(c, subst, None).ok()?; + Some(ec.to_nextsolver(self.interner)) + } + + fn compute_goal_fast_path( + &self, + goal: rustc_type_ir::solve::Goal< + Self::Interner, + ::Predicate, + >, + span: ::Span, + ) -> Option { + if let Some(trait_pred) = goal.predicate.as_trait_clause() { + if self.shallow_resolve(trait_pred.self_ty().skip_binder()).is_ty_var() + // We don't do this fast path when opaques are defined since we may + // eventually use opaques to incompletely guide inference via ty var + // self types. + // FIXME: Properly consider opaques here. 
+ && self.inner.borrow_mut().opaque_types().is_empty() + { + return Some(Certainty::AMBIGUOUS); + } + + if trait_pred.polarity() == PredicatePolarity::Positive { + match self.0.cx().as_trait_lang_item(trait_pred.def_id()) { + Some(SolverTraitLangItem::Sized) | Some(SolverTraitLangItem::MetaSized) => { + let predicate = self.resolve_vars_if_possible(goal.predicate); + if sizedness_fast_path(self.cx(), predicate, goal.param_env) { + return Some(Certainty::Yes); + } + } + Some(SolverTraitLangItem::Copy | SolverTraitLangItem::Clone) => { + let self_ty = + self.resolve_vars_if_possible(trait_pred.self_ty().skip_binder()); + // Unlike `Sized` traits, which always prefer the built-in impl, + // `Copy`/`Clone` may be shadowed by a param-env candidate which + // could force a lifetime error or guide inference. While that's + // not generally desirable, it is observable, so for now let's + // ignore this fast path for types that have regions or infer. + if !self_ty + .has_type_flags(TypeFlags::HAS_FREE_REGIONS | TypeFlags::HAS_INFER) + && self_ty.is_trivially_pure_clone_copy() + { + return Some(Certainty::Yes); + } + } + _ => {} + } + } + } + + let pred = goal.predicate.kind(); + match pred.no_bound_vars()? { + PredicateKind::Clause(ClauseKind::RegionOutlives(outlives)) => Some(Certainty::Yes), + PredicateKind::Clause(ClauseKind::TypeOutlives(outlives)) => Some(Certainty::Yes), + PredicateKind::Subtype(SubtypePredicate { a, b, .. }) + | PredicateKind::Coerce(CoercePredicate { a, b }) => { + if self.shallow_resolve(a).is_ty_var() && self.shallow_resolve(b).is_ty_var() { + // FIXME: We also need to register a subtype relation between these vars + // when those are added, and if they aren't in the same sub root then + // we should mark this goal as `has_changed`. 
+ Some(Certainty::AMBIGUOUS) + } else { + None + } + } + PredicateKind::Clause(ClauseKind::ConstArgHasType(ct, _)) => { + if self.shallow_resolve_const(ct).is_ct_infer() { + Some(Certainty::AMBIGUOUS) + } else { + None + } + } + PredicateKind::Clause(ClauseKind::WellFormed(arg)) => { + if arg.is_trivially_wf(self.interner) { + Some(Certainty::Yes) + } else if arg.is_infer() { + Some(Certainty::AMBIGUOUS) + } else { + None + } + } + _ => None, + } + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/structural_normalize.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/structural_normalize.rs new file mode 100644 index 0000000000000..18859d8b79707 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/structural_normalize.rs @@ -0,0 +1,57 @@ +use rustc_type_ir::{AliasRelationDirection, inherent::Term as _}; + +use crate::next_solver::{ + Const, PredicateKind, Term, Ty, + fulfill::{FulfillmentCtxt, NextSolverError}, + infer::{at::At, traits::Obligation}, +}; + +impl<'db> At<'_, 'db> { + pub(crate) fn structurally_normalize_ty( + &self, + ty: Ty<'db>, + fulfill_cx: &mut FulfillmentCtxt<'db>, + ) -> Result, Vec>> { + self.structurally_normalize_term(ty.into(), fulfill_cx).map(|term| term.expect_type()) + } + + pub(crate) fn structurally_normalize_const( + &self, + ct: Const<'db>, + fulfill_cx: &mut FulfillmentCtxt<'db>, + ) -> Result, Vec>> { + self.structurally_normalize_term(ct.into(), fulfill_cx).map(|term| term.expect_const()) + } + + pub(crate) fn structurally_normalize_term( + &self, + term: Term<'db>, + fulfill_cx: &mut FulfillmentCtxt<'db>, + ) -> Result, Vec>> { + assert!(!term.is_infer(), "should have resolved vars before calling"); + + if term.to_alias_term().is_none() { + return Ok(term); + } + + let new_infer = self.infcx.next_term_var_of_kind(term); + + // We simply emit an `alias-eq` goal here, since that will take care of + // normalizing the LHS of the projection until it is a rigid projection + // (or a not-yet-defined opaque in scope). + let obligation = Obligation::new( + self.infcx.interner, + self.cause.clone(), + self.param_env, + PredicateKind::AliasRelate(term, new_infer, AliasRelationDirection::Equate), + ); + + fulfill_cx.register_predicate_obligation(self.infcx, obligation); + let errors = fulfill_cx.select_where_possible(self.infcx); + if !errors.is_empty() { + return Err(errors); + } + + Ok(self.infcx.resolve_vars_if_possible(new_infer)) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs new file mode 100644 index 0000000000000..c7a747ade3e76 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs @@ -0,0 +1,1044 @@ +//! Things related to tys in the next-trait-solver. 
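// Illustrative sketch (not part of this patch): `Ty` below is a salsa-interned handle
// around a `TyKind` plus precomputed flags, so values are `Copy` and cheap to pass
// around. With a hypothetical `interner`:
//
//     let unit = Ty::new_empty_tuple(interner);
//     let int = Ty::new_int(interner, rustc_type_ir::IntTy::I32);
//     assert_eq!(int.primitive_size(interner), rustc_abi::Size::from_bytes(4));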
+ +use std::iter; +use std::ops::ControlFlow; + +use hir_def::{GenericDefId, TypeOrConstParamId, TypeParamId}; +use intern::{Interned, Symbol, sym}; +use rustc_abi::{Float, Integer, Size}; +use rustc_ast_ir::{Mutability, try_visit, visit::VisitorResult}; +use rustc_type_ir::{ + BoundVar, ClosureKind, CollectAndApply, FlagComputation, Flags, FloatTy, FloatVid, InferTy, + IntTy, IntVid, Interner, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, + TypeVisitableExt, TypeVisitor, UintTy, WithCachedTypeInfo, + inherent::{ + Abi, AdtDef, BoundVarLike, Const as _, GenericArgs as _, IntoKind, ParamLike, + PlaceholderLike, Safety as _, SliceLike, Ty as _, + }, + relate::Relate, + solve::SizedTraitKind, + walk::TypeWalker, +}; +use salsa::plumbing::{AsId, FromId}; +use smallvec::SmallVec; + +use crate::{ + FnAbi, + db::HirDatabase, + interner::InternedWrapperNoDebug, + next_solver::{ + CallableIdWrapper, ClosureIdWrapper, Const, CoroutineIdWrapper, FnSig, GenericArg, + PolyFnSig, TypeAliasIdWrapper, + abi::Safety, + util::{CoroutineArgsExt, IntegerTypeExt}, + }, +}; + +use super::{ + BoundVarKind, DbInterner, GenericArgs, Placeholder, SolverDefId, interned_vec_db, + util::{FloatExt, IntegerExt}, +}; + +pub type TyKind<'db> = rustc_type_ir::TyKind>; +pub type FnHeader<'db> = rustc_type_ir::FnHeader>; + +#[salsa::interned(constructor = new_)] +pub struct Ty<'db> { + #[returns(ref)] + kind_: InternedWrapperNoDebug>>, +} + +const _: () = { + const fn is_copy() {} + is_copy::>(); +}; + +impl<'db> Ty<'db> { + pub fn new(interner: DbInterner<'db>, kind: TyKind<'db>) -> Self { + let flags = FlagComputation::for_kind(&kind); + let cached = WithCachedTypeInfo { + internee: kind, + flags: flags.flags, + outer_exclusive_binder: flags.outer_exclusive_binder, + #[cfg(feature = "in-rust-tree")] + stable_hash: ena::fingerprint::Fingerprint::ZERO, + }; + Ty::new_(interner.db(), InternedWrapperNoDebug(cached)) + } + + pub fn inner(&self) -> &WithCachedTypeInfo> { + salsa::with_attached_database(|db| { + let inner = &self.kind_(db).0; + // SAFETY: The caller already has access to a `Ty<'db>`, so borrowchecking will + // make sure that our returned value is valid for the lifetime `'db`. 
+ unsafe { std::mem::transmute(inner) } + }) + .unwrap() + } + + pub fn new_param(interner: DbInterner<'db>, id: TypeParamId, index: u32, name: Symbol) -> Self { + Ty::new(interner, TyKind::Param(ParamTy { id, index })) + } + + pub fn new_placeholder(interner: DbInterner<'db>, placeholder: PlaceholderTy) -> Self { + Ty::new(interner, TyKind::Placeholder(placeholder)) + } + + pub fn new_infer(interner: DbInterner<'db>, infer: InferTy) -> Self { + Ty::new(interner, TyKind::Infer(infer)) + } + + pub fn new_int_var(interner: DbInterner<'db>, v: IntVid) -> Self { + Ty::new_infer(interner, InferTy::IntVar(v)) + } + + pub fn new_float_var(interner: DbInterner<'db>, v: FloatVid) -> Self { + Ty::new_infer(interner, InferTy::FloatVar(v)) + } + + pub fn new_int(interner: DbInterner<'db>, i: IntTy) -> Self { + Ty::new(interner, TyKind::Int(i)) + } + + pub fn new_uint(interner: DbInterner<'db>, ui: UintTy) -> Self { + Ty::new(interner, TyKind::Uint(ui)) + } + + pub fn new_float(interner: DbInterner<'db>, f: FloatTy) -> Self { + Ty::new(interner, TyKind::Float(f)) + } + + pub fn new_fresh(interner: DbInterner<'db>, n: u32) -> Self { + Ty::new_infer(interner, InferTy::FreshTy(n)) + } + + pub fn new_fresh_int(interner: DbInterner<'db>, n: u32) -> Self { + Ty::new_infer(interner, InferTy::FreshIntTy(n)) + } + + pub fn new_fresh_float(interner: DbInterner<'db>, n: u32) -> Self { + Ty::new_infer(interner, InferTy::FreshFloatTy(n)) + } + + pub fn new_empty_tuple(interner: DbInterner<'db>) -> Self { + Ty::new_tup(interner, &[]) + } + + /// Returns the `Size` for primitive types (bool, uint, int, char, float). + pub fn primitive_size(self, interner: DbInterner<'db>) -> Size { + match self.kind() { + TyKind::Bool => Size::from_bytes(1), + TyKind::Char => Size::from_bytes(4), + TyKind::Int(ity) => Integer::from_int_ty(&interner, ity).size(), + TyKind::Uint(uty) => Integer::from_uint_ty(&interner, uty).size(), + TyKind::Float(fty) => Float::from_float_ty(fty).size(), + _ => panic!("non primitive type"), + } + } + + pub fn int_size_and_signed(self, interner: DbInterner<'db>) -> (Size, bool) { + match self.kind() { + TyKind::Int(ity) => (Integer::from_int_ty(&interner, ity).size(), true), + TyKind::Uint(uty) => (Integer::from_uint_ty(&interner, uty).size(), false), + _ => panic!("non integer discriminant"), + } + } + + pub fn walk(self) -> TypeWalker> { + TypeWalker::new(self.into()) + } + + /// Fast path helper for testing if a type is `Sized` or `MetaSized`. + /// + /// Returning true means the type is known to implement the sizedness trait. Returning `false` + /// means nothing -- could be sized, might not be. + /// + /// Note that we could never rely on the fact that a type such as `[_]` is trivially `!Sized` + /// because we could be in a type environment with a bound such as `[_]: Copy`. A function with + /// such a bound obviously never can be called, but that doesn't mean it shouldn't typecheck. + /// This is why this method doesn't return `Option`. + #[tracing::instrument(skip(tcx), level = "debug")] + pub fn has_trivial_sizedness(self, tcx: DbInterner<'db>, sizedness: SizedTraitKind) -> bool { + match self.kind() { + TyKind::Infer(InferTy::IntVar(_) | InferTy::FloatVar(_)) + | TyKind::Uint(_) + | TyKind::Int(_) + | TyKind::Bool + | TyKind::Float(_) + | TyKind::FnDef(..) + | TyKind::FnPtr(..) + | TyKind::UnsafeBinder(_) + | TyKind::RawPtr(..) + | TyKind::Char + | TyKind::Ref(..) + | TyKind::Coroutine(..) + | TyKind::CoroutineWitness(..) + | TyKind::Array(..) + | TyKind::Pat(..) + | TyKind::Closure(..) 
+ | TyKind::CoroutineClosure(..) + | TyKind::Never + | TyKind::Error(_) => true, + + TyKind::Str | TyKind::Slice(_) | TyKind::Dynamic(_, _) => match sizedness { + SizedTraitKind::Sized => false, + SizedTraitKind::MetaSized => true, + }, + + TyKind::Foreign(..) => match sizedness { + SizedTraitKind::Sized | SizedTraitKind::MetaSized => false, + }, + + TyKind::Tuple(tys) => { + tys.last().is_none_or(|ty| ty.has_trivial_sizedness(tcx, sizedness)) + } + + TyKind::Adt(def, args) => def + .sizedness_constraint(tcx, sizedness) + .is_none_or(|ty| ty.instantiate(tcx, args).has_trivial_sizedness(tcx, sizedness)), + + TyKind::Alias(..) | TyKind::Param(_) | TyKind::Placeholder(..) | TyKind::Bound(..) => { + false + } + + TyKind::Infer(InferTy::TyVar(_)) => false, + + TyKind::Infer( + InferTy::FreshTy(_) | InferTy::FreshIntTy(_) | InferTy::FreshFloatTy(_), + ) => { + panic!("`has_trivial_sizedness` applied to unexpected type: {self:?}") + } + } + } + + /// Fast path helper for primitives which are always `Copy` and which + /// have a side-effect-free `Clone` impl. + /// + /// Returning true means the type is known to be pure and `Copy+Clone`. + /// Returning `false` means nothing -- could be `Copy`, might not be. + /// + /// This is mostly useful for optimizations, as these are the types + /// on which we can replace cloning with dereferencing. + pub fn is_trivially_pure_clone_copy(self) -> bool { + match self.kind() { + TyKind::Bool | TyKind::Char | TyKind::Never => true, + + // These aren't even `Clone` + TyKind::Str | TyKind::Slice(..) | TyKind::Foreign(..) | TyKind::Dynamic(..) => false, + + TyKind::Infer(InferTy::FloatVar(_) | InferTy::IntVar(_)) + | TyKind::Int(..) + | TyKind::Uint(..) + | TyKind::Float(..) => true, + + // ZST which can't be named are fine. + TyKind::FnDef(..) => true, + + TyKind::Array(element_ty, _len) => element_ty.is_trivially_pure_clone_copy(), + + // A 100-tuple isn't "trivial", so doing this only for reasonable sizes. + TyKind::Tuple(field_tys) => { + field_tys.len() <= 3 && field_tys.iter().all(Self::is_trivially_pure_clone_copy) + } + + TyKind::Pat(ty, _) => ty.is_trivially_pure_clone_copy(), + + // Sometimes traits aren't implemented for every ABI or arity, + // because we can't be generic over everything yet. + TyKind::FnPtr(..) => false, + + // Definitely absolutely not copy. + TyKind::Ref(_, _, Mutability::Mut) => false, + + // The standard library has a blanket Copy impl for shared references and raw pointers, + // for all unsized types. + TyKind::Ref(_, _, Mutability::Not) | TyKind::RawPtr(..) => true, + + TyKind::Coroutine(..) | TyKind::CoroutineWitness(..) => false, + + // Might be, but not "trivial" so just giving the safe answer. + TyKind::Adt(..) | TyKind::Closure(..) | TyKind::CoroutineClosure(..) => false, + + TyKind::UnsafeBinder(_) => false, + + // Needs normalization or revealing to determine, so no is the safe answer. + TyKind::Alias(..) => false, + + TyKind::Param(..) + | TyKind::Placeholder(..) + | TyKind::Bound(..) + | TyKind::Infer(..) + | TyKind::Error(..) => false, + } + } + + pub fn is_trivially_wf(self, tcx: DbInterner<'db>) -> bool { + match self.kind() { + TyKind::Bool + | TyKind::Char + | TyKind::Int(_) + | TyKind::Uint(_) + | TyKind::Float(_) + | TyKind::Str + | TyKind::Never + | TyKind::Param(_) + | TyKind::Placeholder(_) + | TyKind::Bound(..) 
=> true, + + TyKind::Slice(ty) => { + ty.is_trivially_wf(tcx) && ty.has_trivial_sizedness(tcx, SizedTraitKind::Sized) + } + TyKind::RawPtr(ty, _) => ty.is_trivially_wf(tcx), + + TyKind::FnPtr(sig_tys, _) => { + sig_tys.skip_binder().inputs_and_output.iter().all(|ty| ty.is_trivially_wf(tcx)) + } + TyKind::Ref(_, ty, _) => ty.is_global() && ty.is_trivially_wf(tcx), + + TyKind::Infer(infer) => match infer { + InferTy::TyVar(_) => false, + InferTy::IntVar(_) | InferTy::FloatVar(_) => true, + InferTy::FreshTy(_) | InferTy::FreshIntTy(_) | InferTy::FreshFloatTy(_) => true, + }, + + TyKind::Adt(_, _) + | TyKind::Tuple(_) + | TyKind::Array(..) + | TyKind::Foreign(_) + | TyKind::Pat(_, _) + | TyKind::FnDef(..) + | TyKind::UnsafeBinder(..) + | TyKind::Dynamic(..) + | TyKind::Closure(..) + | TyKind::CoroutineClosure(..) + | TyKind::Coroutine(..) + | TyKind::CoroutineWitness(..) + | TyKind::Alias(..) + | TyKind::Error(_) => false, + } + } + + #[inline] + pub fn is_never(self) -> bool { + matches!(self.kind(), TyKind::Never) + } + + #[inline] + pub fn is_infer(self) -> bool { + matches!(self.kind(), TyKind::Infer(..)) + } + + #[inline] + pub fn is_str(self) -> bool { + matches!(self.kind(), TyKind::Str) + } + + #[inline] + pub fn is_unit(self) -> bool { + matches!(self.kind(), TyKind::Tuple(tys) if tys.inner().is_empty()) + } + + /// Given a `fn` type, returns an equivalent `unsafe fn` type; + /// that is, a `fn` type that is equivalent in every way for being + /// unsafe. + pub fn safe_to_unsafe_fn_ty(interner: DbInterner<'db>, sig: PolyFnSig<'db>) -> Ty<'db> { + assert!(sig.safety().is_safe()); + Ty::new_fn_ptr(interner, sig.map_bound(|sig| FnSig { safety: Safety::Unsafe, ..sig })) + } + + /// Returns the type of `*ty`. + /// + /// The parameter `explicit` indicates if this is an *explicit* dereference. + /// Some types -- notably raw ptrs -- can only be dereferenced explicitly. + pub fn builtin_deref(self, db: &dyn HirDatabase, explicit: bool) -> Option> { + match self.kind() { + TyKind::Adt(adt, substs) if crate::lang_items::is_box(db, adt.def_id().0) => { + Some(substs.as_slice()[0].expect_ty()) + } + TyKind::Ref(_, ty, _) => Some(ty), + TyKind::RawPtr(ty, _) if explicit => Some(ty), + _ => None, + } + } + + /// Whether the type contains some non-lifetime, aka. type or const, error type. 
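+ ///
+ /// For example, a tuple containing a type error or an array with an error
+ /// const length counts, while a type whose only error is in a region
+ /// (e.g. a reference with an error lifetime) does not.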
+ pub fn references_non_lt_error(self) -> bool { + self.references_error() && self.visit_with(&mut ReferencesNonLifetimeError).is_break() + } +} + +struct ReferencesNonLifetimeError; + +impl<'db> TypeVisitor> for ReferencesNonLifetimeError { + type Result = ControlFlow<()>; + + fn visit_ty(&mut self, ty: Ty<'db>) -> Self::Result { + if ty.is_ty_error() { ControlFlow::Break(()) } else { ty.super_visit_with(self) } + } + + fn visit_const(&mut self, c: Const<'db>) -> Self::Result { + if c.is_ct_error() { ControlFlow::Break(()) } else { c.super_visit_with(self) } + } +} + +impl<'db> std::fmt::Debug for Ty<'db> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.inner().internee.fmt(f) + } +} + +impl<'db> std::fmt::Debug for InternedWrapperNoDebug>> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.internee.fmt(f) + } +} + +impl<'db> IntoKind for Ty<'db> { + type Kind = TyKind<'db>; + + fn kind(self) -> Self::Kind { + self.inner().internee + } +} + +impl<'db> TypeVisitable> for Ty<'db> { + fn visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + visitor.visit_ty(*self) + } +} + +impl<'db> TypeSuperVisitable> for Ty<'db> { + fn super_visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + match (*self).kind() { + TyKind::RawPtr(ty, _mutbl) => ty.visit_with(visitor), + TyKind::Array(typ, sz) => { + try_visit!(typ.visit_with(visitor)); + sz.visit_with(visitor) + } + TyKind::Slice(typ) => typ.visit_with(visitor), + TyKind::Adt(_, args) => args.visit_with(visitor), + TyKind::Dynamic(ref trait_ty, ref reg) => { + try_visit!(trait_ty.visit_with(visitor)); + reg.visit_with(visitor) + } + TyKind::Tuple(ts) => ts.visit_with(visitor), + TyKind::FnDef(_, args) => args.visit_with(visitor), + TyKind::FnPtr(ref sig_tys, _) => sig_tys.visit_with(visitor), + TyKind::UnsafeBinder(f) => f.visit_with(visitor), + TyKind::Ref(r, ty, _) => { + try_visit!(r.visit_with(visitor)); + ty.visit_with(visitor) + } + TyKind::Coroutine(_did, ref args) => args.visit_with(visitor), + TyKind::CoroutineWitness(_did, ref args) => args.visit_with(visitor), + TyKind::Closure(_did, ref args) => args.visit_with(visitor), + TyKind::CoroutineClosure(_did, ref args) => args.visit_with(visitor), + TyKind::Alias(_, ref data) => data.visit_with(visitor), + + TyKind::Pat(ty, pat) => { + try_visit!(ty.visit_with(visitor)); + pat.visit_with(visitor) + } + + TyKind::Error(guar) => guar.visit_with(visitor), + + TyKind::Bool + | TyKind::Char + | TyKind::Str + | TyKind::Int(_) + | TyKind::Uint(_) + | TyKind::Float(_) + | TyKind::Infer(_) + | TyKind::Bound(..) + | TyKind::Placeholder(..) + | TyKind::Param(..) + | TyKind::Never + | TyKind::Foreign(..) => V::Result::output(), + } + } +} + +impl<'db> TypeFoldable> for Ty<'db> { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + folder.try_fold_ty(self) + } + fn fold_with>>(self, folder: &mut F) -> Self { + folder.fold_ty(self) + } +} + +impl<'db> TypeSuperFoldable> for Ty<'db> { + fn try_super_fold_with>>( + self, + folder: &mut F, + ) -> Result { + let kind = match self.kind() { + TyKind::RawPtr(ty, mutbl) => TyKind::RawPtr(ty.try_fold_with(folder)?, mutbl), + TyKind::Array(typ, sz) => { + TyKind::Array(typ.try_fold_with(folder)?, sz.try_fold_with(folder)?) 
+ } + TyKind::Slice(typ) => TyKind::Slice(typ.try_fold_with(folder)?), + TyKind::Adt(tid, args) => TyKind::Adt(tid, args.try_fold_with(folder)?), + TyKind::Dynamic(trait_ty, region) => { + TyKind::Dynamic(trait_ty.try_fold_with(folder)?, region.try_fold_with(folder)?) + } + TyKind::Tuple(ts) => TyKind::Tuple(ts.try_fold_with(folder)?), + TyKind::FnDef(def_id, args) => TyKind::FnDef(def_id, args.try_fold_with(folder)?), + TyKind::FnPtr(sig_tys, hdr) => TyKind::FnPtr(sig_tys.try_fold_with(folder)?, hdr), + TyKind::UnsafeBinder(f) => TyKind::UnsafeBinder(f.try_fold_with(folder)?), + TyKind::Ref(r, ty, mutbl) => { + TyKind::Ref(r.try_fold_with(folder)?, ty.try_fold_with(folder)?, mutbl) + } + TyKind::Coroutine(did, args) => TyKind::Coroutine(did, args.try_fold_with(folder)?), + TyKind::CoroutineWitness(did, args) => { + TyKind::CoroutineWitness(did, args.try_fold_with(folder)?) + } + TyKind::Closure(did, args) => TyKind::Closure(did, args.try_fold_with(folder)?), + TyKind::CoroutineClosure(did, args) => { + TyKind::CoroutineClosure(did, args.try_fold_with(folder)?) + } + TyKind::Alias(kind, data) => TyKind::Alias(kind, data.try_fold_with(folder)?), + TyKind::Pat(ty, pat) => { + TyKind::Pat(ty.try_fold_with(folder)?, pat.try_fold_with(folder)?) + } + + TyKind::Bool + | TyKind::Char + | TyKind::Str + | TyKind::Int(_) + | TyKind::Uint(_) + | TyKind::Float(_) + | TyKind::Error(_) + | TyKind::Infer(_) + | TyKind::Param(..) + | TyKind::Bound(..) + | TyKind::Placeholder(..) + | TyKind::Never + | TyKind::Foreign(..) => return Ok(self), + }; + + Ok(if self.kind() == kind { self } else { Ty::new(folder.cx(), kind) }) + } + fn super_fold_with>>( + self, + folder: &mut F, + ) -> Self { + let kind = match self.kind() { + TyKind::RawPtr(ty, mutbl) => TyKind::RawPtr(ty.fold_with(folder), mutbl), + TyKind::Array(typ, sz) => TyKind::Array(typ.fold_with(folder), sz.fold_with(folder)), + TyKind::Slice(typ) => TyKind::Slice(typ.fold_with(folder)), + TyKind::Adt(tid, args) => TyKind::Adt(tid, args.fold_with(folder)), + TyKind::Dynamic(trait_ty, region) => { + TyKind::Dynamic(trait_ty.fold_with(folder), region.fold_with(folder)) + } + TyKind::Tuple(ts) => TyKind::Tuple(ts.fold_with(folder)), + TyKind::FnDef(def_id, args) => TyKind::FnDef(def_id, args.fold_with(folder)), + TyKind::FnPtr(sig_tys, hdr) => TyKind::FnPtr(sig_tys.fold_with(folder), hdr), + TyKind::UnsafeBinder(f) => TyKind::UnsafeBinder(f.fold_with(folder)), + TyKind::Ref(r, ty, mutbl) => { + TyKind::Ref(r.fold_with(folder), ty.fold_with(folder), mutbl) + } + TyKind::Coroutine(did, args) => TyKind::Coroutine(did, args.fold_with(folder)), + TyKind::CoroutineWitness(did, args) => { + TyKind::CoroutineWitness(did, args.fold_with(folder)) + } + TyKind::Closure(did, args) => TyKind::Closure(did, args.fold_with(folder)), + TyKind::CoroutineClosure(did, args) => { + TyKind::CoroutineClosure(did, args.fold_with(folder)) + } + TyKind::Alias(kind, data) => TyKind::Alias(kind, data.fold_with(folder)), + TyKind::Pat(ty, pat) => TyKind::Pat(ty.fold_with(folder), pat.fold_with(folder)), + + TyKind::Bool + | TyKind::Char + | TyKind::Str + | TyKind::Int(_) + | TyKind::Uint(_) + | TyKind::Float(_) + | TyKind::Error(_) + | TyKind::Infer(_) + | TyKind::Param(..) + | TyKind::Bound(..) + | TyKind::Placeholder(..) + | TyKind::Never + | TyKind::Foreign(..) 
=> return self, + }; + + if self.kind() == kind { self } else { Ty::new(folder.cx(), kind) } + } +} + +impl<'db> Relate> for Ty<'db> { + fn relate>>( + relation: &mut R, + a: Self, + b: Self, + ) -> rustc_type_ir::relate::RelateResult, Self> { + relation.tys(a, b) + } +} + +impl<'db> Flags for Ty<'db> { + fn flags(&self) -> rustc_type_ir::TypeFlags { + self.inner().flags + } + + fn outer_exclusive_binder(&self) -> rustc_type_ir::DebruijnIndex { + self.inner().outer_exclusive_binder + } +} + +impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> { + fn new_unit(interner: DbInterner<'db>) -> Self { + Ty::new(interner, TyKind::Tuple(Default::default())) + } + + fn new_bool(interner: DbInterner<'db>) -> Self { + Ty::new(interner, TyKind::Bool) + } + + fn new_u8(interner: DbInterner<'db>) -> Self { + Ty::new(interner, TyKind::Uint(rustc_type_ir::UintTy::U8)) + } + + fn new_usize(interner: DbInterner<'db>) -> Self { + Ty::new(interner, TyKind::Uint(rustc_type_ir::UintTy::Usize)) + } + + fn new_infer(interner: DbInterner<'db>, var: rustc_type_ir::InferTy) -> Self { + Ty::new(interner, TyKind::Infer(var)) + } + + fn new_var(interner: DbInterner<'db>, var: rustc_type_ir::TyVid) -> Self { + Ty::new(interner, TyKind::Infer(rustc_type_ir::InferTy::TyVar(var))) + } + + fn new_param(interner: DbInterner<'db>, param: ParamTy) -> Self { + Ty::new(interner, TyKind::Param(param)) + } + + fn new_placeholder(interner: DbInterner<'db>, param: PlaceholderTy) -> Self { + Ty::new(interner, TyKind::Placeholder(param)) + } + + fn new_bound( + interner: DbInterner<'db>, + debruijn: rustc_type_ir::DebruijnIndex, + var: BoundTy, + ) -> Self { + Ty::new(interner, TyKind::Bound(debruijn, var)) + } + + fn new_anon_bound( + interner: DbInterner<'db>, + debruijn: rustc_type_ir::DebruijnIndex, + var: BoundVar, + ) -> Self { + Ty::new(interner, TyKind::Bound(debruijn, BoundTy { var, kind: BoundTyKind::Anon })) + } + + fn new_alias( + interner: DbInterner<'db>, + kind: rustc_type_ir::AliasTyKind, + alias_ty: rustc_type_ir::AliasTy>, + ) -> Self { + Ty::new(interner, TyKind::Alias(kind, alias_ty)) + } + + fn new_error(interner: DbInterner<'db>, guar: ErrorGuaranteed) -> Self { + Ty::new(interner, TyKind::Error(guar)) + } + + fn new_adt( + interner: DbInterner<'db>, + adt_def: as rustc_type_ir::Interner>::AdtDef, + args: GenericArgs<'db>, + ) -> Self { + Ty::new(interner, TyKind::Adt(adt_def, args)) + } + + fn new_foreign(interner: DbInterner<'db>, def_id: TypeAliasIdWrapper) -> Self { + Ty::new(interner, TyKind::Foreign(def_id)) + } + + fn new_dynamic( + interner: DbInterner<'db>, + preds: as rustc_type_ir::Interner>::BoundExistentialPredicates, + region: as rustc_type_ir::Interner>::Region, + ) -> Self { + Ty::new(interner, TyKind::Dynamic(preds, region)) + } + + fn new_coroutine( + interner: DbInterner<'db>, + def_id: CoroutineIdWrapper, + args: as rustc_type_ir::Interner>::GenericArgs, + ) -> Self { + Ty::new(interner, TyKind::Coroutine(def_id, args)) + } + + fn new_coroutine_closure( + interner: DbInterner<'db>, + def_id: CoroutineIdWrapper, + args: as rustc_type_ir::Interner>::GenericArgs, + ) -> Self { + Ty::new(interner, TyKind::CoroutineClosure(def_id, args)) + } + + fn new_closure( + interner: DbInterner<'db>, + def_id: ClosureIdWrapper, + args: as rustc_type_ir::Interner>::GenericArgs, + ) -> Self { + Ty::new(interner, TyKind::Closure(def_id, args)) + } + + fn new_coroutine_witness( + interner: DbInterner<'db>, + def_id: CoroutineIdWrapper, + args: as rustc_type_ir::Interner>::GenericArgs, + ) -> Self { + 
Ty::new(interner, TyKind::CoroutineWitness(def_id, args)) + } + + fn new_coroutine_witness_for_coroutine( + interner: DbInterner<'db>, + def_id: CoroutineIdWrapper, + coroutine_args: as rustc_type_ir::Interner>::GenericArgs, + ) -> Self { + // HACK: Coroutine witness types are lifetime erased, so they + // never reference any lifetime args from the coroutine. We erase + // the regions here since we may get into situations where a + // coroutine is recursively contained within itself, leading to + // witness types that differ by region args. This means that + // cycle detection in fulfillment will not kick in, which leads + // to unnecessary overflows in async code. See the issue: + // . + let coroutine_args = interner.mk_args_from_iter(coroutine_args.iter().map(|arg| { + match arg { + GenericArg::Ty(_) | GenericArg::Const(_) => arg, + GenericArg::Lifetime(_) => { + crate::next_solver::Region::new(interner, rustc_type_ir::RegionKind::ReErased) + .into() + } + } + })); + Ty::new_coroutine_witness(interner, def_id, coroutine_args) + } + + fn new_ptr(interner: DbInterner<'db>, ty: Self, mutbl: rustc_ast_ir::Mutability) -> Self { + Ty::new(interner, TyKind::RawPtr(ty, mutbl)) + } + + fn new_ref( + interner: DbInterner<'db>, + region: as rustc_type_ir::Interner>::Region, + ty: Self, + mutbl: rustc_ast_ir::Mutability, + ) -> Self { + Ty::new(interner, TyKind::Ref(region, ty, mutbl)) + } + + fn new_array_with_const_len( + interner: DbInterner<'db>, + ty: Self, + len: as rustc_type_ir::Interner>::Const, + ) -> Self { + Ty::new(interner, TyKind::Array(ty, len)) + } + + fn new_slice(interner: DbInterner<'db>, ty: Self) -> Self { + Ty::new(interner, TyKind::Slice(ty)) + } + + fn new_tup( + interner: DbInterner<'db>, + tys: &[ as rustc_type_ir::Interner>::Ty], + ) -> Self { + Ty::new(interner, TyKind::Tuple(Tys::new_from_iter(interner, tys.iter().cloned()))) + } + + fn new_tup_from_iter(interner: DbInterner<'db>, iter: It) -> T::Output + where + It: Iterator, + T: rustc_type_ir::CollectAndApply, + { + T::collect_and_apply(iter, |ts| Ty::new_tup(interner, ts)) + } + + fn new_fn_def( + interner: DbInterner<'db>, + def_id: CallableIdWrapper, + args: as rustc_type_ir::Interner>::GenericArgs, + ) -> Self { + Ty::new(interner, TyKind::FnDef(def_id, args)) + } + + fn new_fn_ptr( + interner: DbInterner<'db>, + sig: rustc_type_ir::Binder, rustc_type_ir::FnSig>>, + ) -> Self { + let (sig_tys, header) = sig.split(); + Ty::new(interner, TyKind::FnPtr(sig_tys, header)) + } + + fn new_pat( + interner: DbInterner<'db>, + ty: Self, + pat: as rustc_type_ir::Interner>::Pat, + ) -> Self { + Ty::new(interner, TyKind::Pat(ty, pat)) + } + + fn tuple_fields(self) -> as rustc_type_ir::Interner>::Tys { + match self.kind() { + TyKind::Tuple(args) => args, + _ => panic!("tuple_fields called on non-tuple: {self:?}"), + } + } + + fn to_opt_closure_kind(self) -> Option { + match self.kind() { + TyKind::Int(int_ty) => match int_ty { + IntTy::I8 => Some(ClosureKind::Fn), + IntTy::I16 => Some(ClosureKind::FnMut), + IntTy::I32 => Some(ClosureKind::FnOnce), + _ => unreachable!("cannot convert type `{:?}` to a closure kind", self), + }, + + // "Bound" types appear in canonical queries when the + // closure type is not yet known, and `Placeholder` and `Param` + // may be encountered in generic `AsyncFnKindHelper` goals. + TyKind::Bound(..) 
| TyKind::Placeholder(_) | TyKind::Param(_) | TyKind::Infer(_) => { + None + } + + TyKind::Error(_) => Some(ClosureKind::Fn), + + _ => unreachable!("cannot convert type `{:?}` to a closure kind", self), + } + } + + fn from_closure_kind(interner: DbInterner<'db>, kind: rustc_type_ir::ClosureKind) -> Self { + match kind { + ClosureKind::Fn => Ty::new(interner, TyKind::Int(IntTy::I8)), + ClosureKind::FnMut => Ty::new(interner, TyKind::Int(IntTy::I16)), + ClosureKind::FnOnce => Ty::new(interner, TyKind::Int(IntTy::I32)), + } + } + + fn from_coroutine_closure_kind( + interner: DbInterner<'db>, + kind: rustc_type_ir::ClosureKind, + ) -> Self { + match kind { + ClosureKind::Fn | ClosureKind::FnMut => Ty::new(interner, TyKind::Int(IntTy::I16)), + ClosureKind::FnOnce => Ty::new(interner, TyKind::Int(IntTy::I32)), + } + } + + fn discriminant_ty( + self, + interner: DbInterner<'db>, + ) -> as rustc_type_ir::Interner>::Ty { + match self.kind() { + TyKind::Adt(adt, _) if adt.is_enum() => adt.repr().discr_type().to_ty(interner), + TyKind::Coroutine(_, args) => args.as_coroutine().discr_ty(interner), + + TyKind::Param(_) | TyKind::Alias(..) | TyKind::Infer(InferTy::TyVar(_)) => { + /* + let assoc_items = tcx.associated_item_def_ids( + tcx.require_lang_item(hir::LangItem::DiscriminantKind, None), + ); + TyKind::new_projection_from_args(tcx, assoc_items[0], tcx.mk_args(&[self.into()])) + */ + unimplemented!() + } + + TyKind::Pat(ty, _) => ty.discriminant_ty(interner), + + TyKind::Bool + | TyKind::Char + | TyKind::Int(_) + | TyKind::Uint(_) + | TyKind::Float(_) + | TyKind::Adt(..) + | TyKind::Foreign(_) + | TyKind::Str + | TyKind::Array(..) + | TyKind::Slice(_) + | TyKind::RawPtr(_, _) + | TyKind::Ref(..) + | TyKind::FnDef(..) + | TyKind::FnPtr(..) + | TyKind::Dynamic(..) + | TyKind::Closure(..) + | TyKind::CoroutineClosure(..) + | TyKind::CoroutineWitness(..) + | TyKind::Never + | TyKind::Tuple(_) + | TyKind::Error(_) + | TyKind::Infer(InferTy::IntVar(_) | InferTy::FloatVar(_)) => { + Ty::new(interner, TyKind::Uint(UintTy::U8)) + } + + TyKind::Bound(..) + | TyKind::Placeholder(_) + | TyKind::Infer( + InferTy::FreshTy(_) | InferTy::FreshIntTy(_) | InferTy::FreshFloatTy(_), + ) => { + panic!( + "`dself.iter().map(|v| v.try_fold_with(folder)).collect::>()?iscriminant_ty` applied to unexpected type: {self:?}" + ) + } + TyKind::UnsafeBinder(..) => unimplemented!(), + } + } + + fn new_unsafe_binder( + interner: DbInterner<'db>, + ty: rustc_type_ir::Binder< + DbInterner<'db>, + as rustc_type_ir::Interner>::Ty, + >, + ) -> Self { + Ty::new(interner, TyKind::UnsafeBinder(ty.into())) + } + + fn has_unsafe_fields(self) -> bool { + false + } +} + +interned_vec_db!(Tys, Ty); + +impl<'db> rustc_type_ir::inherent::Tys> for Tys<'db> { + fn inputs(self) -> as rustc_type_ir::Interner>::FnInputTys { + Tys::new_from_iter( + DbInterner::conjure(), + self.as_slice().split_last().unwrap().1.iter().cloned(), + ) + } + + fn output(self) -> as rustc_type_ir::Interner>::Ty { + *self.as_slice().split_last().unwrap().0 + } +} + +pub type PlaceholderTy = Placeholder; + +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct ParamTy { + // FIXME: I'm not pleased with this. Ideally a `Param` should only know its index - the defining item + // is known from the `EarlyBinder`. This should also be beneficial for memory usage. But code currently + // assumes it can get the definition from `Param` alone - so that's what we got. 
+ pub id: TypeParamId, + pub index: u32, +} + +impl ParamTy { + pub fn to_ty<'db>(self, interner: DbInterner<'db>) -> Ty<'db> { + Ty::new_param(interner, self.id, self.index, sym::MISSING_NAME.clone()) + } +} + +impl std::fmt::Debug for ParamTy { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "#{}", self.index) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct BoundTy { + pub var: BoundVar, + // FIXME: This is for diagnostics in rustc, do we really need it? + pub kind: BoundTyKind, +} + +impl std::fmt::Debug for BoundTy { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self.kind { + BoundTyKind::Anon => write!(f, "{:?}", self.var), + BoundTyKind::Param(def_id) => write!(f, "{def_id:?}"), + } + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub enum BoundTyKind { + Anon, + Param(SolverDefId), +} + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +pub struct ErrorGuaranteed; + +impl<'db> TypeVisitable> for ErrorGuaranteed { + fn visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + visitor.visit_error(*self) + } +} + +impl<'db> TypeFoldable> for ErrorGuaranteed { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + Ok(self) + } + fn fold_with>>(self, folder: &mut F) -> Self { + self + } +} + +impl ParamLike for ParamTy { + fn index(self) -> u32 { + self.index + } +} + +impl<'db> BoundVarLike> for BoundTy { + fn var(self) -> BoundVar { + self.var + } + + fn assert_eq(self, var: BoundVarKind) { + assert_eq!(self.kind, var.expect_ty()) + } +} + +impl<'db> PlaceholderLike> for PlaceholderTy { + type Bound = BoundTy; + + fn universe(self) -> rustc_type_ir::UniverseIndex { + self.universe + } + + fn var(self) -> BoundVar { + self.bound.var + } + + fn with_updated_universe(self, ui: rustc_type_ir::UniverseIndex) -> Self { + Placeholder { universe: ui, bound: self.bound } + } + + fn new(ui: rustc_type_ir::UniverseIndex, bound: BoundTy) -> Self { + Placeholder { universe: ui, bound } + } + + fn new_anon(ui: rustc_type_ir::UniverseIndex, var: rustc_type_ir::BoundVar) -> Self { + Placeholder { universe: ui, bound: BoundTy { var, kind: BoundTyKind::Anon } } + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs new file mode 100644 index 0000000000000..a7f9817f9c08e --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs @@ -0,0 +1,1064 @@ +//! Various utilities for the next-trait-solver. 
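+//!
+//! This includes discriminant arithmetic (`Discr`), conversions between
+//! `rustc_abi` and `rustc_type_ir` primitives, helpers for walking trait
+//! impls, sizedness constraints, a small canonicalizer, and the
+//! placeholder-to-bound-variable replacer that undoes `BoundVarReplacer`.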
+ +use std::iter; +use std::ops::{self, ControlFlow}; + +use base_db::Crate; +use hir_def::lang_item::LangItem; +use hir_def::{BlockId, HasModule, ItemContainerId, Lookup}; +use intern::sym; +use la_arena::Idx; +use rustc_abi::{Float, HasDataLayout, Integer, IntegerType, Primitive, ReprOptions}; +use rustc_type_ir::data_structures::IndexMap; +use rustc_type_ir::inherent::{ + AdtDef, Const as _, GenericArg as _, GenericArgs as _, ParamEnv as _, Region as _, SliceLike, + Ty as _, +}; +use rustc_type_ir::lang_items::SolverTraitLangItem; +use rustc_type_ir::solve::SizedTraitKind; +use rustc_type_ir::{ + BoundVar, Canonical, DebruijnIndex, GenericArgKind, INNERMOST, Interner, PredicatePolarity, + TypeFlags, TypeVisitable, TypeVisitableExt, +}; +use rustc_type_ir::{ + ConstKind, CoroutineArgs, FloatTy, IntTy, RegionKind, TypeFolder, TypeSuperFoldable, + TypeSuperVisitable, TypeVisitor, UintTy, UniverseIndex, inherent::IntoKind, +}; +use rustc_type_ir::{InferCtxtLike, TypeFoldable}; + +use crate::lower_nextsolver::{LifetimeElisionKind, TyLoweringContext}; +use crate::next_solver::infer::InferCtxt; +use crate::next_solver::{ + BoundConst, CanonicalVarKind, FxIndexMap, ParamEnv, Placeholder, PlaceholderConst, + PlaceholderRegion, TypingMode, +}; +use crate::{ + db::HirDatabase, + from_foreign_def_id, + method_resolution::{TraitImpls, TyFingerprint}, +}; + +use super::fold::{BoundVarReplacer, FnMutDelegate}; +use super::generics::generics; +use super::{ + AliasTerm, AliasTy, Binder, BoundRegion, BoundTy, BoundTyKind, BoundVarKind, BoundVarKinds, + CanonicalVars, Clause, ClauseKind, Clauses, Const, DbInterner, EarlyBinder, GenericArg, + GenericArgs, Predicate, PredicateKind, ProjectionPredicate, Region, SolverContext, SolverDefId, + Term, TraitPredicate, TraitRef, Ty, TyKind, +}; + +#[derive(Clone, Debug)] +pub struct Discr<'db> { + /// Bit representation of the discriminant (e.g., `-128i8` is `0xFF_u128`). + pub val: u128, + pub ty: Ty<'db>, +} + +impl<'db> Discr<'db> { + /// Adds `1` to the value and wraps around if the maximum for the type is reached. 
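+ ///
+ /// For example (illustrative; `val` holds the `u128` bit representation):
+ ///
+ /// ```text
+ /// // For an `i8` discriminant, 127 wraps around to -128:
+ /// Discr { val: 0x7F, ty: i8 }.wrap_incr(interner).val == 0x80
+ /// ```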
+ pub fn wrap_incr(self, interner: DbInterner<'db>) -> Self { + self.checked_add(interner, 1).0 + } + pub fn checked_add(self, interner: DbInterner<'db>, n: u128) -> (Self, bool) { + let (size, signed) = self.ty.int_size_and_signed(interner); + let (val, oflo) = if signed { + let min = size.signed_int_min(); + let max = size.signed_int_max(); + let val = size.sign_extend(self.val); + assert!(n < (i128::MAX as u128)); + let n = n as i128; + let oflo = val > max - n; + let val = if oflo { min + (n - (max - val) - 1) } else { val + n }; + // zero the upper bits + let val = val as u128; + let val = size.truncate(val); + (val, oflo) + } else { + let max = size.unsigned_int_max(); + let val = self.val; + let oflo = val > max - n; + let val = if oflo { n - (max - val) - 1 } else { val + n }; + (val, oflo) + }; + (Self { val, ty: self.ty }, oflo) + } +} + +pub trait IntegerTypeExt { + fn to_ty<'db>(&self, interner: DbInterner<'db>) -> Ty<'db>; + fn initial_discriminant<'db>(&self, interner: DbInterner<'db>) -> Discr<'db>; + fn disr_incr<'db>( + &self, + interner: DbInterner<'db>, + val: Option>, + ) -> Option>; +} + +impl IntegerTypeExt for IntegerType { + fn to_ty<'db>(&self, interner: DbInterner<'db>) -> Ty<'db> { + match self { + IntegerType::Pointer(true) => Ty::new(interner, TyKind::Int(IntTy::Isize)), + IntegerType::Pointer(false) => Ty::new(interner, TyKind::Uint(UintTy::Usize)), + IntegerType::Fixed(i, s) => i.to_ty(interner, *s), + } + } + + fn initial_discriminant<'db>(&self, interner: DbInterner<'db>) -> Discr<'db> { + Discr { val: 0, ty: self.to_ty(interner) } + } + + fn disr_incr<'db>( + &self, + interner: DbInterner<'db>, + val: Option>, + ) -> Option> { + if let Some(val) = val { + assert_eq!(self.to_ty(interner), val.ty); + let (new, oflo) = val.checked_add(interner, 1); + if oflo { None } else { Some(new) } + } else { + Some(self.initial_discriminant(interner)) + } + } +} + +pub trait IntegerExt { + fn to_ty<'db>(&self, interner: DbInterner<'db>, signed: bool) -> Ty<'db>; + fn from_int_ty(cx: &C, ity: IntTy) -> Integer; + fn from_uint_ty(cx: &C, ity: UintTy) -> Integer; + fn repr_discr<'db>( + interner: DbInterner<'db>, + ty: Ty<'db>, + repr: &ReprOptions, + min: i128, + max: i128, + ) -> (Integer, bool); +} + +impl IntegerExt for Integer { + #[inline] + fn to_ty<'db>(&self, interner: DbInterner<'db>, signed: bool) -> Ty<'db> { + use Integer::*; + match (*self, signed) { + (I8, false) => Ty::new(interner, TyKind::Uint(UintTy::U8)), + (I16, false) => Ty::new(interner, TyKind::Uint(UintTy::U16)), + (I32, false) => Ty::new(interner, TyKind::Uint(UintTy::U32)), + (I64, false) => Ty::new(interner, TyKind::Uint(UintTy::U64)), + (I128, false) => Ty::new(interner, TyKind::Uint(UintTy::U128)), + (I8, true) => Ty::new(interner, TyKind::Int(IntTy::I8)), + (I16, true) => Ty::new(interner, TyKind::Int(IntTy::I16)), + (I32, true) => Ty::new(interner, TyKind::Int(IntTy::I32)), + (I64, true) => Ty::new(interner, TyKind::Int(IntTy::I64)), + (I128, true) => Ty::new(interner, TyKind::Int(IntTy::I128)), + } + } + + fn from_int_ty(cx: &C, ity: IntTy) -> Integer { + use Integer::*; + match ity { + IntTy::I8 => I8, + IntTy::I16 => I16, + IntTy::I32 => I32, + IntTy::I64 => I64, + IntTy::I128 => I128, + IntTy::Isize => cx.data_layout().ptr_sized_integer(), + } + } + fn from_uint_ty(cx: &C, ity: UintTy) -> Integer { + use Integer::*; + match ity { + UintTy::U8 => I8, + UintTy::U16 => I16, + UintTy::U32 => I32, + UintTy::U64 => I64, + UintTy::U128 => I128, + UintTy::Usize => 
cx.data_layout().ptr_sized_integer(), + } + } + + /// Finds the appropriate Integer type and signedness for the given + /// signed discriminant range and `#[repr]` attribute. + /// N.B.: `u128` values above `i128::MAX` will be treated as signed, but + /// that shouldn't affect anything, other than maybe debuginfo. + fn repr_discr<'db>( + interner: DbInterner<'db>, + ty: Ty<'db>, + repr: &ReprOptions, + min: i128, + max: i128, + ) -> (Integer, bool) { + // Theoretically, negative values could be larger in unsigned representation + // than the unsigned representation of the signed minimum. However, if there + // are any negative values, the only valid unsigned representation is u128 + // which can fit all i128 values, so the result remains unaffected. + let unsigned_fit = Integer::fit_unsigned(std::cmp::max(min as u128, max as u128)); + let signed_fit = std::cmp::max(Integer::fit_signed(min), Integer::fit_signed(max)); + + if let Some(ity) = repr.int { + let discr = Integer::from_attr(&interner, ity); + let fit = if ity.is_signed() { signed_fit } else { unsigned_fit }; + if discr < fit { + panic!( + "Integer::repr_discr: `#[repr]` hint too small for \ + discriminant range of enum `{ty:?}`" + ) + } + return (discr, ity.is_signed()); + } + + let at_least = if repr.c() { + // This is usually I32, however it can be different on some platforms, + // notably hexagon and arm-none/thumb-none + interner.data_layout().c_enum_min_size + } else { + // repr(Rust) enums try to be as small as possible + Integer::I8 + }; + + // If there are no negative values, we can use the unsigned fit. + if min >= 0 { + (std::cmp::max(unsigned_fit, at_least), false) + } else { + (std::cmp::max(signed_fit, at_least), true) + } + } +} + +pub trait FloatExt { + fn to_ty<'db>(&self, interner: DbInterner<'db>) -> Ty<'db>; + fn from_float_ty(fty: FloatTy) -> Self; +} + +impl FloatExt for Float { + #[inline] + fn to_ty<'db>(&self, interner: DbInterner<'db>) -> Ty<'db> { + use Float::*; + match *self { + F16 => Ty::new(interner, TyKind::Float(FloatTy::F16)), + F32 => Ty::new(interner, TyKind::Float(FloatTy::F32)), + F64 => Ty::new(interner, TyKind::Float(FloatTy::F64)), + F128 => Ty::new(interner, TyKind::Float(FloatTy::F128)), + } + } + + fn from_float_ty(fty: FloatTy) -> Self { + use Float::*; + match fty { + FloatTy::F16 => F16, + FloatTy::F32 => F32, + FloatTy::F64 => F64, + FloatTy::F128 => F128, + } + } +} + +pub trait PrimitiveExt { + fn to_ty<'db>(&self, interner: DbInterner<'db>) -> Ty<'db>; + fn to_int_ty<'db>(&self, interner: DbInterner<'db>) -> Ty<'db>; +} + +impl PrimitiveExt for Primitive { + #[inline] + fn to_ty<'db>(&self, interner: DbInterner<'db>) -> Ty<'db> { + match *self { + Primitive::Int(i, signed) => i.to_ty(interner, signed), + Primitive::Float(f) => f.to_ty(interner), + Primitive::Pointer(_) => Ty::new( + interner, + TyKind::RawPtr( + Ty::new(interner, TyKind::Tuple(Default::default())), + rustc_ast_ir::Mutability::Mut, + ), + ), + } + } + + /// Return an *integer* type matching this primitive. + /// Useful in particular when dealing with enum discriminants. 
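+ ///
+ /// For example, `Primitive::Int(I32, true)` maps to `i32`, and
+ /// `Primitive::Pointer(_)` maps to the target's pointer-sized unsigned
+ /// integer; `Primitive::Float(_)` has no integer equivalent and panics.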
+ #[inline] + fn to_int_ty<'db>(&self, interner: DbInterner<'db>) -> Ty<'db> { + match *self { + Primitive::Int(i, signed) => i.to_ty(interner, signed), + Primitive::Pointer(_) => { + let signed = false; + interner.data_layout().ptr_sized_integer().to_ty(interner, signed) + } + Primitive::Float(_) => panic!("floats do not have an int type"), + } + } +} + +impl<'db> HasDataLayout for DbInterner<'db> { + fn data_layout(&self) -> &rustc_abi::TargetDataLayout { + unimplemented!() + } +} + +pub trait CoroutineArgsExt<'db> { + fn discr_ty(&self, interner: DbInterner<'db>) -> Ty<'db>; +} + +impl<'db> CoroutineArgsExt<'db> for CoroutineArgs> { + /// The type of the state discriminant used in the coroutine type. + #[inline] + fn discr_ty(&self, interner: DbInterner<'db>) -> Ty<'db> { + Ty::new(interner, TyKind::Uint(UintTy::U32)) + } +} + +/// Finds the max universe present +pub struct MaxUniverse { + max_universe: UniverseIndex, +} + +impl Default for MaxUniverse { + fn default() -> Self { + Self::new() + } +} + +impl MaxUniverse { + pub fn new() -> Self { + MaxUniverse { max_universe: UniverseIndex::ROOT } + } + + pub fn max_universe(self) -> UniverseIndex { + self.max_universe + } +} + +impl<'db> TypeVisitor> for MaxUniverse { + type Result = (); + + fn visit_ty(&mut self, t: Ty<'db>) { + if let TyKind::Placeholder(placeholder) = t.kind() { + self.max_universe = UniverseIndex::from_u32( + self.max_universe.as_u32().max(placeholder.universe.as_u32()), + ); + } + + t.super_visit_with(self) + } + + fn visit_const(&mut self, c: Const<'db>) { + if let ConstKind::Placeholder(placeholder) = c.kind() { + self.max_universe = UniverseIndex::from_u32( + self.max_universe.as_u32().max(placeholder.universe.as_u32()), + ); + } + + c.super_visit_with(self) + } + + fn visit_region(&mut self, r: Region<'db>) { + if let RegionKind::RePlaceholder(placeholder) = r.kind() { + self.max_universe = UniverseIndex::from_u32( + self.max_universe.as_u32().max(placeholder.universe.as_u32()), + ); + } + } +} + +pub struct BottomUpFolder<'db, F, G, H> +where + F: FnMut(Ty<'db>) -> Ty<'db>, + G: FnMut(Region<'db>) -> Region<'db>, + H: FnMut(Const<'db>) -> Const<'db>, +{ + pub interner: DbInterner<'db>, + pub ty_op: F, + pub lt_op: G, + pub ct_op: H, +} + +impl<'db, F, G, H> TypeFolder> for BottomUpFolder<'db, F, G, H> +where + F: FnMut(Ty<'db>) -> Ty<'db>, + G: FnMut(Region<'db>) -> Region<'db>, + H: FnMut(Const<'db>) -> Const<'db>, +{ + fn cx(&self) -> DbInterner<'db> { + self.interner + } + + fn fold_ty(&mut self, ty: Ty<'db>) -> Ty<'db> { + let t = ty.super_fold_with(self); + (self.ty_op)(t) + } + + fn fold_region(&mut self, r: Region<'db>) -> Region<'db> { + // This one is a little different, because `super_fold_with` is not + // implemented on non-recursive `Region`. + (self.lt_op)(r) + } + + fn fold_const(&mut self, ct: Const<'db>) -> Const<'db> { + let ct = ct.super_fold_with(self); + (self.ct_op)(ct) + } +} + +pub(crate) fn for_trait_impls( + db: &dyn HirDatabase, + krate: Crate, + block: Option, + trait_id: hir_def::TraitId, + self_ty_fp: Option, + mut f: impl FnMut(&TraitImpls) -> ControlFlow<()>, +) -> ControlFlow<()> { + // Note: Since we're using `impls_for_trait` and `impl_provided_for`, + // only impls where the trait can be resolved should ever reach Chalk. + // `impl_datum` relies on that and will panic if the trait can't be resolved. 
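+ //
+ // Impls are visited in the following order: impls from the local crate and
+ // its dependencies, then impls defined in enclosing block expressions, and
+ // finally impls in the blocks that define the trait and the self type.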
+ let in_self_and_deps = db.trait_impls_in_deps(krate); + let trait_module = trait_id.module(db); + let type_module = match self_ty_fp { + Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(db)), + Some(TyFingerprint::ForeignType(type_id)) => Some(from_foreign_def_id(type_id).module(db)), + Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(db)), + _ => None, + }; + + let mut def_blocks = + [trait_module.containing_block(), type_module.and_then(|it| it.containing_block())]; + + let block_impls = iter::successors(block, |&block_id| { + cov_mark::hit!(block_local_impls); + block_id.loc(db).module.containing_block() + }) + .inspect(|&block_id| { + // make sure we don't search the same block twice + def_blocks.iter_mut().for_each(|block| { + if *block == Some(block_id) { + *block = None; + } + }); + }) + .filter_map(|block_id| db.trait_impls_in_block(block_id)); + for it in in_self_and_deps.iter().map(ops::Deref::deref) { + f(it)?; + } + for it in block_impls { + f(&it)?; + } + for it in def_blocks.into_iter().flatten().filter_map(|it| db.trait_impls_in_block(it)) { + f(&it)?; + } + ControlFlow::Continue(()) +} + +// FIXME(next-trait-solver): uplift +pub fn sizedness_constraint_for_ty<'db>( + interner: DbInterner<'db>, + sizedness: SizedTraitKind, + ty: Ty<'db>, +) -> Option> { + use rustc_type_ir::TyKind::*; + + match ty.kind() { + // these are always sized + Bool | Char | Int(..) | Uint(..) | Float(..) | RawPtr(..) | Ref(..) | FnDef(..) + | FnPtr(..) | Array(..) | Closure(..) | CoroutineClosure(..) | Coroutine(..) + | CoroutineWitness(..) | Never => None, + + // these are never sized + Str | Slice(..) | Dynamic(_, _) => match sizedness { + // Never `Sized` + SizedTraitKind::Sized => Some(ty), + // Always `MetaSized` + SizedTraitKind::MetaSized => None, + }, + + // Maybe `Sized` or `MetaSized` + Param(..) | Alias(..) | Error(_) => Some(ty), + + // We cannot instantiate the binder, so just return the *original* type back, + // but only if the inner type has a sized constraint. Thus we skip the binder, + // but don't actually use the result from `sized_constraint_for_ty`. + UnsafeBinder(inner_ty) => { + sizedness_constraint_for_ty(interner, sizedness, inner_ty.skip_binder()).map(|_| ty) + } + + // Never `MetaSized` or `Sized` + Foreign(..) => Some(ty), + + // Recursive cases + Pat(ty, _) => sizedness_constraint_for_ty(interner, sizedness, ty), + + Tuple(tys) => tys + .into_iter() + .last() + .and_then(|ty| sizedness_constraint_for_ty(interner, sizedness, ty)), + + Adt(adt, args) => { + let tail_ty = + EarlyBinder::bind(adt.all_field_tys(interner).skip_binder().into_iter().last()?) + .instantiate(interner, args); + sizedness_constraint_for_ty(interner, sizedness, tail_ty) + } + + Placeholder(..) | Bound(..) | Infer(..) 
=> { + panic!("unexpected type `{ty:?}` in sizedness_constraint_for_ty") + } + } +} + +pub fn apply_args_to_binder<'db, T: TypeFoldable>>( + b: Binder<'db, T>, + args: GenericArgs<'db>, + interner: DbInterner<'db>, +) -> T { + let types = &mut |ty: BoundTy| args.as_slice()[ty.var.index()].expect_ty(); + let regions = &mut |region: BoundRegion| args.as_slice()[region.var.index()].expect_region(); + let consts = &mut |const_: BoundConst| args.as_slice()[const_.var.index()].expect_const(); + let mut instantiate = BoundVarReplacer::new(interner, FnMutDelegate { types, regions, consts }); + b.skip_binder().fold_with(&mut instantiate) +} + +pub(crate) fn mini_canonicalize<'db, T: TypeFoldable>>( + mut context: SolverContext<'db>, + val: T, +) -> Canonical, T> { + let mut canon = MiniCanonicalizer { + context: &mut context, + db: DebruijnIndex::ZERO, + vars: IndexMap::default(), + }; + let canon_val = val.fold_with(&mut canon); + let vars = canon.vars; + Canonical { + value: canon_val, + max_universe: UniverseIndex::from_u32(1), + variables: CanonicalVars::new_from_iter( + context.cx(), + vars.iter().enumerate().map(|(idx, (k, v))| match (*k).kind() { + GenericArgKind::Type(ty) => match ty.kind() { + TyKind::Int(..) | TyKind::Uint(..) => rustc_type_ir::CanonicalVarKind::Int, + TyKind::Float(..) => rustc_type_ir::CanonicalVarKind::Float, + _ => rustc_type_ir::CanonicalVarKind::Ty { + ui: UniverseIndex::ZERO, + sub_root: BoundVar::from_usize(idx), + }, + }, + GenericArgKind::Lifetime(_) => { + rustc_type_ir::CanonicalVarKind::Region(UniverseIndex::ZERO) + } + GenericArgKind::Const(_) => { + rustc_type_ir::CanonicalVarKind::Const(UniverseIndex::ZERO) + } + }), + ), + } +} + +struct MiniCanonicalizer<'a, 'db> { + context: &'a mut SolverContext<'db>, + db: DebruijnIndex, + vars: IndexMap, usize>, +} + +impl<'db> TypeFolder> for MiniCanonicalizer<'_, 'db> { + fn cx(&self) -> DbInterner<'db> { + self.context.cx() + } + + fn fold_binder>>( + &mut self, + t: rustc_type_ir::Binder, T>, + ) -> rustc_type_ir::Binder, T> { + self.db.shift_in(1); + let res = t.map_bound(|t| t.fold_with(self)); + self.db.shift_out(1); + res + } + + fn fold_ty(&mut self, t: Ty<'db>) -> Ty<'db> { + match t.kind() { + rustc_type_ir::TyKind::Bound(db, _) => { + if db >= self.db { + panic!("Unexpected bound var"); + } + t + } + rustc_type_ir::TyKind::Infer(infer) => { + let t = match infer { + rustc_type_ir::InferTy::TyVar(vid) => { + self.context.opportunistic_resolve_ty_var(vid) + } + rustc_type_ir::InferTy::IntVar(vid) => { + self.context.opportunistic_resolve_int_var(vid) + } + rustc_type_ir::InferTy::FloatVar(vid) => { + self.context.opportunistic_resolve_float_var(vid) + } + _ => t, + }; + let len = self.vars.len(); + let var = *self.vars.entry(t.into()).or_insert(len); + Ty::new( + self.cx(), + TyKind::Bound( + self.db, + BoundTy { kind: super::BoundTyKind::Anon, var: BoundVar::from_usize(var) }, + ), + ) + } + _ => t.super_fold_with(self), + } + } + + fn fold_region( + &mut self, + r: as rustc_type_ir::Interner>::Region, + ) -> as rustc_type_ir::Interner>::Region { + match r.kind() { + RegionKind::ReBound(db, _) => { + if db >= self.db { + panic!("Unexpected bound var"); + } + r + } + RegionKind::ReVar(vid) => { + let len = self.vars.len(); + let var = *self.vars.entry(r.into()).or_insert(len); + Region::new( + self.cx(), + RegionKind::ReBound( + self.db, + BoundRegion { + kind: super::BoundRegionKind::Anon, + var: BoundVar::from_usize(var), + }, + ), + ) + } + _ => r, + } + } + + fn fold_const( + &mut self, + c: as 
rustc_type_ir::Interner>::Const, + ) -> as rustc_type_ir::Interner>::Const { + match c.kind() { + ConstKind::Bound(db, _) => { + if db >= self.db { + panic!("Unexpected bound var"); + } + c + } + ConstKind::Infer(infer) => { + let len = self.vars.len(); + let var = *self.vars.entry(c.into()).or_insert(len); + Const::new( + self.cx(), + ConstKind::Bound(self.db, BoundConst { var: BoundVar::from_usize(var) }), + ) + } + _ => c.super_fold_with(self), + } + } +} + +pub fn explicit_item_bounds<'db>( + interner: DbInterner<'db>, + def_id: SolverDefId, +) -> EarlyBinder<'db, Clauses<'db>> { + let db = interner.db(); + match def_id { + SolverDefId::TypeAliasId(type_alias) => { + let trait_ = match type_alias.lookup(db).container { + ItemContainerId::TraitId(t) => t, + _ => panic!("associated type not in trait"), + }; + + // Lower bounds -- we could/should maybe move this to a separate query in `lower` + let type_alias_data = db.type_alias_signature(type_alias); + let generic_params = generics(db, type_alias.into()); + let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &type_alias_data.store, + type_alias.into(), + LifetimeElisionKind::AnonymousReportError, + ); + + let item_args = GenericArgs::identity_for_item(interner, def_id); + let interner_ty = Ty::new_projection_from_args(interner, def_id, item_args); + + let mut bounds = Vec::new(); + for bound in &type_alias_data.bounds { + ctx.lower_type_bound(bound, interner_ty, false).for_each(|pred| { + bounds.push(pred); + }); + } + + if !ctx.unsized_types.contains(&interner_ty) { + let sized_trait = LangItem::Sized + .resolve_trait(ctx.db, interner.krate.expect("Must have interner.krate")); + let sized_bound = sized_trait.map(|trait_id| { + let trait_ref = TraitRef::new_from_args( + interner, + trait_id.into(), + GenericArgs::new_from_iter(interner, [interner_ty.into()]), + ); + Clause(Predicate::new( + interner, + Binder::dummy(rustc_type_ir::PredicateKind::Clause( + rustc_type_ir::ClauseKind::Trait(TraitPredicate { + trait_ref, + polarity: rustc_type_ir::PredicatePolarity::Positive, + }), + )), + )) + }); + bounds.extend(sized_bound); + bounds.shrink_to_fit(); + } + + rustc_type_ir::EarlyBinder::bind(Clauses::new_from_iter(interner, bounds)) + } + SolverDefId::InternedOpaqueTyId(id) => { + let full_id = db.lookup_intern_impl_trait_id(id); + match full_id { + crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => { + let datas = db + .return_type_impl_traits_ns(func) + .expect("impl trait id without impl traits"); + let datas = (*datas).as_ref().skip_binder(); + let data = &datas.impl_traits[Idx::from_raw(idx.into_raw())]; + EarlyBinder::bind(Clauses::new_from_iter(interner, data.predicates.clone())) + } + crate::ImplTraitId::TypeAliasImplTrait(alias, idx) => { + let datas = db + .type_alias_impl_traits_ns(alias) + .expect("impl trait id without impl traits"); + let datas = (*datas).as_ref().skip_binder(); + let data = &datas.impl_traits[Idx::from_raw(idx.into_raw())]; + EarlyBinder::bind(Clauses::new_from_iter(interner, data.predicates.clone())) + } + crate::ImplTraitId::AsyncBlockTypeImplTrait(..) 
=> { + if let Some((future_trait, future_output)) = LangItem::Future + .resolve_trait(db, interner.krate.expect("Must have interner.krate")) + .and_then(|trait_| { + let alias = trait_.trait_items(db).associated_type_by_name( + &hir_expand::name::Name::new_symbol_root(sym::Output.clone()), + )?; + Some((trait_, alias)) + }) + { + let args = GenericArgs::identity_for_item(interner, def_id); + let out = args.as_slice()[0]; + let mut predicates = vec![]; + + let item_ty = Ty::new_alias( + interner, + rustc_type_ir::AliasTyKind::Opaque, + AliasTy::new_from_args(interner, def_id, args), + ); + + let kind = PredicateKind::Clause(ClauseKind::Trait(TraitPredicate { + polarity: rustc_type_ir::PredicatePolarity::Positive, + trait_ref: TraitRef::new_from_args( + interner, + future_trait.into(), + GenericArgs::new_from_iter(interner, [item_ty.into()]), + ), + })); + predicates.push(Clause(Predicate::new( + interner, + Binder::bind_with_vars( + kind, + BoundVarKinds::new_from_iter( + interner, + [BoundVarKind::Ty(BoundTyKind::Anon)], + ), + ), + ))); + let sized_trait = LangItem::Sized + .resolve_trait(db, interner.krate.expect("Must have interner.krate")); + if let Some(sized_trait_) = sized_trait { + let kind = PredicateKind::Clause(ClauseKind::Trait(TraitPredicate { + polarity: rustc_type_ir::PredicatePolarity::Positive, + trait_ref: TraitRef::new_from_args( + interner, + sized_trait_.into(), + GenericArgs::new_from_iter(interner, [item_ty.into()]), + ), + })); + predicates.push(Clause(Predicate::new( + interner, + Binder::bind_with_vars( + kind, + BoundVarKinds::new_from_iter( + interner, + [BoundVarKind::Ty(BoundTyKind::Anon)], + ), + ), + ))); + } + let kind = + PredicateKind::Clause(ClauseKind::Projection(ProjectionPredicate { + projection_term: AliasTerm::new_from_args( + interner, + future_output.into(), + GenericArgs::new_from_iter(interner, [item_ty.into()]), + ), + term: match out.kind() { + GenericArgKind::Lifetime(lt) => panic!(), + GenericArgKind::Type(ty) => Term::Ty(ty), + GenericArgKind::Const(const_) => Term::Const(const_), + }, + })); + predicates.push(Clause(Predicate::new( + interner, + Binder::bind_with_vars( + kind, + BoundVarKinds::new_from_iter( + interner, + [BoundVarKind::Ty(BoundTyKind::Anon)], + ), + ), + ))); + EarlyBinder::bind(Clauses::new_from_iter(interner, predicates)) + } else { + // If failed to find Symbol’s value as variable is void: Future::Output, return empty bounds as fallback. + EarlyBinder::bind(Clauses::new_from_iter(interner, [])) + } + } + } + } + _ => panic!("Unexpected GeneridDefId"), + } +} + +pub struct ContainsTypeErrors; + +impl<'db> TypeVisitor> for ContainsTypeErrors { + type Result = ControlFlow<()>; + + fn visit_ty(&mut self, t: Ty<'db>) -> Self::Result { + match t.kind() { + rustc_type_ir::TyKind::Error(_) => ControlFlow::Break(()), + _ => t.super_visit_with(self), + } + } +} + +/// The inverse of [`BoundVarReplacer`]: replaces placeholders with the bound vars from which they came. 
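+///
+/// The De Bruijn index assigned to each re-introduced bound variable is
+/// computed from the position of the placeholder's universe in
+/// `universe_indices`, adjusted by the number of binders entered while
+/// folding (`current_index`).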
+pub struct PlaceholderReplacer<'a, 'db> { + infcx: &'a InferCtxt<'db>, + mapped_regions: FxIndexMap, + mapped_types: FxIndexMap, BoundTy>, + mapped_consts: FxIndexMap, + universe_indices: &'a [Option], + current_index: DebruijnIndex, +} + +impl<'a, 'db> PlaceholderReplacer<'a, 'db> { + pub fn replace_placeholders>>( + infcx: &'a InferCtxt<'db>, + mapped_regions: FxIndexMap, + mapped_types: FxIndexMap, BoundTy>, + mapped_consts: FxIndexMap, + universe_indices: &'a [Option], + value: T, + ) -> T { + let mut replacer = PlaceholderReplacer { + infcx, + mapped_regions, + mapped_types, + mapped_consts, + universe_indices, + current_index: INNERMOST, + }; + value.fold_with(&mut replacer) + } +} + +impl<'db> TypeFolder> for PlaceholderReplacer<'_, 'db> { + fn cx(&self) -> DbInterner<'db> { + self.infcx.interner + } + + fn fold_binder>>( + &mut self, + t: Binder<'db, T>, + ) -> Binder<'db, T> { + if !t.has_placeholders() && !t.has_infer() { + return t; + } + self.current_index.shift_in(1); + let t = t.super_fold_with(self); + self.current_index.shift_out(1); + t + } + + fn fold_region(&mut self, r0: Region<'db>) -> Region<'db> { + let r1 = match r0.kind() { + RegionKind::ReVar(vid) => self + .infcx + .inner + .borrow_mut() + .unwrap_region_constraints() + .opportunistic_resolve_var(self.infcx.interner, vid), + _ => r0, + }; + + let r2 = match r1.kind() { + RegionKind::RePlaceholder(p) => { + let replace_var = self.mapped_regions.get(&p); + match replace_var { + Some(replace_var) => { + let index = self + .universe_indices + .iter() + .position(|u| matches!(u, Some(pu) if *pu == p.universe)) + .unwrap_or_else(|| panic!("Unexpected placeholder universe.")); + let db = DebruijnIndex::from_usize( + self.universe_indices.len() - index + self.current_index.as_usize() - 1, + ); + Region::new_bound(self.cx(), db, *replace_var) + } + None => r1, + } + } + _ => r1, + }; + + tracing::debug!(?r0, ?r1, ?r2, "fold_region"); + + r2 + } + + fn fold_ty(&mut self, ty: Ty<'db>) -> Ty<'db> { + let ty = self.infcx.shallow_resolve(ty); + match ty.kind() { + TyKind::Placeholder(p) => { + let replace_var = self.mapped_types.get(&p); + match replace_var { + Some(replace_var) => { + let index = self + .universe_indices + .iter() + .position(|u| matches!(u, Some(pu) if *pu == p.universe)) + .unwrap_or_else(|| panic!("Unexpected placeholder universe.")); + let db = DebruijnIndex::from_usize( + self.universe_indices.len() - index + self.current_index.as_usize() - 1, + ); + Ty::new_bound(self.infcx.interner, db, *replace_var) + } + None => { + if ty.has_infer() { + ty.super_fold_with(self) + } else { + ty + } + } + } + } + + _ if ty.has_placeholders() || ty.has_infer() => ty.super_fold_with(self), + _ => ty, + } + } + + fn fold_const(&mut self, ct: Const<'db>) -> Const<'db> { + let ct = self.infcx.shallow_resolve_const(ct); + if let ConstKind::Placeholder(p) = ct.kind() { + let replace_var = self.mapped_consts.get(&p); + match replace_var { + Some(replace_var) => { + let index = self + .universe_indices + .iter() + .position(|u| matches!(u, Some(pu) if *pu == p.universe)) + .unwrap_or_else(|| panic!("Unexpected placeholder universe.")); + let db = DebruijnIndex::from_usize( + self.universe_indices.len() - index + self.current_index.as_usize() - 1, + ); + Const::new_bound(self.infcx.interner, db, *replace_var) + } + None => { + if ct.has_infer() { + ct.super_fold_with(self) + } else { + ct + } + } + } + } else { + ct.super_fold_with(self) + } + } +} + +pub(crate) fn needs_normalization<'db, T: TypeVisitable>>( + infcx: 
&InferCtxt<'db>, + value: &T, +) -> bool { + let mut flags = TypeFlags::HAS_ALIAS; + + // Opaques are treated as rigid outside of `TypingMode::PostAnalysis`, + // so we can ignore those. + match infcx.typing_mode() { + // FIXME(#132279): We likely want to reveal opaques during post borrowck analysis + TypingMode::Coherence + | TypingMode::Analysis { .. } + | TypingMode::Borrowck { .. } + | TypingMode::PostBorrowckAnalysis { .. } => flags.remove(TypeFlags::HAS_TY_OPAQUE), + TypingMode::PostAnalysis => {} + } + + value.has_type_flags(flags) +} + +pub fn sizedness_fast_path<'db>( + tcx: DbInterner<'db>, + predicate: Predicate<'db>, + param_env: ParamEnv<'db>, +) -> bool { + // Proving `Sized`/`MetaSized`, very often on "obviously sized" types like + // `&T`, accounts for about 60% of the predicates we have to prove. No need to + // canonicalize and all that for such cases. + if let PredicateKind::Clause(ClauseKind::Trait(trait_pred)) = predicate.kind().skip_binder() + && trait_pred.polarity == PredicatePolarity::Positive + { + let sizedness = match tcx.as_trait_lang_item(trait_pred.def_id()) { + Some(SolverTraitLangItem::Sized) => SizedTraitKind::Sized, + Some(SolverTraitLangItem::MetaSized) => SizedTraitKind::MetaSized, + _ => return false, + }; + + // FIXME(sized_hierarchy): this temporarily reverts the `sized_hierarchy` feature + // while a proper fix for `tests/ui/sized-hierarchy/incomplete-inference-issue-143992.rs` + // is pending + if matches!(sizedness, SizedTraitKind::MetaSized) { + return true; + } + + if trait_pred.self_ty().has_trivial_sizedness(tcx, sizedness) { + tracing::debug!("fast path -- trivial sizedness"); + return true; + } + + if matches!(trait_pred.self_ty().kind(), TyKind::Param(_) | TyKind::Placeholder(_)) { + for clause in param_env.caller_bounds().iter() { + if let ClauseKind::Trait(clause_pred) = clause.kind().skip_binder() + && clause_pred.polarity == PredicatePolarity::Positive + && clause_pred.self_ty() == trait_pred.self_ty() + && (clause_pred.def_id() == trait_pred.def_id() + || (sizedness == SizedTraitKind::MetaSized + && tcx.is_trait_lang_item( + clause_pred.def_id(), + SolverTraitLangItem::Sized, + ))) + { + return true; + } + } + } + } + + false +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs b/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs index a4e077ba6359f..d2901f7fc53d2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs @@ -34,6 +34,40 @@ pub fn float_ty_to_string(ty: FloatTy) -> &'static str { } } +pub fn int_ty_to_string_ns(ty: rustc_type_ir::IntTy) -> &'static str { + use rustc_type_ir::IntTy; + match ty { + IntTy::Isize => "isize", + IntTy::I8 => "i8", + IntTy::I16 => "i16", + IntTy::I32 => "i32", + IntTy::I64 => "i64", + IntTy::I128 => "i128", + } +} + +pub fn uint_ty_to_string_ns(ty: rustc_type_ir::UintTy) -> &'static str { + use rustc_type_ir::UintTy; + match ty { + UintTy::Usize => "usize", + UintTy::U8 => "u8", + UintTy::U16 => "u16", + UintTy::U32 => "u32", + UintTy::U64 => "u64", + UintTy::U128 => "u128", + } +} + +pub fn float_ty_to_string_ns(ty: rustc_type_ir::FloatTy) -> &'static str { + use rustc_type_ir::FloatTy; + match ty { + FloatTy::F16 => "f16", + FloatTy::F32 => "f32", + FloatTy::F64 => "f64", + FloatTy::F128 => "f128", + } +} + pub(super) fn int_ty_from_builtin(t: BuiltinInt) -> IntTy { match t { BuiltinInt::Isize => IntTy::Isize, diff --git 
a/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs b/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs index 9d1238701bcfa..0a8ed2cf0cabd 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs @@ -7,7 +7,7 @@ use hir_def::tt; use intern::{Symbol, sym}; use rustc_hash::{FxHashMap, FxHashSet}; -#[derive(Debug, Default)] +#[derive(Debug, Default, Clone)] pub struct TargetFeatures { pub(crate) enabled: FxHashSet, } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs index 775136dc0cbf7..2a92aa52e0cd2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs @@ -3,8 +3,8 @@ use std::{fmt, panic, sync::Mutex}; use base_db::{ - CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, RootQueryDb, SourceDatabase, - SourceRoot, SourceRootId, SourceRootInput, + CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, Nonce, RootQueryDb, + SourceDatabase, SourceRoot, SourceRootId, SourceRootInput, }; use hir_def::{ModuleId, db::DefDatabase, nameres::crate_def_map}; @@ -17,12 +17,12 @@ use test_utils::extract_annotations; use triomphe::Arc; #[salsa_macros::db] -#[derive(Clone)] pub(crate) struct TestDB { storage: salsa::Storage, files: Arc, crates_map: Arc, events: Arc>>>, + nonce: Nonce, } impl Default for TestDB { @@ -41,6 +41,7 @@ impl Default for TestDB { events, files: Default::default(), crates_map: Default::default(), + nonce: Nonce::new(), }; this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH); // This needs to be here otherwise `CrateGraphBuilder` panics. @@ -50,6 +51,18 @@ impl Default for TestDB { } } +impl Clone for TestDB { + fn clone(&self) -> Self { + Self { + storage: self.storage.clone(), + files: self.files.clone(), + crates_map: self.crates_map.clone(), + events: self.events.clone(), + nonce: Nonce::new(), + } + } +} + impl fmt::Debug for TestDB { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("TestDB").finish() @@ -109,6 +122,10 @@ impl SourceDatabase for TestDB { fn crates_map(&self) -> Arc { self.crates_map.clone() } + + fn nonce_and_revision(&self) -> (Nonce, salsa::Revision) { + (self.nonce, salsa::plumbing::ZalsaDatabase::zalsa(self).current_revision()) + } } #[salsa_macros::db] diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs index 9605a0b4124d8..1c3da438cb364 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs @@ -6,14 +6,12 @@ mod incremental; mod macros; mod method_resolution; mod never_type; +mod opaque_types; mod patterns; mod regression; mod simple; +mod trait_aliases; mod traits; -mod type_alias_impl_traits; - -use std::env; -use std::sync::LazyLock; use base_db::{Crate, SourceDatabase}; use expect_test::Expect; @@ -35,8 +33,6 @@ use syntax::{ ast::{self, AstNode, HasName}, }; use test_fixture::WithFixture; -use tracing_subscriber::{Registry, layer::SubscriberExt}; -use tracing_tree::HierarchicalLayer; use triomphe::Arc; use crate::{ @@ -44,6 +40,7 @@ use crate::{ db::HirDatabase, display::{DisplayTarget, HirDisplay}, infer::{Adjustment, TypeMismatch}, + setup_tracing, test_db::TestDB, }; @@ -51,23 +48,6 @@ use crate::{ // against snapshots of the expected results using expect. 
Use // `env UPDATE_EXPECT=1 cargo test -p hir_ty` to update the snapshots. -fn setup_tracing() -> Option { - static ENABLE: LazyLock = LazyLock::new(|| env::var("CHALK_DEBUG").is_ok()); - if !*ENABLE { - return None; - } - - let filter: tracing_subscriber::filter::Targets = - env::var("CHALK_DEBUG").ok().and_then(|it| it.parse().ok()).unwrap_or_default(); - let layer = HierarchicalLayer::default() - .with_indent_lines(true) - .with_ansi(false) - .with_indent_amount(2) - .with_writer(std::io::stderr); - let subscriber = Registry::default().with(filter).with(layer); - Some(tracing::subscriber::set_default(subscriber)) -} - #[track_caller] fn check_types(#[rust_analyzer::rust_fixture] ra_fixture: &str) { check_impl(ra_fixture, false, true, false) @@ -177,11 +157,13 @@ fn check_impl( }; let range = node.as_ref().original_file_range_rooted(&db); if let Some(expected) = types.remove(&range) { - let actual = if display_source { - ty.display_source_code(&db, def.module(&db), true).unwrap() - } else { - ty.display_test(&db, display_target).to_string() - }; + let actual = salsa::attach(&db, || { + if display_source { + ty.display_source_code(&db, def.module(&db), true).unwrap() + } else { + ty.display_test(&db, display_target).to_string() + } + }); assert_eq!(actual, expected, "type annotation differs at {:#?}", range.range); } } @@ -193,11 +175,13 @@ fn check_impl( }; let range = node.as_ref().original_file_range_rooted(&db); if let Some(expected) = types.remove(&range) { - let actual = if display_source { - ty.display_source_code(&db, def.module(&db), true).unwrap() - } else { - ty.display_test(&db, display_target).to_string() - }; + let actual = salsa::attach(&db, || { + if display_source { + ty.display_source_code(&db, def.module(&db), true).unwrap() + } else { + ty.display_test(&db, display_target).to_string() + } + }); assert_eq!(actual, expected, "type annotation differs at {:#?}", range.range); } if let Some(expected) = adjustments.remove(&range) { @@ -223,11 +207,13 @@ fn check_impl( continue; }; let range = node.as_ref().original_file_range_rooted(&db); - let actual = format!( - "expected {}, got {}", - mismatch.expected.display_test(&db, display_target), - mismatch.actual.display_test(&db, display_target) - ); + let actual = salsa::attach(&db, || { + format!( + "expected {}, got {}", + mismatch.expected.display_test(&db, display_target), + mismatch.actual.display_test(&db, display_target) + ) + }); match mismatches.remove(&range) { Some(annotation) => assert_eq!(actual, annotation), None => format_to!(unexpected_type_mismatches, "{:?}: {}\n", range.range, actual), @@ -422,7 +408,9 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { for (def, krate) in defs { let (body, source_map) = db.body_with_source_map(def); let infer = db.infer(def); - infer_def(infer, body, source_map, krate); + salsa::attach(&db, || { + infer_def(infer, body, source_map, krate); + }) } buf.truncate(buf.trim_end().len()); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs index dbc68eeba1e64..b001ac1e82ea2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs @@ -12,9 +12,10 @@ use crate::display::{DisplayTarget, HirDisplay}; use crate::mir::MirSpan; use crate::test_db::TestDB; -use super::visit_module; +use super::{setup_tracing, visit_module}; fn 
check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { + let _tracing = setup_tracing(); let (db, file_id) = TestDB::with_single_file(ra_fixture); let module = db.module_for_file(file_id.file_id(&db)); let def_map = module.def_map(&db); @@ -66,11 +67,13 @@ fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expec .join(", "), }; let place = capture.display_place(closure.0, db); - let capture_ty = capture - .ty - .skip_binders() - .display_test(db, DisplayTarget::from_crate(db, module.krate())) - .to_string(); + let capture_ty = salsa::attach(db, || { + capture + .ty + .skip_binders() + .display_test(db, DisplayTarget::from_crate(db, module.krate())) + .to_string() + }); let spans = capture .spans() .iter() diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs index 3894b4b6f7bad..1735f550b8ad7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs @@ -49,7 +49,7 @@ fn let_stmt_coerce() { //- minicore: coerce_unsized fn test() { let x: &[isize] = &[1]; - // ^^^^ adjustments: Deref(None), Borrow(Ref('?2, Not)), Pointer(Unsize) + // ^^^^ adjustments: Deref(None), Borrow(Ref('?1, Not)), Pointer(Unsize) let x: *const [isize] = &[1]; // ^^^^ adjustments: Deref(None), Borrow(RawPtr(Not)), Pointer(Unsize) } @@ -96,7 +96,7 @@ fn foo(x: &[T]) -> &[T] { x } fn test() { let x = if true { foo(&[1]) - // ^^^^ adjustments: Deref(None), Borrow(Ref('?8, Not)), Pointer(Unsize) + // ^^^^ adjustments: Deref(None), Borrow(Ref('?1, Not)), Pointer(Unsize) } else { &[1] }; @@ -148,7 +148,7 @@ fn foo(x: &[T]) -> &[T] { x } fn test(i: i32) { let x = match i { 2 => foo(&[2]), - // ^^^^ adjustments: Deref(None), Borrow(Ref('?8, Not)), Pointer(Unsize) + // ^^^^ adjustments: Deref(None), Borrow(Ref('?1, Not)), Pointer(Unsize) 1 => &[1], _ => &[3], }; @@ -177,21 +177,23 @@ fn test(i: i32) { #[test] fn coerce_merge_one_by_one1() { - cov_mark::check!(coerce_merge_fail_fallback); - check( r" fn test() { let t = &mut 1; let x = match 1 { 1 => t as *mut i32, - //^^^^^^^^^^^^^ adjustments: Pointer(MutToConstPointer) + //^ adjustments: Deref(None), Borrow(RawPtr(Mut)) + _ => t as *const i32, + }; + x; + // ^ type: *const i32 + let x = match 1 { + 1 => t as *mut i32, 2 => t as &i32, //^^^^^^^^^ expected *mut i32, got &'? i32 _ => t as *const i32, }; - x; - //^ type: *const i32 } ", @@ -276,17 +278,19 @@ fn test() { fn coerce_autoderef_implication_1() { check_no_mismatches( r" -//- minicore: deref -struct Foo; +//- minicore: deref, phantom_data +use core::marker::PhantomData; + +struct Foo(PhantomData); impl core::ops::Deref for Foo { type Target = (); } fn takes_ref_foo(x: &Foo) {} fn test() { - let foo = Foo; + let foo = Foo(PhantomData); //^^^ type: Foo<{unknown}> takes_ref_foo(&foo); - let foo = Foo; + let foo = Foo(PhantomData); //^^^ type: Foo let _: &() = &foo; }", @@ -297,16 +301,18 @@ fn test() { fn coerce_autoderef_implication_2() { check( r" -//- minicore: deref -struct Foo; +//- minicore: deref, phantom_data +use core::marker::PhantomData; + +struct Foo(PhantomData); impl core::ops::Deref for Foo { type Target = (); } fn takes_ref_foo(x: &Foo) {} fn test() { - let foo = Foo; + let foo = Foo(PhantomData); //^^^ type: Foo<{unknown}> - let _: &u32 = &Foo; - //^^^^ expected &'? u32, got &'? Foo<{unknown}> + let _: &u32 = &Foo(PhantomData); + //^^^^^^^^^^^^^^^^^ expected &'? u32, got &'? 
Foo<{unknown}> }", ); } @@ -409,8 +415,6 @@ fn test() { #[test] fn coerce_fn_items_in_match_arms() { - cov_mark::check!(coerce_fn_reification); - check_no_mismatches( r" fn foo1(x: u32) -> isize { 1 } @@ -484,6 +488,8 @@ fn test() { ); } +// FIXME(next-solver): We could learn more from the `&S` -> `&dyn Foo` coercion if we followed the rustc model +// where unsized is successful if all unsizing trait goals are certain (and non-unsizing goals are delayed). #[test] fn coerce_unsize_trait_object_simple() { check_types( @@ -503,8 +509,8 @@ fn test() { //^ S let obj: &dyn Bar<_, i8, i16> = &S; //^ S - let obj: &dyn Foo = &S; - //^ S + //let obj: &dyn Foo = &S; + // S<{unknown}, {unknown}> }"#, ); } @@ -543,9 +549,9 @@ struct Bar(Foo); fn test() { let _: &Foo<[usize]> = &Foo { t: [1, 2, 3] }; - //^^^^^^^^^^^^^^^^^^^^^ expected &'? Foo<[usize]>, got &'? Foo<[i32; 3]> + //^^^^^^^^^^^^^^^^^^^^^ type: &'? Foo<[usize; 3]> let _: &Bar<[usize]> = &Bar(Foo { t: [1, 2, 3] }); - //^^^^^^^^^^^^^^^^^^^^^^^^^^ expected &'? Bar<[usize]>, got &'? Bar<[i32; 3]> + //^^^^^^^^^^^^^^^^^^^^^^^^^^ type: &'? Bar<[usize; 3]> } "#, ); @@ -681,9 +687,9 @@ fn coerce_unsize_expected_type_2() { check_no_mismatches( r#" //- minicore: coerce_unsized -struct InFile; +struct InFile(T); impl InFile { - fn with_value(self, value: U) -> InFile { InFile } + fn with_value(self, value: U) -> InFile { InFile(loop {}) } } struct RecordField; trait AstNode {} @@ -692,7 +698,7 @@ impl AstNode for RecordField {} fn takes_dyn(it: InFile<&dyn AstNode>) {} fn test() { - let x: InFile<()> = InFile; + let x: InFile<()> = InFile(()); let n = &RecordField; takes_dyn(x.with_value(n)); } @@ -879,7 +885,7 @@ fn adjust_index() { fn test() { let x = [1, 2, 3]; x[2] = 6; - // ^ adjustments: Borrow(Ref('?8, Mut)) + // ^ adjustments: Borrow(Ref('?0, Mut)) } ", ); @@ -899,16 +905,16 @@ impl core::ops::Index for StructMut { fn index(&self, index: usize) -> &Self::Output { &() } } -impl core::ops::IndexMut for StructMut { +impl core::ops::IndexMut for StructMut { fn index_mut(&mut self, index: usize) -> &mut Self::Output { &mut () } } fn test() { Struct[0]; - // ^^^^^^ adjustments: Borrow(Ref('?2, Not)) + // ^^^^^^ adjustments: Borrow(Ref('?0, Not)) StructMut[0]; - // ^^^^^^^^^ adjustments: Borrow(Ref('?5, Not)) + // ^^^^^^^^^ adjustments: Borrow(Ref('?1, Not)) &mut StructMut[0]; - // ^^^^^^^^^ adjustments: Borrow(Ref('?8, Mut)) + // ^^^^^^^^^ adjustments: Borrow(Ref('?2, Mut)) }", ); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs index 855034117c0d7..f257aa1b6e602 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs @@ -89,7 +89,6 @@ fn test(x: bool) { //^^^^ expected (), got &'static str } match x { true => true, false => 0 } - //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected (), got bool //^ expected bool, got i32 () } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs index 3159499e86707..c0b930e5e1231 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs @@ -48,7 +48,6 @@ fn foo() -> i32 { "expr_scopes_shim", "lang_item", "crate_lang_items", - "lang_item", ] "#]], ); @@ -138,7 +137,6 @@ fn baz() -> i32 { "crate_lang_items", "attrs_shim", "attrs_shim", - "lang_item", "infer_shim", 
"function_signature_shim", "function_signature_with_source_map_shim", @@ -519,6 +517,7 @@ impl SomeStruct { ); } +// FIXME(next-solver): does this test make sense with fast path? #[test] fn add_struct_invalidates_trait_solve() { let (mut db, file_id) = TestDB::with_single_file( @@ -559,7 +558,7 @@ fn main() { let _inference_result = db.infer(def); } }, - &[("trait_solve_shim", 2)], + &[("trait_solve_shim", 0)], expect_test::expect![[r#" [ "source_root_crates_shim", @@ -587,6 +586,7 @@ fn main() { "crate_lang_items", "attrs_shim", "attrs_shim", + "generic_predicates_ns_shim", "return_type_impl_traits_shim", "infer_shim", "function_signature_shim", @@ -600,28 +600,21 @@ fn main() { "VariantFields::firewall_", "VariantFields::query_", "lang_item", + "lang_item", "inherent_impls_in_crate_shim", "impl_signature_shim", "impl_signature_with_source_map_shim", "callable_item_signature_shim", - "adt_variance_shim", - "variances_of_shim", - "trait_solve_shim", - "trait_datum_shim", - "generic_predicates_shim", - "adt_datum_shim", "trait_impls_in_deps_shim", "trait_impls_in_crate_shim", "impl_trait_with_diagnostics_shim", "impl_self_ty_with_diagnostics_shim", "type_for_adt_tracked", - "impl_datum_shim", - "generic_predicates_shim", - "program_clauses_for_chalk_env_shim", + "impl_trait_with_diagnostics_ns_shim", + "impl_self_ty_with_diagnostics_ns_shim", + "generic_predicates_ns_shim", "value_ty_shim", "generic_predicates_shim", - "trait_solve_shim", - "lang_item", ] "#]], ); @@ -693,6 +686,7 @@ fn main() { "attrs_shim", "attrs_shim", "attrs_shim", + "generic_predicates_ns_shim", "return_type_impl_traits_shim", "infer_shim", "function_signature_with_source_map_shim", @@ -703,10 +697,12 @@ fn main() { "impl_signature_with_source_map_shim", "impl_signature_shim", "callable_item_signature_shim", - "generic_predicates_shim", "trait_impls_in_crate_shim", "impl_trait_with_diagnostics_shim", "impl_self_ty_with_diagnostics_shim", + "impl_trait_with_diagnostics_ns_shim", + "impl_self_ty_with_diagnostics_ns_shim", + "generic_predicates_ns_shim", "generic_predicates_shim", ] "#]], diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs index ea7a113cae3f6..25b938c7078aa 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs @@ -194,20 +194,20 @@ fn expr_macro_def_expanded_in_various_places() { !0..6 '1isize': isize !0..6 '1isize': isize !0..6 '1isize': isize - 39..442 '{ ...!(); }': () + 39..442 '{ ...!(); }': {unknown} 73..94 'spam!(...am!())': {unknown} 100..119 'for _ ...!() {}': fn into_iter(isize) -> ::IntoIter - 100..119 'for _ ...!() {}': IntoIterator::IntoIter + 100..119 'for _ ...!() {}': ::IntoIter 100..119 'for _ ...!() {}': ! - 100..119 'for _ ...!() {}': IntoIterator::IntoIter - 100..119 'for _ ...!() {}': &'? mut IntoIterator::IntoIter - 100..119 'for _ ...!() {}': fn next>(&'? mut IntoIterator::IntoIter) -> Option< as Iterator>::Item> - 100..119 'for _ ...!() {}': Option> + 100..119 'for _ ...!() {}': ::IntoIter + 100..119 'for _ ...!() {}': &'? mut ::IntoIter + 100..119 'for _ ...!() {}': fn next<{unknown}>(&'? 
mut {unknown}) -> Option<<{unknown} as Iterator>::Item> + 100..119 'for _ ...!() {}': Option<<{unknown} as Iterator>::Item> 100..119 'for _ ...!() {}': () 100..119 'for _ ...!() {}': () 100..119 'for _ ...!() {}': () 100..119 'for _ ...!() {}': () - 104..105 '_': IntoIterator::Item + 104..105 '_': {unknown} 117..119 '{}': () 124..134 '|| spam!()': impl Fn() -> isize 140..156 'while ...!() {}': ! @@ -288,20 +288,20 @@ fn expr_macro_rules_expanded_in_various_places() { !0..6 '1isize': isize !0..6 '1isize': isize !0..6 '1isize': isize - 53..456 '{ ...!(); }': () + 53..456 '{ ...!(); }': {unknown} 87..108 'spam!(...am!())': {unknown} 114..133 'for _ ...!() {}': fn into_iter(isize) -> ::IntoIter - 114..133 'for _ ...!() {}': IntoIterator::IntoIter + 114..133 'for _ ...!() {}': ::IntoIter 114..133 'for _ ...!() {}': ! - 114..133 'for _ ...!() {}': IntoIterator::IntoIter - 114..133 'for _ ...!() {}': &'? mut IntoIterator::IntoIter - 114..133 'for _ ...!() {}': fn next>(&'? mut IntoIterator::IntoIter) -> Option< as Iterator>::Item> - 114..133 'for _ ...!() {}': Option> + 114..133 'for _ ...!() {}': ::IntoIter + 114..133 'for _ ...!() {}': &'? mut ::IntoIter + 114..133 'for _ ...!() {}': fn next<{unknown}>(&'? mut {unknown}) -> Option<<{unknown} as Iterator>::Item> + 114..133 'for _ ...!() {}': Option<<{unknown} as Iterator>::Item> 114..133 'for _ ...!() {}': () 114..133 'for _ ...!() {}': () 114..133 'for _ ...!() {}': () 114..133 'for _ ...!() {}': () - 118..119 '_': IntoIterator::Item + 118..119 '_': {unknown} 131..133 '{}': () 138..148 '|| spam!()': impl Fn() -> isize 154..170 'while ...!() {}': ! @@ -707,7 +707,7 @@ fn infer_builtin_macros_file() { expect![[r#" !0..6 '"file"': &'static str 63..87 '{ ...!(); }': () - 73..74 'x': &'static str + 73..74 'x': &'? str "#]], ); } @@ -745,7 +745,7 @@ fn infer_builtin_macros_concat() { expect![[r#" !0..13 '"helloworld!"': &'static str 65..121 '{ ...")); }': () - 75..76 'x': &'static str + 75..76 'x': &'? str "#]], ); } @@ -822,7 +822,7 @@ macro_rules! include_str {() => {}} fn main() { let a = include_str!("foo.rs"); a; -} //^ &'static str +} //^ &'? str //- /foo.rs hello @@ -849,7 +849,7 @@ macro_rules! m { fn main() { let a = include_str!(m!(".rs")); a; -} //^ &'static str +} //^ &'? str //- /foo.rs hello @@ -964,7 +964,7 @@ fn infer_builtin_macros_concat_with_lazy() { expect![[r#" !0..13 '"helloworld!"': &'static str 103..160 '{ ...")); }': () - 113..114 'x': &'static str + 113..114 'x': &'? str "#]], ); } @@ -979,7 +979,7 @@ fn infer_builtin_macros_env() { fn main() { let x = env!("foo"); - //^ &'static str + //^ &'? str } "#, ); @@ -993,7 +993,7 @@ fn infer_builtin_macros_option_env() { //- /main.rs env:foo=bar fn main() { let x = option_env!("foo"); - //^ Option<&'static str> + //^ Option<&'? 
str> } "#, ); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs index c58ca6c67a8de..b14ce35aa99c8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs @@ -578,17 +578,17 @@ fn infer_trait_assoc_method_generics_3() { trait Trait { fn make() -> (Self, T); } - struct S; + struct S(T); impl Trait for S {} fn test() { let a = S::make(); } "#, expect![[r#" - 100..126 '{ ...e(); }': () - 110..111 'a': (S, i64) - 114..121 'S::make': fn make, i64>() -> (S, i64) - 114..123 'S::make()': (S, i64) + 103..129 '{ ...e(); }': () + 113..114 'a': (S, i64) + 117..124 'S::make': fn make, i64>() -> (S, i64) + 117..126 'S::make()': (S, i64) "#]], ); } @@ -600,7 +600,7 @@ fn infer_trait_assoc_method_generics_4() { trait Trait { fn make() -> (Self, T); } - struct S; + struct S(T); impl Trait for S {} impl Trait for S {} fn test() { @@ -609,13 +609,13 @@ fn infer_trait_assoc_method_generics_4() { } "#, expect![[r#" - 130..202 '{ ...e(); }': () - 140..141 'a': (S, i64) - 157..164 'S::make': fn make, i64>() -> (S, i64) - 157..166 'S::make()': (S, i64) - 176..177 'b': (S, i32) - 190..197 'S::make': fn make, i32>() -> (S, i32) - 190..199 'S::make()': (S, i32) + 133..205 '{ ...e(); }': () + 143..144 'a': (S, i64) + 160..167 'S::make': fn make, i64>() -> (S, i64) + 160..169 'S::make()': (S, i64) + 179..180 'b': (S, i32) + 193..200 'S::make': fn make, i32>() -> (S, i32) + 193..202 'S::make()': (S, i32) "#]], ); } @@ -627,7 +627,7 @@ fn infer_trait_assoc_method_generics_5() { trait Trait { fn make() -> (Self, T, U); } - struct S; + struct S(T); impl Trait for S {} fn test() { let a = >::make::(); @@ -635,13 +635,13 @@ fn infer_trait_assoc_method_generics_5() { } "#, expect![[r#" - 106..210 '{ ...>(); }': () - 116..117 'a': (S, i64, u8) - 120..149 '': fn make, i64, u8>() -> (S, i64, u8) - 120..151 '()': (S, i64, u8) - 161..162 'b': (S, i64, u8) - 181..205 'Trait:...::': fn make, i64, u8>() -> (S, i64, u8) - 181..207 'Trait:...()': (S, i64, u8) + 109..213 '{ ...>(); }': () + 119..120 'a': (S, i64, u8) + 123..152 '': fn make, i64, u8>() -> (S, i64, u8) + 123..154 '()': (S, i64, u8) + 164..165 'b': (S, i64, u8) + 184..208 'Trait:...::': fn make, i64, u8>() -> (S, i64, u8) + 184..210 'Trait:...()': (S, i64, u8) "#]], ); } @@ -1107,6 +1107,9 @@ fn method_resolution_slow() { // this can get quite slow if we set the solver size limit too high check_types( r#" +//- minicore: phantom_data +use core::marker::PhantomData; + trait SendX {} struct S1; impl SendX for S1 {} @@ -1115,17 +1118,17 @@ struct U1; trait Trait { fn method(self); } -struct X1 {} +struct X1(PhantomData<(A, B)>); impl SendX for X1 where A: SendX, B: SendX {} -struct S {} +struct S(PhantomData<(B, C)>); trait FnX {} impl Trait for S where C: FnX, B: SendX {} -fn test() { (S {}).method(); } - //^^^^^^^^^^^^^^^ () +fn test() { (S(PhantomData)).method(); } + //^^^^^^^^^^^^^^^^^^^^^^^^^ () "#, ); } @@ -1134,6 +1137,7 @@ fn test() { (S {}).method(); } fn dyn_trait_super_trait_not_in_scope() { check_infer( r#" + //- minicore: dispatch_from_dyn mod m { pub trait SuperTrait { fn foo(&self) -> u32 { 0 } @@ -1186,11 +1190,11 @@ fn test() { 89..109 '{ ... }': bool 99..103 'true': bool 123..167 '{ ...o(); }': () - 133..134 's': &'static S - 137..151 'unsafe { f() }': &'static S + 133..134 's': &'? S + 137..151 'unsafe { f() }': &'? 
S 146..147 'f': fn f() -> &'static S 146..149 'f()': &'static S - 157..158 's': &'static S + 157..158 's': &'? S 157..164 's.foo()': bool "#]], ); @@ -1309,7 +1313,7 @@ fn main() { fn dyn_trait_method_priority() { check_types( r#" -//- minicore: from +//- minicore: from, dispatch_from_dyn trait Trait { fn into(&self) -> usize { 0 } } @@ -1823,6 +1827,33 @@ fn test() { ); } +#[test] +fn deref_fun_3() { + check_types( + r#" +//- minicore: receiver + +struct A(T, U); +struct B(T); +struct C(T); + +impl core::ops::Deref for A, u32> { + type Target = B; + fn deref(&self) -> &B { &self.0 } +} + +fn make() -> T { loop {} } + +fn test() { + let a1 = A(make(), make()); + let _: usize = (*a1).0; + a1; + //^^ A, u32> +} +"#, + ); +} + #[test] fn deref_into_inference_var() { check_types( @@ -1848,9 +1879,9 @@ impl Foo { } fn test() { Foo.foo(); - //^^^ adjustments: Borrow(Ref('?1, Not)) + //^^^ adjustments: Borrow(Ref('?0, Not)) (&Foo).foo(); - // ^^^^ adjustments: Deref(None), Borrow(Ref('?3, Not)) + // ^^^^ adjustments: Deref(None), Borrow(Ref('?2, Not)) } "#, ); @@ -1864,7 +1895,7 @@ fn receiver_adjustment_unsize_array() { fn test() { let a = [1, 2, 3]; a.len(); -} //^ adjustments: Borrow(Ref('?7, Not)), Pointer(Unsize) +} //^ adjustments: Borrow(Ref('?0, Not)), Pointer(Unsize) "#, ); } @@ -2077,7 +2108,7 @@ impl Foo { } fn test() { Box::new(Foo).foo(); - //^^^^^^^^^^^^^ adjustments: Deref(None), Borrow(Ref('?3, Not)) + //^^^^^^^^^^^^^ adjustments: Deref(None), Borrow(Ref('?0, Not)) } "#, ); @@ -2095,7 +2126,7 @@ impl Foo { use core::mem::ManuallyDrop; fn test() { ManuallyDrop::new(Foo).foo(); - //^^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref('?4, Not)) + //^^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref('?0, Not)) } "#, ); @@ -2163,9 +2194,9 @@ impl Receiver for Bar { fn main() { let bar = Bar; let _v1 = bar.foo1(); - //^^^ type: {unknown} + //^^^ type: i32 let _v2 = bar.foo2(); - //^^^ type: {unknown} + //^^^ type: bool } "#, ); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs index 6a9135622deb6..af5290d720356 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs @@ -14,6 +14,8 @@ fn test() { ); } +// FIXME(next-solver): The never type fallback implemented in r-a no longer works properly because of +// `Coerce` predicates. We should reimplement fallback like rustc. #[test] fn infer_never2() { check_types( @@ -24,7 +26,7 @@ fn test() { let a = gen(); if false { a } else { loop {} }; a; -} //^ ! +} //^ {unknown} "#, ); } @@ -39,7 +41,7 @@ fn test() { let a = gen(); if false { loop {} } else { a }; a; - //^ ! + //^ {unknown} } "#, ); @@ -54,7 +56,7 @@ enum Option { None, Some(T) } fn test() { let a = if true { Option::None } else { Option::Some(return) }; a; -} //^ Option +} //^ Option<{unknown}> "#, ); } @@ -104,7 +106,7 @@ enum Option { None, Some(T) } fn test() { let a = if true { Option::None } else { Option::Some(return) }; a; - //^ Option<&'static str> + //^ Option<&'? str> match 42 { 42 => a, _ => Option::Some("str"), @@ -218,7 +220,7 @@ fn test(a: i32) { _ => loop {}, }; i; -} //^ ! 
+} //^ {unknown} "#, ); } @@ -362,12 +364,12 @@ fn diverging_expression_3_break() { 140..141 'x': u32 149..175 '{ for ...; }; }': u32 151..172 'for a ...eak; }': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter - 151..172 'for a ...eak; }': {unknown} + 151..172 'for a ...eak; }': <{unknown} as IntoIterator>::IntoIter 151..172 'for a ...eak; }': ! 151..172 'for a ...eak; }': {unknown} 151..172 'for a ...eak; }': &'? mut {unknown} 151..172 'for a ...eak; }': fn next<{unknown}>(&'? mut {unknown}) -> Option<<{unknown} as Iterator>::Item> - 151..172 'for a ...eak; }': Option<{unknown}> + 151..172 'for a ...eak; }': Option<<{unknown} as Iterator>::Item> 151..172 'for a ...eak; }': () 151..172 'for a ...eak; }': () 151..172 'for a ...eak; }': () @@ -379,12 +381,12 @@ fn diverging_expression_3_break() { 226..227 'x': u32 235..253 '{ for ... {}; }': u32 237..250 'for a in b {}': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter - 237..250 'for a in b {}': {unknown} + 237..250 'for a in b {}': <{unknown} as IntoIterator>::IntoIter 237..250 'for a in b {}': ! 237..250 'for a in b {}': {unknown} 237..250 'for a in b {}': &'? mut {unknown} 237..250 'for a in b {}': fn next<{unknown}>(&'? mut {unknown}) -> Option<<{unknown} as Iterator>::Item> - 237..250 'for a in b {}': Option<{unknown}> + 237..250 'for a in b {}': Option<<{unknown} as Iterator>::Item> 237..250 'for a in b {}': () 237..250 'for a in b {}': () 237..250 'for a in b {}': () @@ -395,12 +397,12 @@ fn diverging_expression_3_break() { 304..305 'x': u32 313..340 '{ for ...; }; }': u32 315..337 'for a ...urn; }': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter - 315..337 'for a ...urn; }': {unknown} + 315..337 'for a ...urn; }': <{unknown} as IntoIterator>::IntoIter 315..337 'for a ...urn; }': ! 315..337 'for a ...urn; }': {unknown} 315..337 'for a ...urn; }': &'? mut {unknown} 315..337 'for a ...urn; }': fn next<{unknown}>(&'? 
mut {unknown}) -> Option<<{unknown} as Iterator>::Item> - 315..337 'for a ...urn; }': Option<{unknown}> + 315..337 'for a ...urn; }': Option<<{unknown} as Iterator>::Item> 315..337 'for a ...urn; }': () 315..337 'for a ...urn; }': () 315..337 'for a ...urn; }': () diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/type_alias_impl_traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/opaque_types.rs similarity index 84% rename from src/tools/rust-analyzer/crates/hir-ty/src/tests/type_alias_impl_traits.rs rename to src/tools/rust-analyzer/crates/hir-ty/src/tests/opaque_types.rs index e2b7bf379cc3b..40e4c28fcc0b9 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/type_alias_impl_traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/opaque_types.rs @@ -71,7 +71,7 @@ fn test() { let x = S3.baz(); //^ Binary> let y = x.1.0.bar(); - //^ Unary> + //^ Unary<::Item> } "#, ); @@ -134,6 +134,9 @@ static ALIAS: AliasTy = { "#, ); + // FIXME(next-solver): This should emit type mismatch error but leaving it for now + // as we should fully migrate into next-solver without chalk-ir and TAIT should be + // reworked on r-a to handle `#[define_opaque(T)]` check_infer_with_mismatches( r#" trait Trait {} @@ -159,3 +162,19 @@ static ALIAS: i32 = { "#]], ) } + +#[test] +fn leak_auto_traits() { + check_no_mismatches( + r#" +//- minicore: send +fn foo() -> impl Sized {} + +fn is_send(_: T) {} + +fn main() { + is_send(foo()); +} + "#, + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs index 4949d4016bf15..607daada42eb1 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs @@ -41,19 +41,19 @@ fn infer_pattern() { 47..48 'x': &'? i32 58..59 'a': i32 62..63 'z': i32 - 73..79 '(c, d)': (i32, &'static str) + 73..79 '(c, d)': (i32, &'? str) 74..75 'c': i32 - 77..78 'd': &'static str - 82..94 '(1, "hello")': (i32, &'static str) + 77..78 'd': &'? str + 82..94 '(1, "hello")': (i32, &'? str) 83..84 '1': i32 86..93 '"hello"': &'static str 101..151 'for (e... }': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter - 101..151 'for (e... }': {unknown} + 101..151 'for (e... }': <{unknown} as IntoIterator>::IntoIter 101..151 'for (e... }': ! 101..151 'for (e... }': {unknown} 101..151 'for (e... }': &'? mut {unknown} 101..151 'for (e... }': fn next<{unknown}>(&'? mut {unknown}) -> Option<<{unknown} as Iterator>::Item> - 101..151 'for (e... }': Option<({unknown}, {unknown})> + 101..151 'for (e... }': Option<<{unknown} as Iterator>::Item> 101..151 'for (e... }': () 101..151 'for (e... }': () 101..151 'for (e... }': () @@ -653,7 +653,7 @@ fn infer_generics_in_patterns() { fn infer_const_pattern() { check( r#" -enum Option { None } +enum Option { None, Some(T) } use Option::None; struct Foo; const Bar: usize = 1; @@ -721,8 +721,8 @@ fn test() { 72..171 '{ ... x); }': () 78..81 'foo': fn foo<&'? (i32, &'? str), i32, impl FnOnce(&'? (i32, &'? str)) -> i32>(&'? (i32, &'? str), impl FnOnce(&'? (i32, &'? str)) -> i32) -> i32 78..105 'foo(&(...y)| x)': i32 - 82..91 '&(1, "a")': &'? (i32, &'static str) - 83..91 '(1, "a")': (i32, &'static str) + 82..91 '&(1, "a")': &'? (i32, &'? str) + 83..91 '(1, "a")': (i32, &'? str) 84..85 '1': i32 87..90 '"a"': &'static str 93..104 '|&(x, y)| x': impl FnOnce(&'? (i32, &'? str)) -> i32 @@ -733,8 +733,8 @@ fn test() { 103..104 'x': i32 142..145 'foo': fn foo<&'? (i32, &'? 
str), &'? i32, impl FnOnce(&'? (i32, &'? str)) -> &'? i32>(&'? (i32, &'? str), impl FnOnce(&'? (i32, &'? str)) -> &'? i32) -> &'? i32 142..168 'foo(&(...y)| x)': &'? i32 - 146..155 '&(1, "a")': &'? (i32, &'static str) - 147..155 '(1, "a")': (i32, &'static str) + 146..155 '&(1, "a")': &'? (i32, &'? str) + 147..155 '(1, "a")': (i32, &'? str) 148..149 '1': i32 151..154 '"a"': &'static str 157..167 '|(x, y)| x': impl FnOnce(&'? (i32, &'? str)) -> &'? i32 diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs index c4c17a93c9cd6..2ba1e2341b297 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs @@ -1,3 +1,5 @@ +mod new_solver; + use expect_test::expect; use super::{check_infer, check_no_mismatches, check_types}; @@ -86,6 +88,7 @@ fn bug_651() { #[test] fn recursive_vars() { + // FIXME: This isn't nice, but I guess as long as we don't hang/crash that's fine? check_infer( r#" fn test() { @@ -95,12 +98,12 @@ fn recursive_vars() { "#, expect![[r#" 10..47 '{ ...&y]; }': () - 20..21 'y': {unknown} - 24..31 'unknown': {unknown} - 37..44 '[y, &y]': [{unknown}; 2] - 38..39 'y': {unknown} - 41..43 '&y': &'? {unknown} - 42..43 'y': {unknown} + 20..21 'y': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 24..31 'unknown': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 37..44 '[y, &y]': [&'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}; 2] + 38..39 'y': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 41..43 '&y': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 42..43 'y': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} "#]], ); } @@ -117,19 +120,19 @@ fn recursive_vars_2() { "#, expect![[r#" 10..79 '{ ...x)]; }': () - 20..21 'x': &'? {unknown} - 24..31 'unknown': &'? {unknown} - 41..42 'y': {unknown} - 45..52 'unknown': {unknown} - 58..76 '[(x, y..., &x)]': [(&'? {unknown}, {unknown}); 2] - 59..65 '(x, y)': (&'? {unknown}, {unknown}) - 60..61 'x': &'? {unknown} - 63..64 'y': {unknown} - 67..75 '(&y, &x)': (&'? {unknown}, {unknown}) - 68..70 '&y': &'? {unknown} - 69..70 'y': {unknown} - 72..74 '&x': &'? &'? {unknown} - 73..74 'x': &'? {unknown} + 20..21 'x': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? 
&'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 24..31 'unknown': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 41..42 'y': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 45..52 'unknown': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 58..76 '[(x, y..., &x)]': [(&'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}, &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}); 2] + 59..65 '(x, y)': (&'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}, &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}) + 60..61 'x': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 63..64 'y': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? 
&'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 67..75 '(&y, &x)': (&'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}, &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}) + 68..70 '&y': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 69..70 'y': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 72..74 '&x': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 73..74 'x': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} "#]], ); } @@ -266,37 +269,37 @@ fn infer_std_crash_5() { expect![[r#" 26..322 '{ ... } }': () 32..320 'for co... }': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter - 32..320 'for co... }': {unknown} + 32..320 'for co... }': <{unknown} as IntoIterator>::IntoIter 32..320 'for co... }': ! 32..320 'for co... }': {unknown} 32..320 'for co... }': &'? mut {unknown} 32..320 'for co... }': fn next<{unknown}>(&'? mut {unknown}) -> Option<<{unknown} as Iterator>::Item> - 32..320 'for co... }': Option<{unknown}> + 32..320 'for co... }': Option<<{unknown} as Iterator>::Item> 32..320 'for co... }': () 32..320 'for co... }': () 32..320 'for co... }': () 32..320 'for co... 
}': () - 36..43 'content': {unknown} + 36..43 'content': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} 47..60 'doesnt_matter': {unknown} 61..320 '{ ... }': () - 75..79 'name': &'? {unknown} - 82..166 'if doe... }': &'? {unknown} + 75..79 'name': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 82..166 'if doe... }': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} 85..98 'doesnt_matter': bool - 99..128 '{ ... }': &'? {unknown} - 113..118 'first': &'? {unknown} - 134..166 '{ ... }': &'? {unknown} - 148..156 '&content': &'? {unknown} - 149..156 'content': {unknown} - 181..188 'content': &'? {unknown} - 191..313 'if ICE... }': &'? {unknown} - 194..231 'ICE_RE..._VALUE': {unknown} + 99..128 '{ ... }': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 113..118 'first': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 134..166 '{ ... }': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 148..156 '&content': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 149..156 'content': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 181..188 'content': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 191..313 'if ICE... }': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 194..231 'ICE_RE..._VALUE': bool 194..247 'ICE_RE...&name)': bool - 241..246 '&name': &'? &'? {unknown} - 242..246 'name': &'? {unknown} - 248..276 '{ ... }': &'? {unknown} - 262..266 'name': &'? {unknown} - 282..313 '{ ... }': {unknown} - 296..303 'content': {unknown} + 241..246 '&name': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 242..246 'name': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? 
&'? &'? &'? &'? &'? {unknown} + 248..276 '{ ... }': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 262..266 'name': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 282..313 '{ ... }': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} + 296..303 'content': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} "#]], ); } @@ -392,7 +395,7 @@ fn issue_2669() { r#" trait A {} trait Write {} - struct Response {} + struct Response(T); trait D { fn foo(); @@ -408,13 +411,13 @@ fn issue_2669() { } "#, expect![[r#" - 119..214 '{ ... }': () - 129..132 'end': fn end<{unknown}>() - 129..134 'end()': () - 163..208 '{ ... }': () - 181..183 '_x': ! - 190..197 'loop {}': ! - 195..197 '{}': () + 120..215 '{ ... }': () + 130..133 'end': fn end<{unknown}>() + 130..135 'end()': () + 164..209 '{ ... }': () + 182..184 '_x': ! + 191..198 'loop {}': ! + 196..198 '{}': () "#]], ) } @@ -626,7 +629,7 @@ fn issue_4053_diesel_where_clauses() { 65..69 'self': Self 267..271 'self': Self 466..470 'self': SelectStatement - 488..522 '{ ... }': () + 488..522 '{ ... }': as BoxedDsl>::Output 498..502 'self': SelectStatement 498..508 'self.order': O 498..515 'self.o...into()': dyn QueryFragment + '? @@ -773,7 +776,7 @@ fn issue_4800() { "#, expect![[r#" 379..383 'self': &'? mut PeerSet - 401..424 '{ ... }': dyn Future + '? + 401..424 '{ ... }': dyn Future + 'static 411..418 'loop {}': ! 416..418 '{}': () 575..579 'self': &'? mut Self @@ -781,6 +784,9 @@ fn issue_4800() { ); } +// FIXME(next-solver): Though `Repeat: IntoIterator` does not hold here, we +// should be able to do better at given type hints (with Chalk, we did `IntoIterator::Item>`) +// From what I can tell, the point of this test is to not panic though. #[test] fn issue_4966() { check_infer( @@ -794,7 +800,7 @@ fn issue_4966() { struct Map { f: F } - struct Vec {} + struct Vec { p: *mut T } impl core::ops::Deref for Vec { type Target = [T]; @@ -813,23 +819,23 @@ fn issue_4966() { } "#, expect![[r#" - 225..229 'iter': T - 244..246 '{}': Vec - 258..402 '{ ...r(); }': () - 268..273 'inner': Map f64> - 276..300 'Map { ... 0.0 }': Map f64> - 285..298 '|_: &f64| 0.0': impl Fn(&'? f64) -> f64 - 286..287 '_': &'? f64 - 295..298 '0.0': f64 - 311..317 'repeat': Repeat f64>> - 320..345 'Repeat...nner }': Repeat f64>> - 338..343 'inner': Map f64> - 356..359 'vec': Vec f64>>>> - 362..371 'from_iter': fn from_iter f64>>>, Repeat f64>>>(Repeat f64>>) -> Vec f64>>>> - 362..379 'from_i...epeat)': Vec f64>>>> - 372..378 'repeat': Repeat f64>> - 386..389 'vec': Vec f64>>>> - 386..399 'vec.foo_bar()': {unknown} + 236..240 'iter': T + 255..257 '{}': Vec + 269..413 '{ ...r(); }': () + 279..284 'inner': Map f64> + 287..311 'Map { ... 0.0 }': Map f64> + 296..309 '|_: &f64| 0.0': impl Fn(&'? f64) -> f64 + 297..298 '_': &'? 
f64 + 306..309 '0.0': f64 + 322..328 'repeat': Repeat f64>> + 331..356 'Repeat...nner }': Repeat f64>> + 349..354 'inner': Map f64> + 367..370 'vec': Vec<{unknown}> + 373..382 'from_iter': fn from_iter<{unknown}, Repeat f64>>>(Repeat f64>>) -> Vec<{unknown}> + 373..390 'from_i...epeat)': Vec<{unknown}> + 383..389 'repeat': Repeat f64>> + 397..400 'vec': Vec<{unknown}> + 397..410 'vec.foo_bar()': {unknown} "#]], ); } @@ -838,37 +844,40 @@ fn issue_4966() { fn issue_6628() { check_infer( r#" -//- minicore: fn -struct S(); +//- minicore: fn, phantom_data +use core::marker::PhantomData; + +struct S(PhantomData); impl S { fn f(&self, _t: T) {} fn g(&self, _f: F) {} } fn main() { - let s = S(); + let s = S(PhantomData); s.g(|_x| {}); s.f(10); } "#, expect![[r#" - 40..44 'self': &'? S - 46..48 '_t': T - 53..55 '{}': () - 81..85 'self': &'? S - 87..89 '_f': F - 94..96 '{}': () - 109..160 '{ ...10); }': () - 119..120 's': S - 123..124 'S': fn S() -> S - 123..126 'S()': S - 132..133 's': S - 132..144 's.g(|_x| {})': () - 136..143 '|_x| {}': impl FnOnce(&'? i32) - 137..139 '_x': &'? i32 - 141..143 '{}': () - 150..151 's': S - 150..157 's.f(10)': () - 154..156 '10': i32 + 86..90 'self': &'? S + 92..94 '_t': T + 99..101 '{}': () + 127..131 'self': &'? S + 133..135 '_f': F + 140..142 '{}': () + 155..217 '{ ...10); }': () + 165..166 's': S + 169..170 'S': fn S(PhantomData) -> S + 169..183 'S(PhantomData)': S + 171..182 'PhantomData': PhantomData + 189..190 's': S + 189..201 's.g(|_x| {})': () + 193..200 '|_x| {}': impl FnOnce(&'? i32) + 194..196 '_x': &'? i32 + 198..200 '{}': () + 207..208 's': S + 207..214 's.f(10)': () + 211..213 '10': i32 "#]], ); } @@ -926,7 +935,7 @@ fn lifetime_from_chalk_during_deref() { check_types( r#" //- minicore: deref -struct Box {} +struct Box(T); impl core::ops::Deref for Box { type Target = T; @@ -959,6 +968,9 @@ fn clone_iter(s: Iter) { fn issue_8686() { check_infer( r#" +//- minicore: phantom_data +use core::marker::PhantomData; + pub trait Try: FromResidual { type Output; type Residual; @@ -967,28 +979,32 @@ pub trait FromResidual::Residual> { fn from_residual(residual: R) -> Self; } -struct ControlFlow; +struct ControlFlow(PhantomData<(B, C)>); impl Try for ControlFlow { type Output = C; type Residual = ControlFlow; } impl FromResidual for ControlFlow { - fn from_residual(r: ControlFlow) -> Self { ControlFlow } + fn from_residual(r: ControlFlow) -> Self { ControlFlow(PhantomData) } } fn test() { - ControlFlow::from_residual(ControlFlow::); + ControlFlow::from_residual(ControlFlow::(PhantomData)); } "#, expect![[r#" - 144..152 'residual': R - 365..366 'r': ControlFlow - 395..410 '{ ControlFlow }': ControlFlow - 397..408 'ControlFlow': ControlFlow - 424..482 '{ ...!>); }': () - 430..456 'Contro...sidual': fn from_residual, ControlFlow>(ControlFlow) -> ControlFlow - 430..479 'Contro...2, !>)': ControlFlow - 457..478 'Contro...32, !>': ControlFlow + 176..184 'residual': R + 418..419 'r': ControlFlow + 448..476 '{ Cont...ata) }': ControlFlow + 450..461 'ControlFlow': fn ControlFlow(PhantomData<(B, C)>) -> ControlFlow + 450..474 'Contro...mData)': ControlFlow + 462..473 'PhantomData': PhantomData<(B, C)> + 490..561 '{ ...a)); }': () + 496..522 'Contro...sidual': fn from_residual, ControlFlow>(ControlFlow) -> ControlFlow + 496..558 'Contro...Data))': ControlFlow + 523..544 'Contro...32, !>': fn ControlFlow(PhantomData<(u32, !)>) -> ControlFlow + 523..557 'Contro...mData)': ControlFlow + 545..556 'PhantomData': PhantomData<(u32, !)> "#]], ); } @@ -1047,12 +1063,13 @@ fn 
impl_trait_in_option_9530() { check_types( r#" //- minicore: sized -struct Option; +struct Option(T); impl Option { fn unwrap(self) -> T { loop {} } } -fn make() -> Option { Option } +fn make() -> Option { Option(()) } trait Copy {} +impl Copy for () {} fn test() { let o = make(); o.unwrap(); @@ -1158,9 +1175,9 @@ pub trait BitView { pub struct Lsb0; -pub struct BitArray { } +pub struct BitArray(V); -pub struct BitSlice { } +pub struct BitSlice(T); impl core::ops::Deref for BitArray { type Target = BitSlice; @@ -1224,6 +1241,8 @@ fn mamba(a: U32!(), p: u32) -> u32 { #[test] fn for_loop_block_expr_iterable() { + // FIXME(next-solver): it would be nice to be able to hint `IntoIterator::IntoIter<()>` instead of just `{unknown}` + // (even though `(): IntoIterator` does not hold) check_infer( r#" //- minicore: iterator @@ -1236,17 +1255,17 @@ fn test() { expect![[r#" 10..68 '{ ... } }': () 16..66 'for _ ... }': fn into_iter<()>(()) -> <() as IntoIterator>::IntoIter - 16..66 'for _ ... }': IntoIterator::IntoIter<()> + 16..66 'for _ ... }': <() as IntoIterator>::IntoIter 16..66 'for _ ... }': ! - 16..66 'for _ ... }': IntoIterator::IntoIter<()> - 16..66 'for _ ... }': &'? mut IntoIterator::IntoIter<()> - 16..66 'for _ ... }': fn next>(&'? mut IntoIterator::IntoIter<()>) -> Option< as Iterator>::Item> - 16..66 'for _ ... }': Option> + 16..66 'for _ ... }': <() as IntoIterator>::IntoIter + 16..66 'for _ ... }': &'? mut <() as IntoIterator>::IntoIter + 16..66 'for _ ... }': fn next<{unknown}>(&'? mut {unknown}) -> Option<<{unknown} as Iterator>::Item> + 16..66 'for _ ... }': Option<<{unknown} as Iterator>::Item> 16..66 'for _ ... }': () 16..66 'for _ ... }': () 16..66 'for _ ... }': () 16..66 'for _ ... }': () - 20..21 '_': IntoIterator::Item<()> + 20..21 '_': {unknown} 25..39 '{ let x = 0; }': () 31..32 'x': i32 35..36 '0': i32 @@ -1283,7 +1302,6 @@ fn test() { #[test] fn bug_11242() { - // FIXME: wrong, should be u32 check_types( r#" fn foo() @@ -1292,7 +1310,7 @@ where B: IntoIterator, { let _x: ::Item; - // ^^ {unknown} + // ^^ u32 } pub trait Iterator { @@ -1495,7 +1513,7 @@ fn regression_11688_2() { fn regression_11688_3() { check_types( r#" - //- minicore: iterator + //- minicore: iterator, dispatch_from_dyn struct Ar(T); fn f( num_zeros: usize, @@ -1514,6 +1532,7 @@ fn regression_11688_3() { fn regression_11688_4() { check_types( r#" + //- minicore: dispatch_from_dyn trait Bar { fn baz(&self) -> [i32; C]; } @@ -1772,7 +1791,7 @@ fn regression_14844() { r#" pub type Ty = Unknown; -pub struct Inner(); +pub struct Inner(T); pub struct Outer { pub inner: Inner, @@ -1780,7 +1799,7 @@ pub struct Outer { fn main() { _ = Outer { - inner: Inner::(), + inner: Inner::(0), }; } "#, @@ -1932,7 +1951,7 @@ fn main() { Alias::Braced; //^^^^^^^^^^^^^ {unknown} let Alias::Braced = loop {}; - //^^^^^^^^^^^^^ ! + //^^^^^^^^^^^^^ {unknown} let Alias::Braced(..) = loop {}; //^^^^^^^^^^^^^^^^^ Enum @@ -2010,12 +2029,12 @@ fn tait_async_stack_overflow_17199() { fn lifetime_params_move_param_defaults() { check_types( r#" -pub struct Thing<'s, T = u32>; +pub struct Thing<'s, T = u32>(&'s T); impl <'s> Thing<'s> { pub fn new() -> Thing<'s> { - Thing - //^^^^^ Thing<'?, u32> + Thing(&0) + //^^^^^^^^^ Thing<'?, u32> } } @@ -2052,7 +2071,7 @@ impl S { } } -struct Wrap<'a, T>(T); +struct Wrap<'a, T>(&'a T); trait Trait<'a> { type Proj; } @@ -2294,10 +2313,10 @@ trait Foo { } "#, expect![[r#" - 83..86 'bar': Foo::Bar + 83..86 'bar': ::Bar 105..133 '{ ... 
}': () - 119..120 '_': Foo::Bar - 123..126 'bar': Foo::Bar + 119..120 '_': ::Bar + 123..126 'bar': ::Bar "#]], ); } @@ -2383,3 +2402,107 @@ pub trait Destruct {} "#, ); } + +#[test] +fn no_duplicated_lang_item_metadata() { + check_types( + r#" +//- minicore: pointee +//- /main.rs crate:main deps:std,core +use std::AtomicPtr; +use std::null_mut; + +fn main() { + let x: AtomicPtr<()> = AtomicPtr::new(null_mut()); + //^ AtomicPtr<()> +} + +//- /lib.rs crate:r#std deps:core +#![no_std] +pub use core::*; + +//- /lib.rs crate:r#core +#![no_core] + +#[lang = "pointee_trait"] +pub trait Pointee { + #[lang = "metadata_type"] + type Metadata; +} + +pub struct AtomicPtr(T); + +impl AtomicPtr { + pub fn new(p: *mut T) -> AtomicPtr { + loop {} + } +} + +#[lang = "pointee_sized"] +pub trait PointeeSized {} +#[lang = "meta_sized"] +pub trait MetaSized: PointeeSized {} +#[lang = "sized"] +pub trait Sized: MetaSized {} + +pub trait Thin = Pointee + PointeeSized; + +pub fn null_mut() -> *mut T { + loop {} +} +"#, + ); +} + +#[test] +fn issue_20484() { + check_no_mismatches( + r#" +struct Eth; + +trait FullBlockBody { + type Transaction; +} + +impl FullBlockBody for () { + type Transaction = (); +} + +trait NodePrimitives { + type BlockBody; + type SignedTx; +} + +impl NodePrimitives for () { + type BlockBody = (); + type SignedTx = (); +} + +impl NodePrimitives for Eth { + type BlockBody = (); + type SignedTx = (); +} + +trait FullNodePrimitives +where + Self: NodePrimitives>, +{ +} + +impl FullNodePrimitives for T where + T: NodePrimitives>, +{ +} + +fn node(_: N) +where + N: FullNodePrimitives, +{ +} + +fn main() { + node(Eth); +} +"#, + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression/new_solver.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression/new_solver.rs new file mode 100644 index 0000000000000..ead79a8f5b90b --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression/new_solver.rs @@ -0,0 +1,420 @@ +use expect_test::expect; + +use crate::tests::{check_infer, check_no_mismatches}; + +#[test] +fn regression_20365() { + check_infer( + r#" +//- minicore: iterator +struct Vec(T); +struct IntoIter(T); +impl IntoIterator for Vec { + type IntoIter = IntoIter; + type Item = T; +} +impl Iterator for IntoIter { + type Item = T; +} + +fn f(a: Vec) { + let iter = a.into_iter(); +} + +pub trait Space: IntoIterator { + type Ty: Space; +} +impl Space for [u8; 1] { + type Ty = Self; +} + "#, + expect![[r#" + 201..202 'a': Vec + 213..246 '{ ...r(); }': () + 223..227 'iter': IntoIter + 230..231 'a': Vec + 230..243 'a.into_iter()': IntoIter + "#]], + ); +} + +#[test] +fn regression_19971() { + check_infer( + r#" +//- minicore: pointee +fn make(_thin: *const (), _meta: core::ptr::DynMetadata) -> *const T +where + T: core::ptr::Pointee> + ?Sized, +{ + loop {} +} +trait Foo { + fn foo(&self) -> i32 { + loop {} + } +} + +fn test() -> i32 { + struct F {} + impl Foo for F {} + let meta = core::ptr::metadata(0 as *const F as *const dyn Foo); + + let f = F {}; + let fat_ptr = make(&f as *const F as *const (), meta); // <-- infers type as `*const {unknown}` + + let fat_ref = unsafe { &*fat_ptr }; // <-- infers type as `&{unknown}` + fat_ref.foo() // cannot 'go to definition' on `foo` +} + + "#, + expect![[r#" + 11..16 '_thin': *const () + 29..34 '_meta': DynMetadata + 155..170 '{ loop {} }': *const T + 161..168 'loop {}': ! + 166..168 '{}': () + 195..199 'self': &'? Self + 208..231 '{ ... }': i32 + 218..225 'loop {}': ! 
+ 223..225 '{}': () + 252..613 '{ ...foo` }': i32 + 300..304 'meta': DynMetadata + 307..326 'core::...tadata': fn metadata(*const (dyn Foo + '?)) -> ::Metadata + 307..359 'core::...n Foo)': DynMetadata + 327..328 '0': usize + 327..340 '0 as *const F': *const F + 327..358 '0 as *...yn Foo': *const (dyn Foo + '?) + 370..371 'f': F + 374..378 'F {}': F + 388..395 'fat_ptr': *const (dyn Foo + '?) + 398..402 'make': fn make(*const (), DynMetadata) -> *const (dyn Foo + '?) + 398..437 'make(&... meta)': *const (dyn Foo + '?) + 403..405 '&f': &'? F + 403..417 '&f as *const F': *const F + 403..430 '&f as ...nst ()': *const () + 404..405 'f': F + 432..436 'meta': DynMetadata + 489..496 'fat_ref': &'? (dyn Foo + '?) + 499..519 'unsafe..._ptr }': &'? (dyn Foo + '?) + 508..517 '&*fat_ptr': &'? (dyn Foo + '?) + 509..517 '*fat_ptr': dyn Foo + '? + 510..517 'fat_ptr': *const (dyn Foo + '?) + 560..567 'fat_ref': &'? (dyn Foo + '?) + 560..573 'fat_ref.foo()': i32 + "#]], + ); +} + +#[test] +fn regression_19752() { + check_no_mismatches( + r#" +//- minicore: sized, copy +trait T1: Sized + Copy { + fn a(self, other: Self) -> Self { + other + } + + fn b(&mut self, other: Self) { + *self = self.a(other); + } +} + +trait T2: Sized { + type T1: T1; +} + "#, + ); +} + +#[test] +fn regression_type_checker_does_not_eagerly_select_predicates_from_where_clauses() { + // This was a very long standing issue (#5514) with a lot of duplicates, that was + // fixed by the switch to the new trait solver, so it deserves a long name and a + // honorable mention. + check_infer( + r#" +//- minicore: from + +struct Foo; +impl Foo { + fn method(self) -> i32 { 0 } +} + +fn f>(u: T) { + let x = u.into(); + x.method(); +} + "#, + expect![[r#" + 38..42 'self': Foo + 51..56 '{ 0 }': i32 + 53..54 '0': i32 + 79..80 'u': T + 85..126 '{ ...d(); }': () + 95..96 'x': Foo + 99..100 'u': T + 99..107 'u.into()': Foo + 113..114 'x': Foo + 113..123 'x.method()': i32 + "#]], + ); +} + +#[test] +fn opaque_generics() { + check_infer( + r#" +//- minicore: iterator +pub struct Grid {} + +impl<'a> IntoIterator for &'a Grid { + type Item = &'a (); + + type IntoIter = impl Iterator; + + fn into_iter(self) -> Self::IntoIter { + } +} + "#, + expect![[r#" + 150..154 'self': &'a Grid + 174..181 '{ }': impl Iterator + "#]], + ); +} + +#[test] +fn normalization() { + check_infer( + r#" +//- minicore: iterator, iterators +fn main() { + _ = [0i32].into_iter().filter_map(|_n| Some(1i32)); +} + "#, + expect![[r#" + 10..69 '{ ...2)); }': () + 16..17 '_': FilterMap, impl FnMut(i32) -> Option> + 16..66 '_ = [0...1i32))': () + 20..26 '[0i32]': [i32; 1] + 20..38 '[0i32]...iter()': IntoIter + 20..66 '[0i32]...1i32))': FilterMap, impl FnMut(i32) -> Option> + 21..25 '0i32': i32 + 50..65 '|_n| Some(1i32)': impl FnMut(i32) -> Option + 51..53 '_n': i32 + 55..59 'Some': fn Some(i32) -> Option + 55..65 'Some(1i32)': Option + 60..64 '1i32': i32 + "#]], + ); +} + +#[test] +fn regression_20487() { + check_no_mismatches( + r#" +//- minicore: coerce_unsized, dispatch_from_dyn +trait Foo { + fn bar(&self) -> u32 { + 0xCAFE + } +} + +fn debug(_: &dyn Foo) {} + +impl Foo for i32 {} + +fn main() { + debug(&1); +}"#, + ); +} + +#[test] +fn projection_is_not_associated_type() { + check_no_mismatches( + r#" +//- minicore: fn +trait Iterator { + type Item; + + fn partition(self, f: F) + where + F: FnMut(&Self::Item) -> bool, + { + } +} + +struct Iter; +impl Iterator for Iter { + type Item = i32; +} + +fn main() { + Iter.partition(|n| true); +} + "#, + ); +} + +#[test] +fn cast_error_type() 
{ + check_infer( + r#" +fn main() { + let foo: [_; _] = [false] as _; +} + "#, + expect![[r#" + 10..47 '{ le...s _; }': () + 18..21 'foo': [bool; 1] + 32..39 '[false]': [bool; 1] + 32..44 '[false] as _': [bool; 1] + 33..38 'false': bool + "#]], + ); +} + +#[test] +fn no_infinite_loop_on_super_predicates_elaboration() { + check_infer( + r#" +//- minicore: sized +trait DimMax { + type Output: Dimension; +} + +trait Dimension: DimMax<:: Smaller, Output = Self> { + type Smaller: Dimension; +} + +fn test(t: T) +where + T: DimMax, + U: Dimension, +{ + let t: >::Output = loop {}; +} +"#, + expect![[r#" + 182..183 't': T + 230..280 '{ ... {}; }': () + 240..241 't': >::Output + 270..277 'loop {}': ! + 275..277 '{}': () + "#]], + ) +} + +#[test] +fn fn_coercion() { + check_no_mismatches( + r#" +fn foo() { + let _is_suffix_start: fn(&(usize, char)) -> bool = match true { + true => |(_, c)| *c == ' ', + _ => |(_, c)| *c == 'v', + }; +} + "#, + ); +} + +#[test] +fn coercion_with_errors() { + check_no_mismatches( + r#" +//- minicore: unsize, coerce_unsized +fn foo(_v: i32) -> [u8; _] { loop {} } +fn bar(_v: &[u8]) {} + +fn main() { + bar(&foo()); +} + "#, + ); +} + +#[test] +fn another_20654_case() { + check_no_mismatches( + r#" +//- minicore: sized, unsize, coerce_unsized, dispatch_from_dyn, fn +struct Region<'db>(&'db ()); + +trait TypeFoldable {} + +trait Interner { + type Region; + type GenericArg; +} + +struct DbInterner<'db>(&'db ()); +impl<'db> Interner for DbInterner<'db> { + type Region = Region<'db>; + type GenericArg = GenericArg<'db>; +} + +trait GenericArgExt> { + fn expect_region(&self) -> I::Region { + loop {} + } +} +impl<'db> GenericArgExt> for GenericArg<'db> {} + +enum GenericArg<'db> { + Region(Region<'db>), +} + +fn foo<'db, T: TypeFoldable>>(arg: GenericArg<'db>) { + let regions = &mut || arg.expect_region(); + let f: &'_ mut (dyn FnMut() -> Region<'db> + '_) = regions; +} + "#, + ); +} + +#[test] +fn trait_solving_with_error() { + check_infer( + r#" +//- minicore: size_of +struct Vec(T); + +trait Foo { + type Item; + fn to_vec(self) -> Vec { + loop {} + } +} + +impl<'a, T, const N: usize> Foo for &'a [T; N] { + type Item = T; +} + +fn to_bytes() -> [u8; _] { + loop {} +} + +fn foo() { + let _x = to_bytes().to_vec(); +} + "#, + expect![[r#" + 60..64 'self': Self + 85..108 '{ ... }': Vec<::Item> + 95..102 'loop {}': ! + 100..102 '{}': () + 208..223 '{ loop {} }': [u8; _] + 214..221 'loop {}': ! + 219..221 '{}': () + 234..271 '{ ...c(); }': () + 244..246 '_x': {unknown} + 249..257 'to_bytes': fn to_bytes() -> [u8; _] + 249..259 'to_bytes()': [u8; _] + 249..268 'to_byt..._vec()': Vec<<[u8; _] as Foo>::Item> + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs index b154e59878571..9d02a44c37c97 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs @@ -439,11 +439,11 @@ h"; 256..260 'true': bool 274..370 'r#" ... "#': &'static str 384..394 'br#"yolo"#': &'static [u8; 4] - 412..413 'a': &'static [u8; 4] + 412..413 'a': &'? [u8; 4] 416..440 'b"a\x2... c"': &'static [u8; 4] - 458..459 'b': &'static [u8; 4] + 458..459 'b': &'? [u8; 4] 462..470 'br"g\ h"': &'static [u8; 4] - 488..489 'c': &'static [u8; 6] + 488..489 'c': &'? [u8; 6] 492..504 'br#"x"\"yb"#': &'static [u8; 6] "##]], ); @@ -1124,13 +1124,13 @@ fn infer_tuple() { 116..122 '(c, x)': ((isize, &'? str), &'? str) 117..118 'c': (isize, &'? str) 120..121 'x': &'? 
str - 132..133 'e': (i32, &'static str) - 136..144 '(1, "e")': (i32, &'static str) + 132..133 'e': (i32, &'? str) + 136..144 '(1, "e")': (i32, &'? str) 137..138 '1': i32 140..143 '"e"': &'static str - 154..155 'f': ((i32, &'static str), &'static str) - 158..166 '(e, "d")': ((i32, &'static str), &'static str) - 159..160 'e': (i32, &'static str) + 154..155 'f': ((i32, &'? str), &'? str) + 158..166 '(e, "d")': ((i32, &'? str), &'? str) + 159..160 'e': (i32, &'? str) 162..165 '"d"': &'static str "#]], ); @@ -1201,8 +1201,8 @@ fn infer_array() { 209..215 '[1, 2]': [i32; 2] 210..211 '1': i32 213..214 '2': i32 - 225..226 'i': [&'static str; 2] - 229..239 '["a", "b"]': [&'static str; 2] + 225..226 'i': [&'? str; 2] + 229..239 '["a", "b"]': [&'? str; 2] 230..233 '"a"': &'static str 235..238 '"b"': &'static str 250..251 'b': [[&'? str; 1]; 2] @@ -1283,11 +1283,11 @@ fn infer_tuple_struct_generics() { 92..93 'A': fn A(u128) -> A 92..101 'A(42u128)': A 94..100 '42u128': u128 - 107..111 'Some': fn Some<&'static str>(&'static str) -> Option<&'static str> - 107..116 'Some("x")': Option<&'static str> + 107..111 'Some': fn Some<&'? str>(&'? str) -> Option<&'? str> + 107..116 'Some("x")': Option<&'? str> 112..115 '"x"': &'static str - 122..134 'Option::Some': fn Some<&'static str>(&'static str) -> Option<&'static str> - 122..139 'Option...e("x")': Option<&'static str> + 122..134 'Option::Some': fn Some<&'? str>(&'? str) -> Option<&'? str> + 122..139 'Option...e("x")': Option<&'? str> 135..138 '"x"': &'static str 145..149 'None': Option<{unknown}> 159..160 'x': Option @@ -1946,14 +1946,16 @@ fn closure_return_inferred() { "#, expect![[r#" 16..46 '{ ..." }; }': u32 - 26..27 'x': impl Fn() -> &'static str - 30..43 '|| { "test" }': impl Fn() -> &'static str - 33..43 '{ "test" }': &'static str + 26..27 'x': impl Fn() -> &'? str + 30..43 '|| { "test" }': impl Fn() -> &'? str + 33..43 '{ "test" }': &'? str 35..41 '"test"': &'static str "#]], ); } +// FIXME(next-solver): `&'? str` in 231..262 seems suspicious. +// Should revisit this once we fully migrated into next-solver without chalk-ir. #[test] fn coroutine_types_inferred() { check_infer( @@ -1981,10 +1983,10 @@ fn test() { 70..71 'v': i64 78..80 '{}': () 91..362 '{ ... } }': () - 101..106 'mut g': |usize| yields i64 -> &'static str - 109..218 '|r| { ... }': |usize| yields i64 -> &'static str + 101..106 'mut g': |usize| yields i64 -> &'? str + 109..218 '|r| { ... }': |usize| yields i64 -> &'? str 110..111 'r': usize - 113..218 '{ ... }': &'static str + 113..218 '{ ... }': &'? str 127..128 'a': usize 131..138 'yield 0': usize 137..138 '0': i64 @@ -1996,20 +1998,20 @@ fn test() { 187..188 '2': i64 198..212 '"return value"': &'static str 225..360 'match ... }': () - 231..239 'Pin::new': fn new<&'? mut |usize| yields i64 -> &'static str>(&'? mut |usize| yields i64 -> &'static str) -> Pin<&'? mut |usize| yields i64 -> &'static str> - 231..247 'Pin::n...mut g)': Pin<&'? mut |usize| yields i64 -> &'static str> - 231..262 'Pin::n...usize)': CoroutineState - 240..246 '&mut g': &'? mut |usize| yields i64 -> &'static str - 245..246 'g': |usize| yields i64 -> &'static str + 231..239 'Pin::new': fn new<&'? mut |usize| yields i64 -> &'? str>(&'? mut |usize| yields i64 -> &'? str) -> Pin<&'? mut |usize| yields i64 -> &'? str> + 231..247 'Pin::n...mut g)': Pin<&'? mut |usize| yields i64 -> &'? str> + 231..262 'Pin::n...usize)': CoroutineState + 240..246 '&mut g': &'? mut |usize| yields i64 -> &'? str + 245..246 'g': |usize| yields i64 -> &'? 
str 255..261 '0usize': usize - 273..299 'Corout...ded(y)': CoroutineState + 273..299 'Corout...ded(y)': CoroutineState 297..298 'y': i64 303..312 '{ f(y); }': () 305..306 'f': fn f(i64) 305..309 'f(y)': () 307..308 'y': i64 - 321..348 'Corout...ete(r)': CoroutineState - 346..347 'r': &'static str + 321..348 'Corout...ete(r)': CoroutineState + 346..347 'r': &'? str 352..354 '{}': () "#]], ); @@ -2705,11 +2707,11 @@ unsafe impl Allocator for Global {} #[lang = "owned_box"] #[fundamental] -pub struct Box; +pub struct Box(T, A); impl, U: ?Sized, A: Allocator> CoerceUnsized> for Box {} -pub struct Vec {} +pub struct Vec(T, A); #[lang = "slice"] impl [T] {} @@ -2732,22 +2734,22 @@ struct Astruct; impl B for Astruct {} "#, expect![[r#" - 604..608 'self': Box<[T], A> - 637..669 '{ ... }': Vec - 683..853 '{ ...])); }': () - 693..696 'vec': Vec - 699..714 '<[_]>::into_vec': fn into_vec(Box<[i32], Global>) -> Vec - 699..745 '<[_]>:...i32]))': Vec - 715..744 '#[rust...1i32])': Box<[i32; 1], Global> - 737..743 '[1i32]': [i32; 1] - 738..742 '1i32': i32 - 755..756 'v': Vec, Global> - 776..793 '<[_]> ...to_vec': fn into_vec, Global>(Box<[Box], Global>) -> Vec, Global> - 776..850 '<[_]> ...ct)]))': Vec, Global> - 794..849 '#[rust...uct)])': Box<[Box; 1], Global> - 816..848 '[#[rus...ruct)]': [Box; 1] - 817..847 '#[rust...truct)': Box - 839..846 'Astruct': Astruct + 614..618 'self': Box<[T], A> + 647..679 '{ ... }': Vec + 693..863 '{ ...])); }': () + 703..706 'vec': Vec + 709..724 '<[_]>::into_vec': fn into_vec(Box<[i32], Global>) -> Vec + 709..755 '<[_]>:...i32]))': Vec + 725..754 '#[rust...1i32])': Box<[i32; 1], Global> + 747..753 '[1i32]': [i32; 1] + 748..752 '1i32': i32 + 765..766 'v': Vec, Global> + 786..803 '<[_]> ...to_vec': fn into_vec, Global>(Box<[Box], Global>) -> Vec, Global> + 786..860 '<[_]> ...ct)]))': Vec, Global> + 804..859 '#[rust...uct)])': Box<[Box; 1], Global> + 826..858 '[#[rus...ruct)]': [Box; 1] + 827..857 '#[rust...truct)': Box + 849..856 'Astruct': Astruct "#]], ) } @@ -2923,7 +2925,7 @@ fn test { // ^^ impl Fn() let c4 = f1(); - // ^^ impl FnOnce() + ?Sized + // ^^ impl FnOnce() f2(|| { 0 }); // ^^^^^^^^ impl FnOnce() -> i32 @@ -3887,9 +3889,9 @@ fn main() { 74..75 'f': F 80..82 '{}': () 94..191 '{ ... }); }': () - 100..113 'async_closure': fn async_closure impl Future>(impl AsyncFnOnce(i32) -> impl Future) + 100..113 'async_closure': fn async_closure(impl FnOnce(i32)) 100..147 'async_... })': () - 114..146 'async ... }': impl AsyncFnOnce(i32) -> impl Future + 114..146 'async ... }': impl FnOnce(i32) 121..124 'arg': i32 126..146 '{ ... 
}': () 136..139 'arg': i32 @@ -3922,7 +3924,7 @@ fn foo() { expect![[r#" 110..127 '{ ...z(); }': () 116..122 'T::baz': fn baz() -> <{unknown} as Foo>::Gat<'?> - 116..124 'T::baz()': Foo::Gat<'?, {unknown}> + 116..124 'T::baz()': <{unknown} as Foo>::Gat<'?> "#]], ); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/trait_aliases.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/trait_aliases.rs new file mode 100644 index 0000000000000..302ce550b8534 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/trait_aliases.rs @@ -0,0 +1,21 @@ +use crate::tests::check_types; + +#[test] +fn projection() { + check_types( + r#" +#![feature(trait_alias)] + +pub trait A { + type Output; +} + +pub trait B = A; + +pub fn a(x: T::Output) { + x; +// ^ u32 +} +"#, + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs index 56e31a1af1b9c..41f8d4ed555f2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs @@ -1,6 +1,8 @@ use cov_mark::check; use expect_test::expect; +use crate::tests::infer_with_mismatches; + use super::{check, check_infer, check_infer_with_mismatches, check_no_mismatches, check_types}; #[test] @@ -83,6 +85,7 @@ async fn test() { } #[test] +#[ignore = "FIXME(next-solver): fix async closures"] fn infer_async_closure() { check_types( r#" @@ -162,16 +165,16 @@ unsafe impl Allocator for Global {} #[lang = "owned_box"] #[fundamental] -pub struct Box(T); +pub struct Box(T, A); impl, U: ?Sized, A: Allocator> CoerceUnsized> for Box {} fn send() -> Box + Send + 'static>{ - Box(async move {}) + Box(async move {}, Global) } fn not_send() -> Box + 'static> { - Box(async move {}) + Box(async move {}, Global) } "#, ); @@ -246,15 +249,15 @@ fn test() { v.push("foo"); for x in v { x; - } //^ &'static str + } //^ &'? 
str } //- /alloc.rs crate:alloc #![no_std] pub mod collections { - pub struct Vec {} + pub struct Vec { p: *const T } impl Vec { - pub fn new() -> Self { Vec {} } + pub fn new() -> Self { Vec { p: 0 as _ } } pub fn push(&mut self, t: T) { } } @@ -408,11 +411,11 @@ fn test() { let x: ::Item = 1; // ^ u32 let y: ::Item = u; - // ^ Iterable::Item + // ^ ::Item let z: T::Item = u; - // ^ Iterable::Item + // ^ ::Item let a: ::Item = u; - // ^ Iterable::Item + // ^ ::Item }"#, ); } @@ -454,7 +457,7 @@ impl S { fn test() { let s: S; s.foo(); - // ^^^^^^^ Iterable::Item + // ^^^^^^^ ::Item }"#, ); } @@ -470,7 +473,7 @@ trait Foo { type A; fn test(a: Self::A, _: impl Bar) { a; - //^ Foo::A + //^ ::A } }"#, ); @@ -720,8 +723,8 @@ fn deref_trait_with_inference_var() { check_types( r#" //- minicore: deref -struct Arc; -fn new_arc() -> Arc { Arc } +struct Arc(T); +fn new_arc() -> Arc { loop {} } impl core::ops::Deref for Arc { type Target = T; } @@ -783,13 +786,15 @@ fn test(s: Arc) { fn deref_trait_with_implicit_sized_requirement_on_inference_var() { check_types( r#" -//- minicore: deref -struct Foo; +//- minicore: deref, phantom_data +use core::marker::PhantomData; + +struct Foo(PhantomData); impl core::ops::Deref for Foo { type Target = (); } fn test() { - let foo = Foo; + let foo = Foo(PhantomData); *foo; //^^^^ () let _: Foo = foo; @@ -969,7 +974,7 @@ impl ApplyL for RefMutL { fn test() { let y: as ApplyL>::Out = no_matter; y; -} //^ ApplyL::Out +} //^ ::Out "#, ); } @@ -986,7 +991,7 @@ fn foo(t: T) -> ::Out; fn test(t: T) { let y = foo(t); y; -} //^ ApplyL::Out +} //^ ::Out "#, ); } @@ -1454,7 +1459,7 @@ trait Trait { fn foo2(&self) -> i64; } -struct Box {} +struct Box(*const T); impl core::ops::Deref for Box { type Target = T; } @@ -1475,27 +1480,27 @@ fn test(x: Box>, y: &dyn Trait) { expect![[r#" 29..33 'self': &'? Self 54..58 'self': &'? Self - 198..200 '{}': Box + '?> - 210..211 'x': Box + '?> - 234..235 'y': &'? (dyn Trait + '?) - 254..371 '{ ...2(); }': () - 260..261 'x': Box + '?> - 267..268 'y': &'? (dyn Trait + '?) - 278..279 'z': Box + '?> - 282..285 'bar': fn bar() -> Box + '?> - 282..287 'bar()': Box + '?> - 293..294 'x': Box + '?> - 293..300 'x.foo()': u64 - 306..307 'y': &'? (dyn Trait + '?) - 306..313 'y.foo()': u64 - 319..320 'z': Box + '?> - 319..326 'z.foo()': u64 - 332..333 'x': Box + '?> - 332..340 'x.foo2()': i64 - 346..347 'y': &'? (dyn Trait + '?) - 346..354 'y.foo2()': i64 - 360..361 'z': Box + '?> - 360..368 'z.foo2()': i64 + 206..208 '{}': Box + '?> + 218..219 'x': Box + '?> + 242..243 'y': &'? (dyn Trait + '?) + 262..379 '{ ...2(); }': () + 268..269 'x': Box + '?> + 275..276 'y': &'? (dyn Trait + '?) + 286..287 'z': Box + '?> + 290..293 'bar': fn bar() -> Box + '?> + 290..295 'bar()': Box + '?> + 301..302 'x': Box + '?> + 301..308 'x.foo()': u64 + 314..315 'y': &'? (dyn Trait + '?) + 314..321 'y.foo()': u64 + 327..328 'z': Box + '?> + 327..334 'z.foo()': u64 + 340..341 'x': Box + '?> + 340..348 'x.foo2()': i64 + 354..355 'y': &'? (dyn Trait + '?) 
+ 354..362 'y.foo2()': i64 + 368..369 'z': Box + '?> + 368..376 'z.foo2()': i64 "#]], ); } @@ -1672,7 +1677,9 @@ fn test(x: (impl Trait + UnknownTrait)) { fn assoc_type_bindings() { check_infer( r#" -//- minicore: sized +//- minicore: sized, phantom_data +use core::marker::PhantomData; + trait Trait { type Type; } @@ -1681,7 +1688,7 @@ fn get(t: T) -> ::Type {} fn get2>(t: T) -> U {} fn set>(t: T) -> T {t} -struct S; +struct S(PhantomData); impl Trait for S { type Type = T; } fn test>(x: T, y: impl Trait) { @@ -1689,46 +1696,52 @@ fn test>(x: T, y: impl Trait) { get2(x); get(y); get2(y); - get(set(S)); - get2(set(S)); - get2(S::); + get(set(S(PhantomData))); + get2(set(S(PhantomData))); + get2(S::(PhantomData)); }"#, expect![[r#" - 49..50 't': T - 77..79 '{}': Trait::Type - 111..112 't': T - 122..124 '{}': U - 154..155 't': T - 165..168 '{t}': T - 166..167 't': T - 256..257 'x': T - 262..263 'y': impl Trait - 289..397 '{ ...r>); }': () - 295..298 'get': fn get(T) -> ::Type - 295..301 'get(x)': u32 - 299..300 'x': T - 307..311 'get2': fn get2(T) -> u32 - 307..314 'get2(x)': u32 - 312..313 'x': T - 320..323 'get': fn get>(impl Trait) -> as Trait>::Type - 320..326 'get(y)': i64 - 324..325 'y': impl Trait - 332..336 'get2': fn get2>(impl Trait) -> i64 - 332..339 'get2(y)': i64 - 337..338 'y': impl Trait - 345..348 'get': fn get>(S) -> as Trait>::Type - 345..356 'get(set(S))': u64 - 349..352 'set': fn set>(S) -> S - 349..355 'set(S)': S - 353..354 'S': S - 362..366 'get2': fn get2>(S) -> u64 - 362..374 'get2(set(S))': u64 - 367..370 'set': fn set>(S) -> S - 367..373 'set(S)': S - 371..372 'S': S - 380..384 'get2': fn get2>(S) -> str - 380..394 'get2(S::)': str - 385..393 'S::': S + 81..82 't': T + 109..111 '{}': ::Type + 143..144 't': T + 154..156 '{}': U + 186..187 't': T + 197..200 '{t}': T + 198..199 't': T + 304..305 'x': T + 310..311 'y': impl Trait + 337..486 '{ ...a)); }': () + 343..346 'get': fn get(T) -> ::Type + 343..349 'get(x)': u32 + 347..348 'x': T + 355..359 'get2': fn get2(T) -> u32 + 355..362 'get2(x)': u32 + 360..361 'x': T + 368..371 'get': fn get>(impl Trait) -> as Trait>::Type + 368..374 'get(y)': i64 + 372..373 'y': impl Trait + 380..384 'get2': fn get2>(impl Trait) -> i64 + 380..387 'get2(y)': i64 + 385..386 'y': impl Trait + 393..396 'get': fn get>(S) -> as Trait>::Type + 393..417 'get(se...ata)))': u64 + 397..400 'set': fn set>(S) -> S + 397..416 'set(S(...Data))': S + 401..402 'S': fn S(PhantomData) -> S + 401..415 'S(PhantomData)': S + 403..414 'PhantomData': PhantomData + 423..427 'get2': fn get2>(S) -> u64 + 423..448 'get2(s...ata)))': u64 + 428..431 'set': fn set>(S) -> S + 428..447 'set(S(...Data))': S + 432..433 'S': fn S(PhantomData) -> S + 432..446 'S(PhantomData)': S + 434..445 'PhantomData': PhantomData + 454..458 'get2': fn get2>(S) -> usize + 454..483 'get2(S...Data))': usize + 459..469 'S::': fn S(PhantomData) -> S + 459..482 'S:: + 470..481 'PhantomData': PhantomData "#]], ); } @@ -1745,7 +1758,7 @@ pub enum RustLanguage {} impl Language for RustLanguage { type Kind = SyntaxKind; } -struct SyntaxNode {} +struct SyntaxNode(L); fn foo() -> impl Iterator> {} trait Clone { @@ -1884,31 +1897,36 @@ fn super_trait_cycle() { fn super_trait_assoc_type_bounds() { check_infer( r#" +//- minicore: phantom_data +use core::marker::PhantomData; + trait SuperTrait { type Type; } trait Trait where Self: SuperTrait {} fn get2>(t: T) -> U {} fn set>(t: T) -> T {t} -struct S; +struct S(PhantomData); impl SuperTrait for S { type Type = T; } impl Trait for S {} fn test() { - 
get2(set(S)); + get2(set(S(PhantomData))); }"#, expect![[r#" - 102..103 't': T - 113..115 '{}': U - 145..146 't': T - 156..159 '{t}': T - 157..158 't': T - 258..279 '{ ...S)); }': () - 264..268 'get2': fn get2>(S) -> u64 - 264..276 'get2(set(S))': u64 - 269..272 'set': fn set>(S) -> S - 269..275 'set(S)': S - 273..274 'S': S + 134..135 't': T + 145..147 '{}': U + 177..178 't': T + 188..191 '{t}': T + 189..190 't': T + 306..340 '{ ...))); }': () + 312..316 'get2': fn get2>(S) -> u64 + 312..337 'get2(s...ata)))': u64 + 317..320 'set': fn set>(S) -> S + 317..336 'set(S(...Data))': S + 321..322 'S': fn S(PhantomData) -> S + 321..335 'S(PhantomData)': S + 323..334 'PhantomData': PhantomData "#]], ); } @@ -1998,7 +2016,7 @@ impl Foo { fn foo(&self) -> usize {} } -struct Lazy T>(F); +struct Lazy T>(T, F); impl Lazy { pub fn new(f: F) -> Lazy {} @@ -2012,7 +2030,7 @@ fn test() { let lazy1: Lazy = Lazy::new(|| Foo); let r1 = lazy1.foo(); - fn make_foo_fn() -> Foo {} +fn make_foo_fn() -> Foo {} let make_foo_fn_ptr: fn() -> Foo = make_foo_fn; let lazy2: Lazy = Lazy::new(make_foo_fn_ptr); let r2 = lazy2.foo(); @@ -2020,27 +2038,27 @@ fn test() { expect![[r#" 36..40 'self': &'? Foo 51..53 '{}': usize - 131..132 'f': F - 151..153 '{}': Lazy - 251..497 '{ ...o(); }': () - 261..266 'lazy1': Lazy Foo> - 283..292 'Lazy::new': fn new Foo>(impl Fn() -> Foo) -> Lazy Foo> - 283..300 'Lazy::...| Foo)': Lazy Foo> - 293..299 '|| Foo': impl Fn() -> Foo - 296..299 'Foo': Foo - 310..312 'r1': usize - 315..320 'lazy1': Lazy Foo> - 315..326 'lazy1.foo()': usize - 368..383 'make_foo_fn_ptr': fn() -> Foo - 399..410 'make_foo_fn': fn make_foo_fn() -> Foo - 420..425 'lazy2': Lazy Foo> - 442..451 'Lazy::new': fn new Foo>(fn() -> Foo) -> Lazy Foo> - 442..468 'Lazy::...n_ptr)': Lazy Foo> - 452..467 'make_foo_fn_ptr': fn() -> Foo - 478..480 'r2': usize - 483..488 'lazy2': Lazy Foo> - 483..494 'lazy2.foo()': usize - 357..359 '{}': Foo + 134..135 'f': F + 154..156 '{}': Lazy + 254..496 '{ ...o(); }': () + 264..269 'lazy1': Lazy Foo> + 286..295 'Lazy::new': fn new Foo>(impl Fn() -> Foo) -> Lazy Foo> + 286..303 'Lazy::...| Foo)': Lazy Foo> + 296..302 '|| Foo': impl Fn() -> Foo + 299..302 'Foo': Foo + 313..315 'r1': usize + 318..323 'lazy1': Lazy Foo> + 318..329 'lazy1.foo()': usize + 367..382 'make_foo_fn_ptr': fn() -> Foo + 398..409 'make_foo_fn': fn make_foo_fn() -> Foo + 419..424 'lazy2': Lazy Foo> + 441..450 'Lazy::new': fn new Foo>(fn() -> Foo) -> Lazy Foo> + 441..467 'Lazy::...n_ptr)': Lazy Foo> + 451..466 'make_foo_fn_ptr': fn() -> Foo + 477..479 'r2': usize + 482..487 'lazy2': Lazy Foo> + 482..493 'lazy2.foo()': usize + 356..358 '{}': Foo "#]], ); } @@ -2293,7 +2311,7 @@ impl Trait for S2 { }"#, expect![[r#" 40..44 'self': &'? Self - 46..47 'x': Trait::Item + 46..47 'x': ::Item 126..130 'self': &'? 
S 132..133 'x': u32 147..161 '{ let y = x; }': () @@ -2339,7 +2357,7 @@ trait Fold { type Result; } -struct Ty {} +struct Ty(I); impl Fold for Ty { type Result = Ty; } @@ -2381,17 +2399,20 @@ fn test() { fn trait_impl_self_ty_cycle() { check_types( r#" +//- minicore: phantom_data +use core::marker::PhantomData; + trait Trait { fn foo(&self); } -struct S; +struct S(T); impl Trait for S {} fn test() { - S.foo(); -} //^^^^^^^ {unknown} + S(PhantomData).foo(); +} //^^^^^^^^^^^^^^^^^^^^ {unknown} "#, ); } @@ -2410,7 +2431,7 @@ trait Trait2 {} fn test() where T: Trait2 { let x: T::Item = no_matter; -} //^^^^^^^^^ Trait::Item +} //^^^^^^^^^ ::Item "#, ); } @@ -2445,7 +2466,7 @@ trait Trait { fn test() where T: Trait { let x: T::Item = no_matter; -} //^^^^^^^^^ Trait::Item +} //^^^^^^^^^ ::Item "#, ); } @@ -2460,7 +2481,7 @@ use core::ops::Index; type Key = ::Key; -pub trait UnificationStoreBase: Index> { +pub trait UnificationStoreBase: Index> { type Key; fn len(&self) -> usize; @@ -2475,7 +2496,7 @@ fn test(t: T) where T: UnificationStoreMut { t.push(x); let y: Key; (x, y); -} //^^^^^^ (UnificationStoreBase::Key, UnificationStoreBase::Key) +} //^^^^^^ (::Key, ::Key) "#, ); } @@ -2740,8 +2761,8 @@ impl> Foo { fn dyn_trait_through_chalk() { check_types( r#" -//- minicore: deref -struct Box {} +//- minicore: deref, unsize, dispatch_from_dyn +struct Box(*const T); impl core::ops::Deref for Box { type Target = T; } @@ -2800,7 +2821,7 @@ pub trait IntoIterator { fn into_iter(self) -> Self::IntoIter; } -pub struct FilterMap { } +pub struct FilterMap(I, F); impl Iterator for FilterMap where F: FnMut(I::Item) -> Option, @@ -2818,7 +2839,7 @@ impl IntoIterator for I { } } -struct Vec {} +struct Vec(T); impl Vec { fn new() -> Self { loop {} } } @@ -2828,7 +2849,7 @@ impl IntoIterator for Vec { type IntoIter = IntoIter; } -pub struct IntoIter { } +pub struct IntoIter(T); impl Iterator for IntoIter { type Item = T; } @@ -2850,35 +2871,35 @@ fn main() { 242..249 'loop {}': ! 247..249 '{}': () 360..364 'self': Self - 689..693 'self': I - 700..720 '{ ... }': I - 710..714 'self': I - 779..790 '{ loop {} }': Vec - 781..788 'loop {}': ! - 786..788 '{}': () - 977..1104 '{ ... }); }': () - 983..998 'Vec::::new': fn new() -> Vec - 983..1000 'Vec::<...:new()': Vec - 983..1012 'Vec::<...iter()': IntoIter - 983..1075 'Vec::<...one })': FilterMap, impl FnMut(i32) -> Option> - 983..1101 'Vec::<... y; })': () - 1029..1074 '|x| if...None }': impl FnMut(i32) -> Option - 1030..1031 'x': i32 - 1033..1074 'if x >...None }': Option - 1036..1037 'x': i32 - 1036..1041 'x > 0': bool - 1040..1041 '0': i32 - 1042..1060 '{ Some...u32) }': Option - 1044..1048 'Some': fn Some(u32) -> Option - 1044..1058 'Some(x as u32)': Option - 1049..1050 'x': i32 - 1049..1057 'x as u32': u32 - 1066..1074 '{ None }': Option - 1068..1072 'None': Option - 1090..1100 '|y| { y; }': impl FnMut(u32) - 1091..1092 'y': u32 - 1094..1100 '{ y; }': () - 1096..1097 'y': u32 + 692..696 'self': I + 703..723 '{ ... }': I + 713..717 'self': I + 783..794 '{ loop {} }': Vec + 785..792 'loop {}': ! + 790..792 '{}': () + 981..1108 '{ ... }); }': () + 987..1002 'Vec::::new': fn new() -> Vec + 987..1004 'Vec::<...:new()': Vec + 987..1016 'Vec::<...iter()': IntoIter + 987..1079 'Vec::<...one })': FilterMap, impl FnMut(i32) -> Option> + 987..1105 'Vec::<... 
y; })': () + 1033..1078 '|x| if...None }': impl FnMut(i32) -> Option + 1034..1035 'x': i32 + 1037..1078 'if x >...None }': Option + 1040..1041 'x': i32 + 1040..1045 'x > 0': bool + 1044..1045 '0': i32 + 1046..1064 '{ Some...u32) }': Option + 1048..1052 'Some': fn Some(u32) -> Option + 1048..1062 'Some(x as u32)': Option + 1053..1054 'x': i32 + 1053..1061 'x as u32': u32 + 1070..1078 '{ None }': Option + 1072..1076 'None': Option + 1094..1104 '|y| { y; }': impl FnMut(u32) + 1095..1096 'y': u32 + 1098..1104 '{ y; }': () + 1100..1101 'y': u32 "#]], ); } @@ -3132,7 +3153,6 @@ fn foo() { #[test] fn dyn_fn_param_informs_call_site_closure_signature() { - cov_mark::check!(dyn_fn_param_informs_call_site_closure_signature); check_types( r#" //- minicore: fn, coerce_unsized, dispatch_from_dyn @@ -3228,7 +3248,7 @@ fn foo() { fn infer_dyn_fn_output() { check_types( r#" -//- minicore: fn +//- minicore: fn, dispatch_from_dyn fn foo() { let f: &dyn Fn() -> i32; f(); @@ -3488,7 +3508,7 @@ fn foo() { let x = ::boo(); }"#, expect![[r#" - 132..163 '{ ... }': Bar::Output + 132..163 '{ ... }': ::Output 146..153 'loop {}': ! 151..153 '{}': () 306..358 '{ ...o(); }': () @@ -3615,7 +3635,7 @@ impl Add<&i32> for i32 { type Output = i32 } impl Add for u32 { type Output = u32 } impl Add<&u32> for u32 { type Output = u32 } -struct V; +struct V(T); impl V { fn default() -> Self { loop {} } fn get(&self, _: &T) -> &T { loop {} } @@ -3634,8 +3654,7 @@ fn minimized() { #[test] fn no_builtin_binop_expectation_for_general_ty_var() { - // FIXME: Ideally type mismatch should be reported on `take_u32(42 - p)`. - check_types( + infer_with_mismatches( r#" //- minicore: add use core::ops::Add; @@ -3645,7 +3664,7 @@ impl Add<&i32> for i32 { type Output = i32; } // fallback to integer type variable for `42`. impl Add<&()> for i32 { type Output = (); } -struct V; +struct V(T); impl V { fn default() -> Self { loop {} } fn get(&self) -> &T { loop {} } @@ -3659,6 +3678,7 @@ fn minimized() { take_u32(42 + p); } "#, + true, ); } @@ -4211,21 +4231,21 @@ fn f(v: impl Trait) { } fn g<'a, T: 'a>(v: impl Trait = &'a T>) { let a = v.get::(); - //^ &'a T + //^ &'? T let a = v.get::<()>(); - //^ Trait::Assoc = &'a T>, ()> + //^ = &'a T> as Trait>::Assoc<()> } fn h<'a>(v: impl Trait = &'a i32> + Trait = &'a i64>) { let a = v.get::(); - //^ &'a i32 + //^ &'? i32 let a = v.get::(); - //^ &'a i64 + //^ &'? i64 } fn i<'a>(v: impl Trait = &'a i32, Assoc = &'a i64>) { let a = v.get::(); - //^ &'a i32 + //^ &'? i32 let a = v.get::(); - //^ &'a i64 + //^ &'? i64 } "#, ); @@ -4255,8 +4275,8 @@ fn f<'a>(v: &dyn Trait = &'a i32>) { 127..128 'v': &'? (dyn Trait = &'a i32> + '?) 164..195 '{ ...f(); }': () 170..171 'v': &'? (dyn Trait = &'a i32> + '?) - 170..184 'v.get::()': &'? i32 - 170..192 'v.get:...eref()': &'? i32 + 170..184 'v.get::()': = &'a i32> + '? 
as Trait>::Assoc + 170..192 'v.get:...eref()': {unknown} "#]], ); } @@ -4280,7 +4300,7 @@ where let a = t.get::(); //^ usize let a = t.get::<()>(); - //^ Trait::Assoc + //^ ::Assoc<()> } "#, @@ -4483,7 +4503,9 @@ impl Trait for () { fn derive_macro_bounds() { check_types( r#" - //- minicore: clone, derive + //- minicore: clone, derive, phantom_data + use core::marker::PhantomData; + #[derive(Clone)] struct Copy; struct NotCopy; @@ -4506,7 +4528,7 @@ fn derive_macro_bounds() { struct AssocGeneric3(Generic); #[derive(Clone)] - struct Vec(); + struct Vec(PhantomData); #[derive(Clone)] struct R1(Vec); @@ -4530,9 +4552,9 @@ fn derive_macro_bounds() { let x: &AssocGeneric3 = &AssocGeneric3(Generic(NotCopy)); let x = x.clone(); //^ &'? AssocGeneric3 - let x = (&R1(Vec())).clone(); + let x = (&R1(Vec(PhantomData))).clone(); //^ R1 - let x = (&R2(R1(Vec()))).clone(); + let x = (&R2(R1(Vec(PhantomData)))).clone(); //^ R2 } "#, @@ -4622,8 +4644,10 @@ fn ttt() { fn infer_borrow() { check_types( r#" -//- minicore: index -pub struct SomeMap; +//- minicore: index, phantom_data +use core::marker::PhantomData; + +pub struct SomeMap(PhantomData); pub trait Borrow { fn borrow(&self) -> &Borrowed; @@ -4656,7 +4680,7 @@ impl core::ops::IndexMut for SomeMap { } fn foo() { - let mut map = SomeMap; + let mut map = SomeMap(PhantomData); map["a"] = (); map; //^^^ SomeMap<&'static str> @@ -4785,30 +4809,30 @@ fn allowed2<'a>(baz: impl Baz) {} fn allowed3(baz: impl Baz>) {} "#, expect![[r#" - 139..140 'f': impl Fn({unknown}) + ?Sized + 139..140 'f': impl Fn({unknown}) 161..193 '{ ...oo); }': () 171..174 'foo': S 177..178 'S': S - 184..185 'f': impl Fn({unknown}) + ?Sized + 184..185 'f': impl Fn({unknown}) 184..190 'f(foo)': () 186..189 'foo': S - 251..252 'f': impl Fn(&'? {unknown}) + ?Sized + 251..252 'f': impl Fn(&'? {unknown}) 274..307 '{ ...oo); }': () 284..287 'foo': S 290..291 'S': S - 297..298 'f': impl Fn(&'? {unknown}) + ?Sized + 297..298 'f': impl Fn(&'? {unknown}) 297..304 'f(&foo)': () 299..303 '&foo': &'? S 300..303 'foo': S - 325..328 'bar': impl Bar<{unknown}> + ?Sized + 325..328 'bar': impl Bar<{unknown}> 350..352 '{}': () - 405..408 'bar': impl Bar<&'? {unknown}> + ?Sized + 405..408 'bar': impl Bar<&'? 
{unknown}> 431..433 '{}': () - 447..450 'baz': impl Baz + ?Sized + 447..450 'baz': impl Baz 480..482 '{}': () - 500..503 'baz': impl Baz + ?Sized + 500..503 'baz': impl Baz 544..546 '{}': () - 560..563 'baz': impl Baz> + ?Sized + 560..563 'baz': impl Baz> 598..600 '{}': () "#]], ) @@ -4857,29 +4881,29 @@ async fn baz i32>(c: T) { 37..38 'a': T 43..83 '{ ...ait; }': () 43..83 '{ ...ait; }': impl Future - 53..57 'fut1': AsyncFnMut::CallRefFuture<'?, T, (u32,)> + 53..57 'fut1': >::CallRefFuture<'?> 60..61 'a': T - 60..64 'a(0)': AsyncFnMut::CallRefFuture<'?, T, (u32,)> + 60..64 'a(0)': >::CallRefFuture<'?> 62..63 '0': u32 - 70..74 'fut1': AsyncFnMut::CallRefFuture<'?, T, (u32,)> + 70..74 'fut1': >::CallRefFuture<'?> 70..80 'fut1.await': i32 124..129 'mut b': T 134..174 '{ ...ait; }': () 134..174 '{ ...ait; }': impl Future - 144..148 'fut2': AsyncFnMut::CallRefFuture<'?, T, (u32,)> + 144..148 'fut2': >::CallRefFuture<'?> 151..152 'b': T - 151..155 'b(0)': AsyncFnMut::CallRefFuture<'?, T, (u32,)> + 151..155 'b(0)': >::CallRefFuture<'?> 153..154 '0': u32 - 161..165 'fut2': AsyncFnMut::CallRefFuture<'?, T, (u32,)> + 161..165 'fut2': >::CallRefFuture<'?> 161..171 'fut2.await': i32 216..217 'c': T 222..262 '{ ...ait; }': () 222..262 '{ ...ait; }': impl Future - 232..236 'fut3': AsyncFnOnce::CallOnceFuture + 232..236 'fut3': >::CallOnceFuture 239..240 'c': T - 239..243 'c(0)': AsyncFnOnce::CallOnceFuture + 239..243 'c(0)': >::CallOnceFuture 241..242 '0': u32 - 249..253 'fut3': AsyncFnOnce::CallOnceFuture + 249..253 'fut3': >::CallOnceFuture 249..259 'fut3.await': i32 "#]], ); @@ -4906,6 +4930,7 @@ fn main() { #[test] fn async_fn_return_type() { + // FIXME(next-solver): Async closures are lowered as closures currently. We should fix that. check_infer( r#" //- minicore: async_fn @@ -4923,10 +4948,108 @@ fn main() { 46..53 'loop {}': ! 51..53 '{}': () 67..97 '{ ...()); }': () - 73..76 'foo': fn foo impl Future, ()>(impl AsyncFn() -> impl Future) + 73..76 'foo': fn foo(impl Fn()) 73..94 'foo(as...|| ())': () - 77..93 'async ... || ()': impl AsyncFn() -> impl Future + 77..93 'async ... || ()': impl Fn() 91..93 '()': () "#]], ); } + +// FIXME(next-solver): Was `>::Error` but now getting error lifetime. +// This might be fixed once we migrate into next-solver fully without chalk-ir in lowering. +#[test] +fn new_solver_crash_1() { + check_infer( + r#" +pub trait Deserializer<'de> { + type Error; +} + +fn deserialize_abs_pathbuf<'de, D>(de: D) -> D::Error +where + D: Deserializer<'de>, +{ +} +"#, + expect![[r#" + 84..86 'de': D + 135..138 '{ }': >::Error + "#]], + ); +} + +#[test] +fn new_solver_crash_2() { + check_infer( + r#" +//- minicore: deref, send, sync +use core::ops::Deref; + +trait Error {} + +struct AnyhowError; + +impl Deref for AnyhowError { + type Target = dyn Error + Send + Sync; + + fn deref(&self) -> &Self::Target { loop {} } +} + +impl AnyhowError { + fn downcast(self) {} +} + + +fn main() { + let e = AnyhowError; + e.downcast::<()>(); +} +"#, + expect![[r#" + 147..151 'self': &'? AnyhowError + 170..181 '{ loop {} }': &'? (dyn Error + Send + Sync + 'static) + 172..179 'loop {}': ! 
+ 177..179 '{}': () + 223..227 'self': AnyhowError + 229..231 '{}': () + 246..298 '{ ...>(); }': () + 256..257 'e': AnyhowError + 260..271 'AnyhowError': AnyhowError + 277..278 'e': AnyhowError + 277..295 'e.down...<()>()': () + "#]], + ); +} + +#[test] +fn trait_object_binders() { + check_infer( + r#" +//- minicore: iterator, dispatch_from_dyn +fn main() { + struct Box(*const T); + impl Iterator for Box { + type Item = I::Item; + fn next(&mut self) -> Option { + loop {} + } + } + let iter: Box + 'static> = loop {}; + let _ = iter.into_iter(); +}"#, + expect![[r#" + 10..313 '{ ...r(); }': () + 223..227 'iter': Box + 'static> + 273..280 'loop {}': ! + 278..280 '{}': () + 290..291 '_': Box + '?> + 294..298 'iter': Box + 'static> + 294..310 'iter.i...iter()': Box + 'static> + 152..156 'self': &'? mut Box + 177..208 '{ ... }': Option<::Item> + 191..198 'loop {}': ! + 196..198 '{}': () + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs index f53409af2b30c..fe4cf7a3da527 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs @@ -10,6 +10,7 @@ use crate::{ }; use hir_def::{AdtId, ItemContainerId, Lookup, TypeAliasId}; +#[allow(unused)] pub(crate) use unsafe_tls::{set_current_program, with_current_program}; pub(crate) struct DebugContext<'a>(&'a dyn HirDatabase); @@ -136,6 +137,7 @@ mod unsafe_tls { if PROGRAM.is_set() { PROGRAM.with(|prog| op(Some(prog))) } else { op(None) } } + #[allow(dead_code)] pub(crate) fn set_current_program(p: &dyn HirDatabase, op: OP) -> R where OP: FnOnce() -> R, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs index 08b9d242e71d2..8095d702be489 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs @@ -1,43 +1,38 @@ //! Trait solving using Chalk. use core::fmt; -use std::env::var; +use std::hash::Hash; use chalk_ir::{DebruijnIndex, GoalData, fold::TypeFoldable}; -use chalk_recursive::Cache; -use chalk_solve::{Solver, logging_db::LoggingRustIrDatabase, rust_ir}; use base_db::Crate; use hir_def::{BlockId, TraitId, lang_item::LangItem}; use hir_expand::name::Name; use intern::sym; +use rustc_next_trait_solver::solve::{HasChanged, SolverDelegateEvalExt}; +use rustc_type_ir::{ + InferCtxtLike, TypingMode, + inherent::{SliceLike, Span as _}, + solve::Certainty, +}; use span::Edition; -use stdx::{never, panic_context}; +use stdx::never; use triomphe::Arc; use crate::{ - AliasEq, AliasTy, Canonical, DomainGoal, Goal, Guidance, InEnvironment, Interner, ProjectionTy, - ProjectionTyExt, Solution, TraitRefExt, Ty, TyKind, TypeFlags, WhereClause, db::HirDatabase, - infer::unify::InferenceTable, utils::UnevaluatedConstEvaluatorFolder, + AliasEq, AliasTy, Canonical, DomainGoal, Goal, InEnvironment, Interner, ProjectionTy, + ProjectionTyExt, TraitRefExt, Ty, TyKind, TypeFlags, WhereClause, + db::HirDatabase, + infer::unify::InferenceTable, + next_solver::{ + DbInterner, GenericArg, Predicate, SolverContext, Span, + infer::{DbInternerInferExt, InferCtxt}, + mapping::{ChalkToNextSolver, convert_canonical_args_for_result}, + util::mini_canonicalize, + }, + utils::UnevaluatedConstEvaluatorFolder, }; -/// This controls how much 'time' we give the Chalk solver before giving up. 
-const CHALK_SOLVER_FUEL: i32 = 1000; - -#[derive(Debug, Copy, Clone)] -pub(crate) struct ChalkContext<'a> { - pub(crate) db: &'a dyn HirDatabase, - pub(crate) krate: Crate, - pub(crate) block: Option, -} - -fn create_chalk_solver() -> chalk_recursive::RecursiveSolver { - let overflow_depth = - var("CHALK_OVERFLOW_DEPTH").ok().and_then(|s| s.parse().ok()).unwrap_or(500); - let max_size = var("CHALK_SOLVER_MAX_SIZE").ok().and_then(|s| s.parse().ok()).unwrap_or(150); - chalk_recursive::RecursiveSolver::new(overflow_depth, max_size, Some(Cache::new())) -} - /// A set of clauses that we assume to be true. E.g. if we are inside this function: /// ```rust /// fn foo(t: T) {} @@ -103,13 +98,43 @@ pub(crate) fn normalize_projection_query( table.resolve_completely(ty) } +fn identity_subst( + binders: chalk_ir::CanonicalVarKinds, +) -> chalk_ir::Canonical> { + let identity_subst = chalk_ir::Substitution::from_iter( + Interner, + binders.iter(Interner).enumerate().map(|(index, c)| { + let index_db = chalk_ir::BoundVar::new(DebruijnIndex::INNERMOST, index); + match &c.kind { + chalk_ir::VariableKind::Ty(_) => { + chalk_ir::GenericArgData::Ty(TyKind::BoundVar(index_db).intern(Interner)) + .intern(Interner) + } + chalk_ir::VariableKind::Lifetime => chalk_ir::GenericArgData::Lifetime( + chalk_ir::LifetimeData::BoundVar(index_db).intern(Interner), + ) + .intern(Interner), + chalk_ir::VariableKind::Const(ty) => chalk_ir::GenericArgData::Const( + chalk_ir::ConstData { + ty: ty.clone(), + value: chalk_ir::ConstValue::BoundVar(index_db), + } + .intern(Interner), + ) + .intern(Interner), + } + }), + ); + chalk_ir::Canonical { binders, value: identity_subst } +} + /// Solve a trait goal using Chalk. pub(crate) fn trait_solve_query( db: &dyn HirDatabase, krate: Crate, block: Option, goal: Canonical>, -) -> Option { +) -> NextTraitSolveResult { let _p = tracing::info_span!("trait_solve_query", detail = ?match &goal.value.goal.data(Interner) { GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => db .trait_signature(it.hir_trait_id()) @@ -128,7 +153,7 @@ pub(crate) fn trait_solve_query( && let TyKind::BoundVar(_) = projection_ty.self_type_parameter(db).kind(Interner) { // Hack: don't ask Chalk to normalize with an unknown self type, it'll say that's impossible - return Some(Solution::Ambig(Guidance::Unknown)); + return NextTraitSolveResult::Uncertain(identity_subst(goal.binders.clone())); } // Chalk see `UnevaluatedConst` as a unique concrete value, but we see it as an alias for another const. So @@ -139,71 +164,183 @@ pub(crate) fn trait_solve_query( // We currently don't deal with universes (I think / hope they're not yet // relevant for our use cases?) 
- let u_canonical = chalk_ir::UCanonical { canonical: goal, universes: 1 }; - solve(db, krate, block, &u_canonical) + next_trait_solve(db, krate, block, goal) } -fn solve( - db: &dyn HirDatabase, +fn solve_nextsolver<'db>( + db: &'db dyn HirDatabase, krate: Crate, block: Option, goal: &chalk_ir::UCanonical>>, -) -> Option> { - let _p = tracing::info_span!("solve", ?krate, ?block).entered(); - let context = ChalkContext { db, krate, block }; - tracing::debug!("solve goal: {:?}", goal); - let mut solver = create_chalk_solver(); - - let fuel = std::cell::Cell::new(CHALK_SOLVER_FUEL); - - let should_continue = || { - db.unwind_if_revision_cancelled(); - let remaining = fuel.get(); - fuel.set(remaining - 1); - if remaining == 0 { - tracing::debug!("fuel exhausted"); +) -> Result< + (HasChanged, Certainty, rustc_type_ir::Canonical, Vec>>), + rustc_type_ir::solve::NoSolution, +> { + // FIXME: should use analysis_in_body, but that needs GenericDefId::Block + let context = SolverContext( + DbInterner::new_with(db, Some(krate), block) + .infer_ctxt() + .build(TypingMode::non_body_analysis()), + ); + + match goal.canonical.value.goal.data(Interner) { + // FIXME: args here should be...what? not empty + GoalData::All(goals) if goals.is_empty(Interner) => { + return Ok((HasChanged::No, Certainty::Yes, mini_canonicalize(context, vec![]))); } - remaining > 0 - }; + _ => {} + } - let mut solve = || { - let _ctx = if is_chalk_debug() || is_chalk_print() { - Some(panic_context::enter(format!("solving {goal:?}"))) - } else { - None - }; - let solution = if is_chalk_print() { - let logging_db = - LoggingRustIrDatabaseLoggingOnDrop(LoggingRustIrDatabase::new(context)); - solver.solve_limited(&logging_db.0, goal, &should_continue) - } else { - solver.solve_limited(&context, goal, &should_continue) - }; - - tracing::debug!("solve({:?}) => {:?}", goal, solution); - - solution - }; + let goal = goal.canonical.to_nextsolver(context.cx()); + tracing::info!(?goal); + + let (goal, var_values) = context.instantiate_canonical(&goal); + tracing::info!(?var_values); + + let res = context.evaluate_root_goal(goal, Span::dummy(), None); + + let vars = + var_values.var_values.iter().map(|g| context.0.resolve_vars_if_possible(g)).collect(); + let canonical_var_values = mini_canonicalize(context, vars); + + let res = res.map(|r| (r.has_changed, r.certainty, canonical_var_values)); + + tracing::debug!("solve_nextsolver({:?}) => {:?}", goal, res); + + res +} - // don't set the TLS for Chalk unless Chalk debugging is active, to make - // extra sure we only use it for debugging - if is_chalk_debug() { crate::tls::set_current_program(db, solve) } else { solve() } +#[derive(Clone, Debug, PartialEq)] +pub enum NextTraitSolveResult { + Certain(chalk_ir::Canonical>), + Uncertain(chalk_ir::Canonical>), + NoSolution, } -struct LoggingRustIrDatabaseLoggingOnDrop<'a>(LoggingRustIrDatabase>); +impl NextTraitSolveResult { + pub fn no_solution(&self) -> bool { + matches!(self, NextTraitSolveResult::NoSolution) + } -impl Drop for LoggingRustIrDatabaseLoggingOnDrop<'_> { - fn drop(&mut self) { - tracing::info!("chalk program:\n{}", self.0); + pub fn certain(&self) -> bool { + matches!(self, NextTraitSolveResult::Certain(..)) + } + + pub fn uncertain(&self) -> bool { + matches!(self, NextTraitSolveResult::Uncertain(..)) } } -fn is_chalk_debug() -> bool { - std::env::var("CHALK_DEBUG").is_ok() +pub fn next_trait_solve( + db: &dyn HirDatabase, + krate: Crate, + block: Option, + goal: Canonical>, +) -> NextTraitSolveResult { + let detail = match 
&goal.value.goal.data(Interner) { + GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => { + db.trait_signature(it.hir_trait_id()).name.display(db, Edition::LATEST).to_string() + } + GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_owned(), + _ => "??".to_owned(), + }; + let _p = tracing::info_span!("next_trait_solve", ?detail).entered(); + tracing::info!("next_trait_solve({:?})", goal.value.goal); + + if let GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(AliasEq { + alias: AliasTy::Projection(projection_ty), + .. + }))) = &goal.value.goal.data(Interner) + && let TyKind::BoundVar(_) = projection_ty.self_type_parameter(db).kind(Interner) + { + // Hack: don't ask Chalk to normalize with an unknown self type, it'll say that's impossible + // FIXME + return NextTraitSolveResult::Uncertain(identity_subst(goal.binders.clone())); + } + + // Chalk see `UnevaluatedConst` as a unique concrete value, but we see it as an alias for another const. So + // we should get rid of it when talking to chalk. + let goal = goal + .try_fold_with(&mut UnevaluatedConstEvaluatorFolder { db }, DebruijnIndex::INNERMOST) + .unwrap(); + + // We currently don't deal with universes (I think / hope they're not yet + // relevant for our use cases?) + let u_canonical = chalk_ir::UCanonical { canonical: goal, universes: 1 }; + tracing::info!(?u_canonical); + + let next_solver_res = solve_nextsolver(db, krate, block, &u_canonical); + + match next_solver_res { + Err(_) => NextTraitSolveResult::NoSolution, + Ok((_, Certainty::Yes, args)) => NextTraitSolveResult::Certain( + convert_canonical_args_for_result(DbInterner::new_with(db, Some(krate), block), args), + ), + Ok((_, Certainty::Maybe { .. }, args)) => { + let subst = convert_canonical_args_for_result( + DbInterner::new_with(db, Some(krate), block), + args, + ); + NextTraitSolveResult::Uncertain(chalk_ir::Canonical { + binders: subst.binders, + value: subst.value.subst, + }) + } + } } -fn is_chalk_print() -> bool { - std::env::var("CHALK_PRINT").is_ok() +pub fn next_trait_solve_canonical_in_ctxt<'db>( + infer_ctxt: &InferCtxt<'db>, + goal: crate::next_solver::Canonical<'db, crate::next_solver::Goal<'db, Predicate<'db>>>, +) -> NextTraitSolveResult { + let context = SolverContext(infer_ctxt.clone()); + + tracing::info!(?goal); + + let (goal, var_values) = context.instantiate_canonical(&goal); + tracing::info!(?var_values); + + let res = context.evaluate_root_goal(goal, Span::dummy(), None); + + let vars = + var_values.var_values.iter().map(|g| context.0.resolve_vars_if_possible(g)).collect(); + let canonical_var_values = mini_canonicalize(context, vars); + + let res = res.map(|r| (r.has_changed, r.certainty, canonical_var_values)); + + tracing::debug!("solve_nextsolver({:?}) => {:?}", goal, res); + + match res { + Err(_) => NextTraitSolveResult::NoSolution, + Ok((_, Certainty::Yes, args)) => NextTraitSolveResult::Certain( + convert_canonical_args_for_result(infer_ctxt.interner, args), + ), + Ok((_, Certainty::Maybe { .. }, args)) => { + let subst = convert_canonical_args_for_result(infer_ctxt.interner, args); + NextTraitSolveResult::Uncertain(chalk_ir::Canonical { + binders: subst.binders, + value: subst.value.subst, + }) + } + } +} + +/// Solve a trait goal using Chalk. 
+pub fn next_trait_solve_in_ctxt<'db, 'a>( + infer_ctxt: &'a InferCtxt<'db>, + goal: crate::next_solver::Goal<'db, crate::next_solver::Predicate<'db>>, +) -> Result<(HasChanged, Certainty), rustc_type_ir::solve::NoSolution> { + tracing::info!(?goal); + + let context = <&SolverContext<'db>>::from(infer_ctxt); + + let res = context.evaluate_root_goal(goal, Span::dummy(), None); + + let res = res.map(|r| (r.has_changed, r.certainty)); + + tracing::debug!("solve_nextsolver({:?}) => {:?}", goal, res); + + res } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] @@ -267,15 +404,6 @@ impl FnTrait { } } - pub const fn to_chalk_ir(self) -> rust_ir::ClosureKind { - // Chalk doesn't support async fn traits. - match self { - FnTrait::AsyncFnOnce | FnTrait::FnOnce => rust_ir::ClosureKind::FnOnce, - FnTrait::AsyncFnMut | FnTrait::FnMut => rust_ir::ClosureKind::FnMut, - FnTrait::AsyncFn | FnTrait::Fn => rust_ir::ClosureKind::Fn, - } - } - pub fn method_name(self) -> Name { match self { FnTrait::FnOnce => Name::new_symbol_root(sym::call_once), @@ -290,9 +418,4 @@ impl FnTrait { pub fn get_id(self, db: &dyn HirDatabase, krate: Crate) -> Option { self.lang_item().resolve_trait(db, krate) } - - #[inline] - pub(crate) fn is_async(self) -> bool { - matches!(self, FnTrait::AsyncFn | FnTrait::AsyncFnMut | FnTrait::AsyncFnOnce) - } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs index 209ec7926e825..427c4bb68423d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs @@ -3,11 +3,11 @@ use std::{cell::LazyCell, iter}; -use base_db::Crate; -use chalk_ir::{ - DebruijnIndex, - fold::{FallibleTypeFolder, Shift}, +use base_db::{ + Crate, + target::{self, TargetData}, }; +use chalk_ir::{DebruijnIndex, fold::FallibleTypeFolder}; use hir_def::{ EnumId, EnumVariantId, FunctionId, Lookup, TraitId, TypeAliasId, TypeOrConstParamId, db::DefDatabase, @@ -20,17 +20,23 @@ use hir_expand::name::Name; use intern::sym; use rustc_abi::TargetDataLayout; use rustc_hash::FxHashSet; +use rustc_type_ir::inherent::{GenericArgs, IntoKind, SliceLike}; use smallvec::{SmallVec, smallvec}; use span::Edition; -use stdx::never; +use crate::next_solver::mapping::NextSolverToChalk; use crate::{ - ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TargetFeatures, TraitRef, - TraitRefExt, Ty, WhereClause, + ChalkTraitId, Const, ConstScalar, Interner, Substitution, TargetFeatures, TraitRef, + TraitRefExt, Ty, consteval::unknown_const, db::HirDatabase, layout::{Layout, TagEncoding}, mir::pad16, + next_solver::{ + DbInterner, + mapping::{ChalkToNextSolver, convert_args_for_result}, + }, + to_chalk_trait_id, }; pub(crate) fn fn_traits(db: &dyn DefDatabase, krate: Crate) -> impl Iterator + '_ { @@ -114,52 +120,6 @@ impl Iterator for SuperTraits<'_> { } } -pub(super) fn elaborate_clause_supertraits( - db: &dyn HirDatabase, - clauses: impl Iterator, -) -> ClauseElaborator<'_> { - let mut elaborator = ClauseElaborator { db, stack: Vec::new(), seen: FxHashSet::default() }; - elaborator.extend_deduped(clauses); - - elaborator -} - -pub(super) struct ClauseElaborator<'a> { - db: &'a dyn HirDatabase, - stack: Vec, - seen: FxHashSet, -} - -impl ClauseElaborator<'_> { - fn extend_deduped(&mut self, clauses: impl IntoIterator) { - self.stack.extend(clauses.into_iter().filter(|c| self.seen.insert(c.clone()))) - } - - fn elaborate_supertrait(&mut self, clause: &WhereClause) { - if let 
WhereClause::Implemented(trait_ref) = clause { - direct_super_trait_refs(self.db, trait_ref, |t| { - let clause = WhereClause::Implemented(t); - if self.seen.insert(clause.clone()) { - self.stack.push(clause); - } - }); - } - } -} - -impl Iterator for ClauseElaborator<'_> { - type Item = WhereClause; - - fn next(&mut self) -> Option { - if let Some(next) = self.stack.pop() { - self.elaborate_supertrait(&next); - Some(next) - } else { - None - } - } -} - fn direct_super_traits_cb(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(TraitId)) { let resolver = LazyCell::new(|| trait_.resolver(db)); let (generic_params, store) = db.generic_params_and_store(trait_.into()); @@ -191,25 +151,34 @@ fn direct_super_traits_cb(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut( } fn direct_super_trait_refs(db: &dyn HirDatabase, trait_ref: &TraitRef, cb: impl FnMut(TraitRef)) { + let interner = DbInterner::new_with(db, None, None); let generic_params = db.generic_params(trait_ref.hir_trait_id().into()); let trait_self = match generic_params.trait_self_param() { Some(p) => TypeOrConstParamId { parent: trait_ref.hir_trait_id().into(), local_id: p }, None => return, }; - db.generic_predicates_for_param(trait_self.parent, trait_self, None) + let trait_ref_args: crate::next_solver::GenericArgs<'_> = + trait_ref.substitution.to_nextsolver(interner); + db.generic_predicates_for_param_ns(trait_self.parent, trait_self, None) .iter() .filter_map(|pred| { - pred.as_ref().filter_map(|pred| match pred.skip_binders() { - // FIXME: how to correctly handle higher-ranked bounds here? - WhereClause::Implemented(tr) => Some( - tr.clone() - .shifted_out_to(Interner, DebruijnIndex::ONE) - .expect("FIXME unexpected higher-ranked trait bound"), - ), + let pred = pred.kind(); + // FIXME: how to correctly handle higher-ranked bounds here? + let pred = pred.no_bound_vars().expect("FIXME unexpected higher-ranked trait bound"); + match pred { + rustc_type_ir::ClauseKind::Trait(t) => { + let t = + rustc_type_ir::EarlyBinder::bind(t).instantiate(interner, trait_ref_args); + let trait_id = to_chalk_trait_id(t.def_id().0); + + let substitution = + convert_args_for_result(interner, t.trait_ref.args.as_slice()); + let tr = chalk_ir::TraitRef { trait_id, substitution }; + Some(tr) + } _ => None, - }) + } }) - .map(|pred| pred.substitute(Interner, &trait_ref.substitution)) .for_each(cb); } @@ -224,34 +193,25 @@ pub(super) fn associated_type_by_name_including_super_traits( }) } -/// It is a bit different from the rustc equivalent. Currently it stores: -/// - 0..n-1: generics of the parent -/// - n: the function signature, encoded as a function pointer type -/// -/// and it doesn't store the closure types and fields. -/// -/// Codes should not assume this ordering, and should always use methods available -/// on this struct for retrieving, and `TyBuilder::substs_for_closure` for creating. 
pub(crate) struct ClosureSubst<'a>(pub(crate) &'a Substitution); impl<'a> ClosureSubst<'a> { - pub(crate) fn parent_subst(&self) -> &'a [GenericArg] { - match self.0.as_slice(Interner) { - [x @ .., _] => x, - _ => { - never!("Closure missing parameter"); - &[] - } - } + pub(crate) fn parent_subst(&self, db: &dyn HirDatabase) -> Substitution { + let interner = DbInterner::new_with(db, None, None); + let subst = + >>::to_nextsolver( + self.0, interner, + ); + subst.split_closure_args().parent_args.to_chalk(interner) } - pub(crate) fn sig_ty(&self) -> &'a Ty { - match self.0.as_slice(Interner) { - [.., x] => x.assert_ty_ref(Interner), - _ => { - unreachable!("Closure missing sig_ty parameter"); - } - } + pub(crate) fn sig_ty(&self, db: &dyn HirDatabase) -> Ty { + let interner = DbInterner::new_with(db, None, None); + let subst = + >>::to_nextsolver( + self.0, interner, + ); + subst.split_closure_args_untupled().closure_sig_as_fn_ptr_ty.to_chalk(interner) } } @@ -263,18 +223,32 @@ pub enum Unsafety { DeprecatedSafe2024, } +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum TargetFeatureIsSafeInTarget { + No, + Yes, +} + +pub fn target_feature_is_safe_in_target(target: &TargetData) -> TargetFeatureIsSafeInTarget { + match target.arch { + target::Arch::Wasm32 | target::Arch::Wasm64 => TargetFeatureIsSafeInTarget::Yes, + _ => TargetFeatureIsSafeInTarget::No, + } +} + pub fn is_fn_unsafe_to_call( db: &dyn HirDatabase, func: FunctionId, caller_target_features: &TargetFeatures, call_edition: Edition, + target_feature_is_safe: TargetFeatureIsSafeInTarget, ) -> Unsafety { let data = db.function_signature(func); if data.is_unsafe() { return Unsafety::Unsafe; } - if data.has_target_feature() { + if data.has_target_feature() && target_feature_is_safe == TargetFeatureIsSafeInTarget::No { // RFC 2396 . let callee_target_features = TargetFeatures::from_attrs_no_implications(&db.attrs(func.into())); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs b/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs index 08a215fecf623..8593dba301b85 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs @@ -49,7 +49,23 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Option { // Chalk has no params, so use placeholders for now? 
TyKind::Placeholder(index) => { - let idx = crate::from_placeholder_idx(self.db, *index); + let idx = crate::from_placeholder_idx(self.db, *index).0; let index = self.generics.type_or_const_param_idx(idx).unwrap(); self.constrain(index, variance); } @@ -445,7 +462,7 @@ impl Context<'_> { ); match region.data(Interner) { LifetimeData::Placeholder(index) => { - let idx = crate::lt_from_placeholder_idx(self.db, *index); + let idx = crate::lt_from_placeholder_idx(self.db, *index).0; let inferred = self.generics.lifetime_idx(idx).unwrap(); self.constrain(inferred, variance); } @@ -581,8 +598,8 @@ struct Other<'a> { } "#, expect![[r#" - Hello['a: bivariant] - Other['a: bivariant] + Hello['a: invariant] + Other['a: invariant] "#]], ); } @@ -601,7 +618,7 @@ struct Foo { //~ ERROR [T: o] } "#, expect![[r#" - Foo[T: bivariant] + Foo[T: invariant] "#]], ); } @@ -683,9 +700,9 @@ struct TestBox+Setter> { //~ ERROR [U: *, T: +] get[Self: contravariant, T: covariant] get[Self: contravariant, T: contravariant] TestStruct[U: covariant, T: covariant] - TestEnum[U: bivariant, T: covariant] - TestContraStruct[U: bivariant, T: covariant] - TestBox[U: bivariant, T: covariant] + TestEnum[U: invariant, T: covariant] + TestContraStruct[U: invariant, T: covariant] + TestBox[U: invariant, T: covariant] "#]], ); } @@ -805,8 +822,8 @@ enum SomeEnum<'a> { Nothing } //~ ERROR parameter `'a` is never used trait SomeTrait<'a> { fn foo(&self); } // OK on traits. "#, expect![[r#" - SomeStruct['a: bivariant] - SomeEnum['a: bivariant] + SomeStruct['a: invariant] + SomeEnum['a: invariant] foo[Self: contravariant, 'a: invariant] "#]], ); @@ -834,14 +851,14 @@ struct DoubleNothing { "#, expect![[r#" - SomeStruct[A: bivariant] - SomeEnum[A: bivariant] - ListCell[T: bivariant] - SelfTyAlias[T: bivariant] - WithBounds[T: bivariant] - WithWhereBounds[T: bivariant] - WithOutlivesBounds[T: bivariant] - DoubleNothing[T: bivariant] + SomeStruct[A: invariant] + SomeEnum[A: invariant] + ListCell[T: invariant] + SelfTyAlias[T: invariant] + WithBounds[T: invariant] + WithWhereBounds[T: invariant] + WithOutlivesBounds[T: invariant] + DoubleNothing[T: invariant] "#]], ); } @@ -952,7 +969,7 @@ struct S3(S); "#, expect![[r#" S[T: covariant] - S2[T: bivariant] + S2[T: invariant] S3[T: covariant] "#]], ); @@ -965,7 +982,7 @@ struct S3(S); struct FixedPoint(&'static FixedPoint<(), T, U>, V); "#, expect![[r#" - FixedPoint[T: bivariant, U: bivariant, V: bivariant] + FixedPoint[T: invariant, U: invariant, V: invariant] "#]], ); } @@ -990,7 +1007,6 @@ struct FixedPoint(&'static FixedPoint<(), T, U>, V); ModuleDefId::AdtId(it) => it.into(), ModuleDefId::ConstId(it) => it.into(), ModuleDefId::TraitId(it) => it.into(), - ModuleDefId::TraitAliasId(it) => it.into(), ModuleDefId::TypeAliasId(it) => it.into(), _ => return, }) @@ -1021,10 +1037,6 @@ struct FixedPoint(&'static FixedPoint<(), T, U>, V); let loc = it.lookup(&db); loc.source(&db).value.name().unwrap() } - GenericDefId::TraitAliasId(it) => { - let loc = it.lookup(&db); - loc.source(&db).value.name().unwrap() - } GenericDefId::TypeAliasId(it) => { let loc = it.lookup(&db); loc.source(&db).value.name().unwrap() diff --git a/src/tools/rust-analyzer/crates/hir/Cargo.toml b/src/tools/rust-analyzer/crates/hir/Cargo.toml index c68ff706e4814..dfa39384320de 100644 --- a/src/tools/rust-analyzer/crates/hir/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir/Cargo.toml @@ -22,6 +22,8 @@ tracing.workspace = true triomphe.workspace = true indexmap.workspace = true +ra-ap-rustc_type_ir.workspace = true + # local 
deps base-db.workspace = true cfg.workspace = true @@ -36,6 +38,9 @@ span.workspace = true [dev-dependencies] expect-test.workspace = true +tracing.workspace = true +tracing-subscriber.workspace = true +tracing-tree.workspace = true # local deps test-utils.workspace = true diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs index c8645b6282392..c230bbad0bc45 100644 --- a/src/tools/rust-analyzer/crates/hir/src/attrs.rs +++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs @@ -14,12 +14,16 @@ use hir_expand::{ mod_path::{ModPath, PathKind}, name::Name, }; -use hir_ty::{db::HirDatabase, method_resolution}; +use hir_ty::{ + db::HirDatabase, + method_resolution, + next_solver::{DbInterner, mapping::ChalkToNextSolver}, +}; use crate::{ Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl, Field, Function, GenericParam, HasCrate, Impl, LifetimeParam, Macro, Module, ModuleDef, Static, - Struct, Trait, TraitAlias, Type, TypeAlias, TypeParam, Union, Variant, VariantDef, + Struct, Trait, Type, TypeAlias, TypeParam, Union, Variant, VariantDef, }; pub trait HasAttrs { @@ -48,7 +52,6 @@ impl_has_attrs![ (Static, StaticId), (Const, ConstId), (Trait, TraitId), - (TraitAlias, TraitAliasId), (TypeAlias, TypeAliasId), (Macro, MacroId), (Function, FunctionId), @@ -137,7 +140,6 @@ fn resolve_doc_path_on_( AttrDefId::StaticId(it) => it.resolver(db), AttrDefId::ConstId(it) => it.resolver(db), AttrDefId::TraitId(it) => it.resolver(db), - AttrDefId::TraitAliasId(it) => it.resolver(db), AttrDefId::TypeAliasId(it) => it.resolver(db), AttrDefId::ImplId(it) => it.resolver(db), AttrDefId::ExternBlockId(it) => it.resolver(db), @@ -216,10 +218,6 @@ fn resolve_assoc_or_field( DocLinkDef::ModuleDef(def) }); } - TypeNs::TraitAliasId(_) => { - // XXX: Do these get resolved? 
- return None; - } TypeNs::ModuleId(_) => { return None; } @@ -277,7 +275,11 @@ fn resolve_impl_trait_item<'db>( // // FIXME: resolve type aliases (which are not yielded by iterate_path_candidates) _ = method_resolution::iterate_path_candidates( - &canonical, + &canonical.to_nextsolver(DbInterner::new_with( + db, + Some(environment.krate), + environment.block, + )), db, environment, &traits_in_scope, diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs index 2960ebedf3806..833a9ef03065d 100644 --- a/src/tools/rust-analyzer/crates/hir/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir/src/display.rs @@ -23,8 +23,8 @@ use itertools::Itertools; use crate::{ Adt, AsAssocItem, AssocItem, AssocItemContainer, Const, ConstParam, Crate, Enum, ExternCrateDecl, Field, Function, GenericParam, HasCrate, HasVisibility, Impl, LifetimeParam, - Macro, Module, SelfParam, Static, Struct, StructKind, Trait, TraitAlias, TraitRef, TupleField, - TyBuilder, Type, TypeAlias, TypeOrConstParam, TypeParam, Union, Variant, + Macro, Module, SelfParam, Static, Struct, StructKind, Trait, TraitRef, TupleField, TyBuilder, + Type, TypeAlias, TypeOrConstParam, TypeParam, Union, Variant, }; impl HirDisplay for Function { @@ -474,8 +474,8 @@ impl HirDisplay for TypeParam { let param_data = ¶ms[self.id.local_id()]; let substs = TyBuilder::placeholder_subst(f.db, self.id.parent()); let krate = self.id.parent().krate(f.db).id; - let ty = - TyKind::Placeholder(hir_ty::to_placeholder_idx(f.db, self.id.into())).intern(Interner); + let ty = TyKind::Placeholder(hir_ty::to_placeholder_idx_no_index(f.db, self.id.into())) + .intern(Interner); let predicates = f.db.generic_predicates(self.id.parent()); let predicates = predicates .iter() @@ -528,8 +528,11 @@ impl HirDisplay for TypeParam { f, ":", Either::Left( - &hir_ty::TyKind::Placeholder(hir_ty::to_placeholder_idx(f.db, self.id.into())) - .intern(Interner), + &hir_ty::TyKind::Placeholder(hir_ty::to_placeholder_idx_no_index( + f.db, + self.id.into(), + )) + .intern(Interner), ), &predicates, default_sized, @@ -751,6 +754,7 @@ impl HirDisplay for TraitRef<'_> { impl HirDisplay for Trait { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { + // FIXME(trait-alias) needs special handling to print the equal sign write_trait_header(self, f)?; let def_id = GenericDefId::TraitId(self.id); let has_where_clause = write_where_clause(def_id, f)?; @@ -802,22 +806,6 @@ fn write_trait_header(trait_: &Trait, f: &mut HirFormatter<'_>) -> Result<(), Hi Ok(()) } -impl HirDisplay for TraitAlias { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; - let data = f.db.trait_alias_signature(self.id); - write!(f, "trait {}", data.name.display(f.db, f.edition()))?; - let def_id = GenericDefId::TraitAliasId(self.id); - write_generic_params(def_id, f)?; - f.write_str(" = ")?; - // FIXME: Currently we lower every bounds in a trait alias as a trait bound on `Self` i.e. - // `trait Foo = Bar` is stored and displayed as `trait Foo = where Self: Bar`, which might - // be less readable. 
- write_where_clause(def_id, f)?; - Ok(()) - } -} - impl HirDisplay for TypeAlias { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; diff --git a/src/tools/rust-analyzer/crates/hir/src/from_id.rs b/src/tools/rust-analyzer/crates/hir/src/from_id.rs index c6446693df3e4..bc025c5ef5cf1 100644 --- a/src/tools/rust-analyzer/crates/hir/src/from_id.rs +++ b/src/tools/rust-analyzer/crates/hir/src/from_id.rs @@ -37,7 +37,6 @@ from_id![ (hir_def::EnumId, crate::Enum), (hir_def::TypeAliasId, crate::TypeAlias), (hir_def::TraitId, crate::Trait), - (hir_def::TraitAliasId, crate::TraitAlias), (hir_def::StaticId, crate::Static), (hir_def::ConstId, crate::Const), (hir_def::FunctionId, crate::Function), @@ -113,7 +112,6 @@ impl From for ModuleDef { ModuleDefId::ConstId(it) => ModuleDef::Const(it.into()), ModuleDefId::StaticId(it) => ModuleDef::Static(it.into()), ModuleDefId::TraitId(it) => ModuleDef::Trait(it.into()), - ModuleDefId::TraitAliasId(it) => ModuleDef::TraitAlias(it.into()), ModuleDefId::TypeAliasId(it) => ModuleDef::TypeAlias(it.into()), ModuleDefId::BuiltinType(it) => ModuleDef::BuiltinType(it.into()), ModuleDefId::MacroId(it) => ModuleDef::Macro(it.into()), @@ -131,7 +129,6 @@ impl From for ModuleDefId { ModuleDef::Const(it) => ModuleDefId::ConstId(it.into()), ModuleDef::Static(it) => ModuleDefId::StaticId(it.into()), ModuleDef::Trait(it) => ModuleDefId::TraitId(it.into()), - ModuleDef::TraitAlias(it) => ModuleDefId::TraitAliasId(it.into()), ModuleDef::TypeAlias(it) => ModuleDefId::TypeAliasId(it.into()), ModuleDef::BuiltinType(it) => ModuleDefId::BuiltinType(it.into()), ModuleDef::Macro(it) => ModuleDefId::MacroId(it.into()), @@ -177,7 +174,6 @@ impl From for GenericDefId { GenericDef::Function(it) => GenericDefId::FunctionId(it.id), GenericDef::Adt(it) => GenericDefId::AdtId(it.into()), GenericDef::Trait(it) => GenericDefId::TraitId(it.id), - GenericDef::TraitAlias(it) => GenericDefId::TraitAliasId(it.id), GenericDef::TypeAlias(it) => GenericDefId::TypeAliasId(it.id), GenericDef::Impl(it) => GenericDefId::ImplId(it.id), GenericDef::Const(it) => GenericDefId::ConstId(it.id), @@ -192,7 +188,6 @@ impl From for GenericDef { GenericDefId::FunctionId(it) => GenericDef::Function(it.into()), GenericDefId::AdtId(it) => GenericDef::Adt(it.into()), GenericDefId::TraitId(it) => GenericDef::Trait(it.into()), - GenericDefId::TraitAliasId(it) => GenericDef::TraitAlias(it.into()), GenericDefId::TypeAliasId(it) => GenericDef::TypeAlias(it.into()), GenericDefId::ImplId(it) => GenericDef::Impl(it.into()), GenericDefId::ConstId(it) => GenericDef::Const(it.into()), diff --git a/src/tools/rust-analyzer/crates/hir/src/has_source.rs b/src/tools/rust-analyzer/crates/hir/src/has_source.rs index 4767d4792e718..ae82275e38736 100644 --- a/src/tools/rust-analyzer/crates/hir/src/has_source.rs +++ b/src/tools/rust-analyzer/crates/hir/src/has_source.rs @@ -14,8 +14,7 @@ use tt::TextRange; use crate::{ Adt, Callee, Const, Enum, ExternCrateDecl, Field, FieldSource, Function, Impl, InlineAsmOperand, Label, LifetimeParam, LocalSource, Macro, Module, Param, SelfParam, Static, - Struct, Trait, TraitAlias, TypeAlias, TypeOrConstParam, Union, Variant, VariantDef, - db::HirDatabase, + Struct, Trait, TypeAlias, TypeOrConstParam, Union, Variant, VariantDef, db::HirDatabase, }; pub trait HasSource { @@ -168,12 +167,6 @@ impl HasSource for Trait { Some(self.id.lookup(db).source(db)) } } -impl HasSource for TraitAlias { - type Ast = 
ast::TraitAlias; - fn source(self, db: &dyn HirDatabase) -> Option> { - Some(self.id.lookup(db).source(db)) - } -} impl HasSource for TypeAlias { type Ast = ast::TypeAlias; fn source(self, db: &dyn HirDatabase) -> Option> { @@ -202,7 +195,7 @@ impl HasSource for Impl { } impl HasSource for TypeOrConstParam { - type Ast = Either; + type Ast = Either; fn source(self, db: &dyn HirDatabase) -> Option> { let child_source = self.id.parent.child_source(db); child_source.map(|it| it.get(self.id.local_id).cloned()).transpose() diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index a323f97997c68..f8dacf0fb863d 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -20,6 +20,12 @@ #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #![recursion_limit = "512"] +#[cfg(feature = "in-rust-tree")] +extern crate rustc_type_ir; + +#[cfg(not(feature = "in-rust-tree"))] +extern crate ra_ap_rustc_type_ir as rustc_type_ir; + mod attrs; mod from_id; mod has_source; @@ -46,7 +52,7 @@ use hir_def::{ CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, SyntheticSyntax, - TraitAliasId, TupleId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, + TupleId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, expr_store::{ExpressionStoreDiagnostics, ExpressionStoreSourceMap}, hir::{ BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, LabelId, Pat, @@ -54,10 +60,13 @@ use hir_def::{ }, item_tree::ImportAlias, layout::{self, ReprOptions, TargetDataLayout}, - nameres::{self, assoc::TraitItems, diagnostics::DefDiagnostic}, + nameres::{ + assoc::TraitItems, + diagnostics::{DefDiagnostic, DefDiagnosticKind}, + }, per_ns::PerNs, resolver::{HasResolver, Resolver}, - signatures::{ImplFlags, StaticFlags, TraitFlags, VariantFields}, + signatures::{ImplFlags, StaticFlags, StructFlags, TraitFlags, VariantFields}, src::HasSource as _, visibility::visibility_from_ast, }; @@ -67,7 +76,7 @@ use hir_expand::{ }; use hir_ty::{ AliasTy, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg, - GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution, + GenericArgData, Interner, ParamKind, ProjectionTy, QuantifiedWhereClause, Scalar, Substitution, TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, TyLoweringDiagnostic, ValueTyDefId, WhereClause, all_super_traits, autoderef, check_orphan_rules, consteval::{ConstExt, try_const_usize, unknown_const_as_generic}, @@ -76,11 +85,13 @@ use hir_ty::{ layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding}, method_resolution, mir::{MutBorrowKind, interpret_mir}, + next_solver::{ + ClauseKind, DbInterner, GenericArgs, infer::InferCtxt, mapping::ChalkToNextSolver, + }, primitive::UintTy, traits::FnTrait, }; use itertools::Itertools; -use nameres::diagnostics::DefDiagnosticKind; use rustc_hash::FxHashSet; use smallvec::SmallVec; use span::{AstIdNode, Edition, FileId}; @@ -103,6 +114,7 @@ pub use crate::{ VisibleTraits, }, }; +use rustc_type_ir::inherent::{IntoKind, SliceLike}; // Be careful with these re-exports. 
// @@ -117,7 +129,7 @@ pub use { cfg::{CfgAtom, CfgExpr, CfgOptions}, hir_def::{ Complete, - ImportPathConfig, + FindPathConfig, attr::{AttrSourceMap, Attrs, AttrsWithOwner}, find_path::PrefixKind, import_map, @@ -187,6 +199,10 @@ pub struct CrateDependency { } impl Crate { + pub fn base(self) -> base_db::Crate { + self.id + } + pub fn origin(self, db: &dyn HirDatabase) -> CrateOrigin { self.id.data(db).origin.clone() } @@ -320,7 +336,6 @@ pub enum ModuleDef { Const(Const), Static(Static), Trait(Trait), - TraitAlias(TraitAlias), TypeAlias(TypeAlias), BuiltinType(BuiltinType), Macro(Macro), @@ -333,7 +348,6 @@ impl_from!( Const, Static, Trait, - TraitAlias, TypeAlias, BuiltinType, Macro @@ -360,7 +374,6 @@ impl ModuleDef { ModuleDef::Const(it) => Some(it.module(db)), ModuleDef::Static(it) => Some(it.module(db)), ModuleDef::Trait(it) => Some(it.module(db)), - ModuleDef::TraitAlias(it) => Some(it.module(db)), ModuleDef::TypeAlias(it) => Some(it.module(db)), ModuleDef::Macro(it) => Some(it.module(db)), ModuleDef::BuiltinType(_) => None, @@ -389,7 +402,6 @@ impl ModuleDef { ModuleDef::Const(it) => it.name(db)?, ModuleDef::Adt(it) => it.name(db), ModuleDef::Trait(it) => it.name(db), - ModuleDef::TraitAlias(it) => it.name(db), ModuleDef::Function(it) => it.name(db), ModuleDef::Variant(it) => it.name(db), ModuleDef::TypeAlias(it) => it.name(db), @@ -412,7 +424,6 @@ impl ModuleDef { Adt::Union(it) => it.id.into(), }, ModuleDef::Trait(it) => it.id.into(), - ModuleDef::TraitAlias(it) => it.id.into(), ModuleDef::Function(it) => it.id.into(), ModuleDef::TypeAlias(it) => it.id.into(), ModuleDef::Module(it) => it.id.into(), @@ -452,7 +463,6 @@ impl ModuleDef { ModuleDef::Module(_) | ModuleDef::Adt(_) | ModuleDef::Trait(_) - | ModuleDef::TraitAlias(_) | ModuleDef::TypeAlias(_) | ModuleDef::Macro(_) | ModuleDef::BuiltinType(_) => None, @@ -465,7 +475,6 @@ impl ModuleDef { ModuleDef::Function(it) => Some(it.into()), ModuleDef::Adt(it) => Some(it.into()), ModuleDef::Trait(it) => Some(it.into()), - ModuleDef::TraitAlias(it) => Some(it.into()), ModuleDef::TypeAlias(it) => Some(it.into()), ModuleDef::Module(_) | ModuleDef::Variant(_) @@ -485,7 +494,6 @@ impl ModuleDef { ModuleDef::Const(it) => it.attrs(db), ModuleDef::Static(it) => it.attrs(db), ModuleDef::Trait(it) => it.attrs(db), - ModuleDef::TraitAlias(it) => it.attrs(db), ModuleDef::TypeAlias(it) => it.attrs(db), ModuleDef::Macro(it) => it.attrs(db), ModuleDef::BuiltinType(_) => return None, @@ -511,7 +519,6 @@ impl HasVisibility for ModuleDef { ModuleDef::Const(it) => it.visibility(db), ModuleDef::Static(it) => it.visibility(db), ModuleDef::Trait(it) => it.visibility(db), - ModuleDef::TraitAlias(it) => it.visibility(db), ModuleDef::TypeAlias(it) => it.visibility(db), ModuleDef::Variant(it) => it.visibility(db), ModuleDef::Macro(it) => it.visibility(db), @@ -949,7 +956,7 @@ impl Module { self, db: &dyn DefDatabase, item: impl Into, - cfg: ImportPathConfig, + cfg: FindPathConfig, ) -> Option { hir_def::find_path::find_path( db, @@ -968,7 +975,7 @@ impl Module { db: &dyn DefDatabase, item: impl Into, prefix_kind: PrefixKind, - cfg: ImportPathConfig, + cfg: FindPathConfig, ) -> Option { hir_def::find_path::find_path(db, item.into().into(), self.into(), prefix_kind, true, cfg) } @@ -1247,6 +1254,25 @@ pub struct Field { pub(crate) id: LocalFieldId, } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct InstantiatedField<'db> { + pub(crate) inner: Field, + pub(crate) args: GenericArgs<'db>, +} + +impl<'db> InstantiatedField<'db> { + /// Returns the 
type as in the signature of the struct. + pub fn ty(&self, db: &'db dyn HirDatabase) -> TypeNs<'db> { + let krate = self.inner.krate(db); + let interner = DbInterner::new_with(db, Some(krate.base()), None); + + let var_id = self.inner.parent.into(); + let field = db.field_types_ns(var_id)[self.inner.id]; + let ty = field.instantiate(interner, self.args); + TypeNs::new(db, var_id, ty) + } +} + #[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)] pub struct TupleField { pub owner: DefWithBodyId, @@ -1362,8 +1388,9 @@ impl Field { } pub fn layout(&self, db: &dyn HirDatabase) -> Result { + let interner = DbInterner::new_with(db, None, None); db.layout_of_ty( - self.ty(db).ty, + self.ty(db).ty.to_nextsolver(interner), db.trait_environment(match hir_def::VariantId::from(self.parent) { hir_def::VariantId::EnumVariantId(id) => { GenericDefId::AdtId(id.lookup(db).parent.into()) @@ -1444,6 +1471,11 @@ impl Struct { pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { db.attrs(self.id.into()).is_unstable() } + + pub fn instantiate_infer<'db>(self, infer_ctxt: &InferCtxt<'db>) -> InstantiatedStruct<'db> { + let args = infer_ctxt.fresh_args_for_item(self.id.into()); + InstantiatedStruct { inner: self, args } + } } impl HasVisibility for Struct { @@ -1454,6 +1486,35 @@ impl HasVisibility for Struct { } } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct InstantiatedStruct<'db> { + pub(crate) inner: Struct, + pub(crate) args: GenericArgs<'db>, +} + +impl<'db> InstantiatedStruct<'db> { + pub fn fields(self, db: &dyn HirDatabase) -> Vec> { + self.inner + .id + .fields(db) + .fields() + .iter() + .map(|(id, _)| InstantiatedField { + inner: Field { parent: self.inner.into(), id }, + args: self.args, + }) + .collect() + } + + pub fn ty(self, db: &'db dyn HirDatabase) -> TypeNs<'db> { + let krate = self.inner.krate(db); + let interner = DbInterner::new_with(db, Some(krate.base()), None); + + let ty = db.ty_ns(self.inner.id.into()); + TypeNs::new(db, self.inner.id, ty.instantiate(interner, self.args)) + } +} + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct Union { pub(crate) id: UnionId, @@ -1598,6 +1659,22 @@ impl HasVisibility for Enum { } } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct InstantiatedEnum<'db> { + pub(crate) inner: Enum, + pub(crate) args: GenericArgs<'db>, +} + +impl<'db> InstantiatedEnum<'db> { + pub fn ty(self, db: &'db dyn HirDatabase) -> TypeNs<'db> { + let krate = self.inner.krate(db); + let interner = DbInterner::new_with(db, Some(krate.base()), None); + + let ty = db.ty_ns(self.inner.id.into()); + TypeNs::new(db, self.inner.id, ty.instantiate(interner, self.args)) + } +} + impl From<&Variant> for DefWithBodyId { fn from(&v: &Variant) -> Self { DefWithBodyId::VariantId(v.into()) @@ -1673,6 +1750,38 @@ impl Variant { pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { db.attrs(self.id.into()).is_unstable() } + + pub fn instantiate_infer<'db>(self, infer_ctxt: &InferCtxt<'db>) -> InstantiatedVariant<'db> { + let args = + infer_ctxt.fresh_args_for_item(self.parent_enum(infer_ctxt.interner.db()).id.into()); + InstantiatedVariant { inner: self, args } + } +} + +// FIXME: Rename to `EnumVariant` +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct InstantiatedVariant<'db> { + pub(crate) inner: Variant, + pub(crate) args: GenericArgs<'db>, +} + +impl<'db> InstantiatedVariant<'db> { + pub fn parent_enum(self, db: &dyn HirDatabase) -> InstantiatedEnum<'db> { + InstantiatedEnum { inner: self.inner.id.lookup(db).parent.into(), args: 
self.args } + } + + pub fn fields(self, db: &dyn HirDatabase) -> Vec> { + self.inner + .id + .fields(db) + .fields() + .iter() + .map(|(id, _)| InstantiatedField { + inner: Field { parent: self.inner.into(), id }, + args: self.args, + }) + .collect() + } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -1709,12 +1818,15 @@ impl Adt { } pub fn layout(self, db: &dyn HirDatabase) -> Result { + let env = db.trait_environment(self.into()); + let interner = DbInterner::new_with(db, Some(env.krate), env.block); db.layout_of_adt( self.into(), TyBuilder::adt(db, self.into()) .fill_with_defaults(db, || TyKind::Error.intern(Interner)) - .build_into_subst(), - db.trait_environment(self.into()), + .build_into_subst() + .to_nextsolver(interner), + env, ) .map(|layout| Layout(layout, db.target_data_layout(self.krate(db).id).unwrap())) } @@ -2430,11 +2542,28 @@ impl Function { caller: Option, call_edition: Edition, ) -> bool { - let target_features = caller - .map(|caller| hir_ty::TargetFeatures::from_attrs(&db.attrs(caller.id.into()))) - .unwrap_or_default(); + let (target_features, target_feature_is_safe_in_target) = caller + .map(|caller| { + let target_features = + hir_ty::TargetFeatures::from_attrs(&db.attrs(caller.id.into())); + let target_feature_is_safe_in_target = + match &caller.krate(db).id.workspace_data(db).target { + Ok(target) => hir_ty::target_feature_is_safe_in_target(target), + Err(_) => hir_ty::TargetFeatureIsSafeInTarget::No, + }; + (target_features, target_feature_is_safe_in_target) + }) + .unwrap_or_else(|| { + (hir_ty::TargetFeatures::default(), hir_ty::TargetFeatureIsSafeInTarget::No) + }); matches!( - hir_ty::is_fn_unsafe_to_call(db, self.id, &target_features, call_edition), + hir_ty::is_fn_unsafe_to_call( + db, + self.id, + &target_features, + call_edition, + target_feature_is_safe_in_target + ), hir_ty::Unsafety::Unsafe ) } @@ -2931,29 +3060,6 @@ impl HasVisibility for Trait { } } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct TraitAlias { - pub(crate) id: TraitAliasId, -} - -impl TraitAlias { - pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: self.id.lookup(db).container } - } - - pub fn name(self, db: &dyn HirDatabase) -> Name { - db.trait_alias_signature(self.id).name.clone() - } -} - -impl HasVisibility for TraitAlias { - fn visibility(&self, db: &dyn HirDatabase) -> Visibility { - let loc = self.id.lookup(db); - let source = loc.source(db); - visibility_from_ast(db, self.id, source.map(|src| src.visibility())) - } -} - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct TypeAlias { pub(crate) id: TypeAliasId, @@ -3576,7 +3682,6 @@ pub enum GenericDef { Function(Function), Adt(Adt), Trait(Trait), - TraitAlias(TraitAlias), TypeAlias(TypeAlias), Impl(Impl), // consts can have type parameters from their parents (i.e. 
associated consts of traits) @@ -3587,7 +3692,6 @@ impl_from!( Function, Adt(Struct, Enum, Union), Trait, - TraitAlias, TypeAlias, Impl, Const, @@ -3637,7 +3741,6 @@ impl GenericDef { GenericDef::Function(it) => it.id.into(), GenericDef::Adt(it) => it.into(), GenericDef::Trait(it) => it.id.into(), - GenericDef::TraitAlias(it) => it.id.into(), GenericDef::TypeAlias(it) => it.id.into(), GenericDef::Impl(it) => it.id.into(), GenericDef::Const(it) => it.id.into(), @@ -3662,7 +3765,6 @@ impl GenericDef { GenericDefId::FunctionId(it) => db.function_signature_with_source_map(it).1, GenericDefId::ImplId(it) => db.impl_signature_with_source_map(it).1, GenericDefId::StaticId(_) => return, - GenericDefId::TraitAliasId(it) => db.trait_alias_signature_with_source_map(it).1, GenericDefId::TraitId(it) => db.trait_signature_with_source_map(it).1, GenericDefId::TypeAliasId(it) => db.type_alias_signature_with_source_map(it).1, }; @@ -3672,7 +3774,7 @@ impl GenericDef { push_ty_diagnostics( db, acc, - db.generic_predicates_without_parent_with_diagnostics(def).1, + db.generic_predicates_without_parent_with_diagnostics_ns(def).1, &source_map, ); for (param_id, param) in generics.iter_type_or_consts() { @@ -3699,7 +3801,6 @@ impl GenericDef { GenericDef::Adt(Adt::Enum(_)) => "enum", GenericDef::Adt(Adt::Union(_)) => "union", GenericDef::Trait(_) => "trait", - GenericDef::TraitAlias(_) => "trait alias", GenericDef::TypeAlias(_) => "type alias", GenericDef::Impl(_) => "impl", GenericDef::Const(_) => "constant", @@ -3976,49 +4077,25 @@ impl DeriveHelper { } } -// FIXME: Wrong name? This is could also be a registered attribute #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct BuiltinAttr { - krate: Option, idx: u32, } impl BuiltinAttr { - // FIXME: consider crates\hir_def\src\nameres\attr_resolution.rs? - pub(crate) fn by_name(db: &dyn HirDatabase, krate: Crate, name: &str) -> Option { - if let builtin @ Some(_) = Self::builtin(name) { - return builtin; - } - let idx = crate_def_map(db, krate.id) - .registered_attrs() - .iter() - .position(|it| it.as_str() == name)? 
as u32; - Some(BuiltinAttr { krate: Some(krate.id), idx }) - } - fn builtin(name: &str) -> Option { hir_expand::inert_attr_macro::find_builtin_attr_idx(&Symbol::intern(name)) - .map(|idx| BuiltinAttr { krate: None, idx: idx as u32 }) + .map(|idx| BuiltinAttr { idx: idx as u32 }) } - pub fn name(&self, db: &dyn HirDatabase) -> Name { - match self.krate { - Some(krate) => Name::new_symbol_root( - crate_def_map(db, krate).registered_attrs()[self.idx as usize].clone(), - ), - None => Name::new_symbol_root(Symbol::intern( - hir_expand::inert_attr_macro::INERT_ATTRIBUTES[self.idx as usize].name, - )), - } + pub fn name(&self) -> Name { + Name::new_symbol_root(Symbol::intern( + hir_expand::inert_attr_macro::INERT_ATTRIBUTES[self.idx as usize].name, + )) } - pub fn template(&self, _: &dyn HirDatabase) -> Option { - match self.krate { - Some(_) => None, - None => { - Some(hir_expand::inert_attr_macro::INERT_ATTRIBUTES[self.idx as usize].template) - } - } + pub fn template(&self) -> Option { + Some(hir_expand::inert_attr_macro::INERT_ATTRIBUTES[self.idx as usize].template) } } @@ -4154,8 +4231,8 @@ impl TypeParam { pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> { let resolver = self.id.parent().resolver(db); - let ty = - TyKind::Placeholder(hir_ty::to_placeholder_idx(db, self.id.into())).intern(Interner); + let ty = TyKind::Placeholder(hir_ty::to_placeholder_idx_no_index(db, self.id.into())) + .intern(Interner); Type::new_with_resolver_inner(db, &resolver, ty) } @@ -4163,12 +4240,10 @@ impl TypeParam { /// parameter, not additional bounds that might be added e.g. by a method if /// the parameter comes from an impl! pub fn trait_bounds(self, db: &dyn HirDatabase) -> Vec { - db.generic_predicates_for_param(self.id.parent(), self.id.into(), None) + db.generic_predicates_for_param_ns(self.id.parent(), self.id.into(), None) .iter() - .filter_map(|pred| match &pred.skip_binders().skip_binders() { - hir_ty::WhereClause::Implemented(trait_ref) => { - Some(Trait::from(trait_ref.hir_trait_id())) - } + .filter_map(|pred| match &pred.kind().skip_binder() { + ClauseKind::Trait(trait_ref) => Some(Trait::from(trait_ref.def_id().0)), _ => None, }) .collect() @@ -4424,14 +4499,13 @@ impl Impl { } pub fn trait_(self, db: &dyn HirDatabase) -> Option { - let trait_ref = db.impl_trait(self.id)?; - let id = trait_ref.skip_binders().hir_trait_id(); - Some(Trait { id }) + let trait_ref = db.impl_trait_ns(self.id)?; + let id = trait_ref.skip_binder().def_id; + Some(Trait { id: id.0 }) } pub fn trait_ref(self, db: &dyn HirDatabase) -> Option> { - let substs = TyBuilder::placeholder_subst(db, self.id); - let trait_ref = db.impl_trait(self.id)?.substitute(Interner, &substs); + let trait_ref = db.impl_trait_ns(self.id)?.instantiate_identity(); let resolver = self.id.resolver(db); Some(TraitRef::new_with_resolver(db, &resolver, trait_ref)) } @@ -4500,7 +4574,7 @@ impl Impl { #[derive(Clone, PartialEq, Eq, Debug, Hash)] pub struct TraitRef<'db> { env: Arc, - trait_ref: hir_ty::TraitRef, + trait_ref: hir_ty::next_solver::TraitRef<'db>, _pd: PhantomCovariantLifetime<'db>, } @@ -4508,7 +4582,7 @@ impl<'db> TraitRef<'db> { pub(crate) fn new_with_resolver( db: &'db dyn HirDatabase, resolver: &Resolver<'_>, - trait_ref: hir_ty::TraitRef, + trait_ref: hir_ty::next_solver::TraitRef<'db>, ) -> Self { let env = resolver .generic_def() @@ -4517,25 +4591,22 @@ impl<'db> TraitRef<'db> { } pub fn trait_(&self) -> Trait { - let id = self.trait_ref.hir_trait_id(); - Trait { id } + Trait { id: self.trait_ref.def_id.0 } } - pub fn 
self_ty(&self) -> Type<'_> { - let ty = self.trait_ref.self_type_parameter(Interner); - Type { env: self.env.clone(), ty, _pd: PhantomCovariantLifetime::new() } + pub fn self_ty(&self) -> TypeNs<'_> { + let ty = self.trait_ref.self_ty(); + TypeNs { env: self.env.clone(), ty, _pd: PhantomCovariantLifetime::new() } } /// Returns `idx`-th argument of this trait reference if it is a type argument. Note that the /// first argument is the `Self` type. - pub fn get_type_argument(&self, idx: usize) -> Option> { - self.trait_ref - .substitution - .as_slice(Interner) - .get(idx) - .and_then(|arg| arg.ty(Interner)) - .cloned() - .map(|ty| Type { env: self.env.clone(), ty, _pd: PhantomCovariantLifetime::new() }) + pub fn get_type_argument(&self, idx: usize) -> Option> { + self.trait_ref.args.as_slice().get(idx).and_then(|arg| arg.ty()).map(|ty| TypeNs { + env: self.env.clone(), + ty, + _pd: PhantomCovariantLifetime::new(), + }) } } @@ -4591,7 +4662,7 @@ impl Closure { .iter() .map(|capture| Type { env: db.trait_environment_for_body(owner), - ty: capture.ty(&self.subst), + ty: capture.ty(db, &self.subst), _pd: PhantomCovariantLifetime::new(), }) .collect() @@ -4852,42 +4923,80 @@ impl<'db> Type<'db> { } pub fn contains_reference(&self, db: &'db dyn HirDatabase) -> bool { - return go(db, self.env.krate, &self.ty); + return go(db, &self.ty); + + fn is_phantom_data(db: &dyn HirDatabase, adt_id: AdtId) -> bool { + match adt_id { + hir_def::AdtId::StructId(s) => { + let flags = db.struct_signature(s).flags; + flags.contains(StructFlags::IS_PHANTOM_DATA) + } + hir_def::AdtId::UnionId(_) => false, + hir_def::AdtId::EnumId(_) => false, + } + } - fn go(db: &dyn HirDatabase, krate: base_db::Crate, ty: &Ty) -> bool { + fn go(db: &dyn HirDatabase, ty: &Ty) -> bool { match ty.kind(Interner) { // Reference itself TyKind::Ref(_, _, _) => true, // For non-phantom_data adts we check variants/fields as well as generic parameters - TyKind::Adt(adt_id, substitution) - if !db.adt_datum(krate, *adt_id).flags.phantom_data => - { - let adt_datum = &db.adt_datum(krate, *adt_id); - let adt_datum_bound = - adt_datum.binders.clone().substitute(Interner, substitution); - adt_datum_bound - .variants + TyKind::Adt(adt_id, substitution) if !is_phantom_data(db, adt_id.0) => { + let _variant_id_to_fields = |id: VariantId| { + let variant_data = &id.fields(db); + if variant_data.fields().is_empty() { + vec![] + } else { + let field_types = db.field_types(id); + variant_data + .fields() + .iter() + .map(|(idx, _)| { + field_types[idx].clone().substitute(Interner, substitution) + }) + .filter(|it| !it.contains_unknown()) + .collect() + } + }; + let variant_id_to_fields = |_: VariantId| vec![]; + + let variants = match adt_id.0 { + hir_def::AdtId::StructId(id) => { + vec![variant_id_to_fields(id.into())] + } + hir_def::AdtId::EnumId(id) => id + .enum_variants(db) + .variants + .iter() + .map(|&(variant_id, _, _)| variant_id_to_fields(variant_id.into())) + .collect(), + hir_def::AdtId::UnionId(id) => { + vec![variant_id_to_fields(id.into())] + } + }; + + variants .into_iter() - .flat_map(|variant| variant.fields.into_iter()) - .any(|ty| go(db, krate, &ty)) + .flat_map(|variant| variant.into_iter()) + .any(|ty| go(db, &ty)) || substitution .iter(Interner) .filter_map(|x| x.ty(Interner)) - .any(|ty| go(db, krate, ty)) + .any(|ty| go(db, ty)) } // And for `PhantomData`, we check `T`. 
TyKind::Adt(_, substitution) | TyKind::Tuple(_, substitution) | TyKind::OpaqueType(_, substitution) | TyKind::AssociatedType(_, substitution) - | TyKind::FnDef(_, substitution) => substitution - .iter(Interner) - .filter_map(|x| x.ty(Interner)) - .any(|ty| go(db, krate, ty)), + | TyKind::Alias(AliasTy::Projection(ProjectionTy { substitution, .. })) + | TyKind::FnDef(_, substitution) => { + substitution.iter(Interner).filter_map(|x| x.ty(Interner)).any(|ty| go(db, ty)) + } // For `[T]` or `*T` we check `T` - TyKind::Array(ty, _) | TyKind::Slice(ty) | TyKind::Raw(_, ty) => go(db, krate, ty), + TyKind::Array(ty, _) | TyKind::Slice(ty) | TyKind::Raw(_, ty) => go(db, ty), // Consider everything else as not reference _ => false, @@ -5072,7 +5181,7 @@ impl<'db> Type<'db> { binders: CanonicalVarKinds::empty(Interner), }; - db.trait_solve(self.env.krate, self.env.block, goal).is_some() + !db.trait_solve(self.env.krate, self.env.block, goal).no_solution() } pub fn normalize_trait_assoc_type( @@ -5520,7 +5629,11 @@ impl<'db> Type<'db> { .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d)); _ = method_resolution::iterate_method_candidates_dyn( - &canonical, + &canonical.to_nextsolver(DbInterner::new_with( + db, + Some(environment.krate), + environment.block, + )), db, environment, traits_in_scope, @@ -5607,7 +5720,11 @@ impl<'db> Type<'db> { .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d)); _ = method_resolution::iterate_path_candidates( - &canonical, + &canonical.to_nextsolver(DbInterner::new_with( + db, + Some(environment.krate), + environment.block, + )), db, environment, traits_in_scope, @@ -5720,7 +5837,11 @@ impl<'db> Type<'db> { cb(type_.derived(ty.clone())); walk_substs(db, type_, substs, cb); } - TyKind::AssociatedType(_, substs) => { + TyKind::AssociatedType(_, substs) + | TyKind::Alias(AliasTy::Projection(hir_ty::ProjectionTy { + substitution: substs, + .. 
+ })) => { if ty.associated_type_parent_trait(db).is_some() { cb(type_.derived(ty.clone())); } @@ -5803,7 +5924,7 @@ impl<'db> Type<'db> { pub fn as_type_param(&self, db: &'db dyn HirDatabase) -> Option { match self.ty.kind(Interner) { TyKind::Placeholder(p) => Some(TypeParam { - id: TypeParamId::from_unchecked(hir_ty::from_placeholder_idx(db, *p)), + id: TypeParamId::from_unchecked(hir_ty::from_placeholder_idx(db, *p).0), }), _ => None, } @@ -5818,7 +5939,8 @@ impl<'db> Type<'db> { } pub fn layout(&self, db: &'db dyn HirDatabase) -> Result { - db.layout_of_ty(self.ty.clone(), self.env.clone()) + let interner = DbInterner::new_with(db, None, None); + db.layout_of_ty(self.ty.to_nextsolver(interner), self.env.clone()) .map(|layout| Layout(layout, db.target_data_layout(self.env.krate).unwrap())) } @@ -5827,6 +5949,51 @@ impl<'db> Type<'db> { } } +#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub struct TypeNs<'db> { + env: Arc, + ty: hir_ty::next_solver::Ty<'db>, + _pd: PhantomCovariantLifetime<'db>, +} + +impl<'db> TypeNs<'db> { + fn new( + db: &'db dyn HirDatabase, + lexical_env: impl HasResolver, + ty: hir_ty::next_solver::Ty<'db>, + ) -> Self { + let resolver = lexical_env.resolver(db); + let environment = resolver + .generic_def() + .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d)); + TypeNs { env: environment, ty, _pd: PhantomCovariantLifetime::new() } + } + + // FIXME: Find better API that also handles const generics + pub fn impls_trait(&self, infcx: InferCtxt<'db>, trait_: Trait, args: &[TypeNs<'db>]) -> bool { + let args = GenericArgs::new_from_iter( + infcx.interner, + [self.ty].into_iter().chain(args.iter().map(|t| t.ty)).map(|t| t.into()), + ); + let trait_ref = hir_ty::next_solver::TraitRef::new(infcx.interner, trait_.id.into(), args); + + let pred_kind = rustc_type_ir::Binder::dummy(rustc_type_ir::PredicateKind::Clause( + rustc_type_ir::ClauseKind::Trait(rustc_type_ir::TraitPredicate { + trait_ref, + polarity: rustc_type_ir::PredicatePolarity::Positive, + }), + )); + let predicate = hir_ty::next_solver::Predicate::new(infcx.interner, pred_kind); + let goal = hir_ty::next_solver::Goal::new( + infcx.interner, + hir_ty::next_solver::ParamEnv::empty(), + predicate, + ); + let res = hir_ty::traits::next_trait_solve_in_ctxt(&infcx, goal); + res.map_or(false, |res| matches!(res.1, rustc_type_ir::solve::Certainty::Yes)) + } +} + #[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)] pub struct InlineAsmOperand { owner: DefWithBodyId, @@ -6238,12 +6405,6 @@ impl HasCrate for Trait { } } -impl HasCrate for TraitAlias { - fn krate(&self, db: &dyn HirDatabase) -> Crate { - self.module(db).krate() - } -} - impl HasCrate for Static { fn krate(&self, db: &dyn HirDatabase) -> Crate { self.module(db).krate() @@ -6337,12 +6498,6 @@ impl HasContainer for Trait { } } -impl HasContainer for TraitAlias { - fn container(&self, db: &dyn HirDatabase) -> ItemContainer { - ItemContainer::Module(Module { id: self.id.lookup(db).container }) - } -} - impl HasContainer for ExternBlock { fn container(&self, db: &dyn HirDatabase) -> ItemContainer { ItemContainer::Module(Module { id: self.id.lookup(db).container }) @@ -6476,3 +6631,6 @@ pub fn resolve_absolute_path<'a, I: Iterator + Clone + 'a>( fn as_name_opt(name: Option) -> Name { name.map_or_else(Name::missing, |name| name.as_name()) } + +pub use hir_ty::next_solver; +pub use hir_ty::setup_tracing; diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index 
d207305b4c61f..5af8659ca669a 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -28,13 +28,13 @@ use hir_expand::{ mod_path::{ModPath, PathKind}, name::AsName, }; -use hir_ty::diagnostics::unsafe_operations_for_body; +use hir_ty::diagnostics::{unsafe_operations, unsafe_operations_for_body}; use intern::{Interned, Symbol, sym}; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::{SmallVec, smallvec}; use span::{Edition, FileId, SyntaxContext}; -use stdx::TupleExt; +use stdx::{TupleExt, always}; use syntax::{ AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize, @@ -46,8 +46,8 @@ use crate::{ Adjust, Adjustment, Adt, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const, ConstParam, Crate, DefWithBody, DeriveHelper, Enum, Field, Function, GenericSubstitution, HasSource, Impl, InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro, Module, ModuleDef, - Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule, Trait, TraitAlias, TupleField, - Type, TypeAlias, TypeParam, Union, Variant, VariantDef, + Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule, Trait, TupleField, Type, + TypeAlias, TypeParam, Union, Variant, VariantDef, db::HirDatabase, semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, source_analyzer::{SourceAnalyzer, name_hygiene, resolve_hir_path}, @@ -85,8 +85,7 @@ impl PathResolution { | ModuleDef::Function(_) | ModuleDef::Module(_) | ModuleDef::Static(_) - | ModuleDef::Trait(_) - | ModuleDef::TraitAlias(_), + | ModuleDef::Trait(_), ) => None, PathResolution::Def(ModuleDef::TypeAlias(alias)) => { Some(TypeNs::TypeAliasId((*alias).into())) @@ -303,6 +302,13 @@ impl Semantics<'_, DB> { self.imp.hir_file_to_module_defs(file.into()) } + pub fn is_nightly(&self, krate: Crate) -> bool { + let toolchain = self.db.toolchain_channel(krate.into()); + // `toolchain == None` means we're in some detached files. Since we have no information on + // the toolchain being used, let's just allow unstable items to be listed. + matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None) + } + pub fn to_adt_def(&self, a: &ast::Adt) -> Option { self.imp.to_def(a) } @@ -343,10 +349,6 @@ impl Semantics<'_, DB> { self.imp.to_def(s) } - pub fn to_trait_alias_def(&self, t: &ast::TraitAlias) -> Option { - self.imp.to_def(t) - } - pub fn to_trait_def(&self, t: &ast::Trait) -> Option { self.imp.to_def(t) } @@ -1241,29 +1243,27 @@ impl<'db> SemanticsImpl<'db> { adt, )) })?; - let mut res = None; for (_, derive_attr, derives) in derives { // as there may be multiple derives registering the same helper // name, we gotta make sure to call this for all of them! // FIXME: We need to call `f` for all of them as well though! - res = res.or(process_expansion_for_token( - ctx, - &mut stack, - derive_attr, - )); + process_expansion_for_token(ctx, &mut stack, derive_attr); for derive in derives.into_iter().flatten() { - res = res - .or(process_expansion_for_token(ctx, &mut stack, derive)); + process_expansion_for_token(ctx, &mut stack, derive); } } // remove all tokens that are within the derives expansion filter_duplicates(tokens, adt.syntax().text_range()); - Some(res) + Some(()) }); // if we found derives, we can early exit. 
There is no way we can be in any // macro call at this point given we are not in a token tree - if let Some(res) = res { - return res; + if let Some(()) = res { + // Note: derives do not remap the original token. Furthermore, we want + // the original token to be before the derives in the list, because if they + // upmap to the same token and we deduplicate them (e.g. in rename), we + // want the original token to remain, not the derive. + return None; } } // Then check for token trees, that means we are either in a function-like macro or @@ -1772,6 +1772,25 @@ impl<'db> SemanticsImpl<'db> { res } + pub fn get_unsafe_ops_for_unsafe_block(&self, block: ast::BlockExpr) -> Vec { + always!(block.unsafe_token().is_some()); + let block = self.wrap_node_infile(ast::Expr::from(block)); + let Some(def) = self.body_for(block.syntax()) else { return Vec::new() }; + let def = def.into(); + let (body, source_map) = self.db.body_with_source_map(def); + let infer = self.db.infer(def); + let Some(ExprOrPatId::ExprId(block)) = source_map.node_expr(block.as_ref()) else { + return Vec::new(); + }; + let mut res = Vec::default(); + unsafe_operations(self.db, &infer, def, &body, block, &mut |node, _| { + if let Ok(node) = source_map.expr_or_pat_syntax(node) { + res.push(node); + } + }); + res + } + pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool { let Some(mac) = self.resolve_macro_call(macro_call) else { return false }; if mac.is_asm_like(self.db) { @@ -2140,7 +2159,6 @@ to_def_impls![ (crate::Enum, ast::Enum, enum_to_def), (crate::Union, ast::Union, union_to_def), (crate::Trait, ast::Trait, trait_to_def), - (crate::TraitAlias, ast::TraitAlias, trait_alias_to_def), (crate::Impl, ast::Impl, impl_to_def), (crate::TypeAlias, ast::TypeAlias, type_alias_to_def), (crate::Const, ast::Const, const_to_def), @@ -2249,6 +2267,11 @@ impl<'db> SemanticsScope<'db> { } } + /// Checks if a trait is in scope, either because of an import or because we're in an impl of it. + pub fn can_use_trait_methods(&self, t: Trait) -> bool { + self.resolver.traits_in_scope(self.db).contains(&t.id) + } + /// Resolve a path as-if it was written at the given scope. This is /// necessary a heuristic, as it doesn't take hygiene into account. pub fn speculative_resolve(&self, ast_path: &ast::Path) -> Option { @@ -2296,18 +2319,19 @@ impl<'db> SemanticsScope<'db> { /// Iterates over associated types that may be specified after the given path (using /// `Ty::Assoc` syntax). 
- pub fn assoc_type_shorthand_candidates( + pub fn assoc_type_shorthand_candidates( &self, resolution: &PathResolution, - mut cb: impl FnMut(&Name, TypeAlias) -> Option, - ) -> Option { - let def = self.resolver.generic_def()?; - hir_ty::associated_type_shorthand_candidates( - self.db, - def, - resolution.in_type_ns()?, - |name, id| cb(name, id.into()), - ) + mut cb: impl FnMut(TypeAlias), + ) { + let (Some(def), Some(resolution)) = (self.resolver.generic_def(), resolution.in_type_ns()) + else { + return; + }; + hir_ty::associated_type_shorthand_candidates(self.db, def, resolution, |_, id| { + cb(id.into()); + false + }); } pub fn generic_def(&self) -> Option { diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs index e7db93d375d33..5019a5987e513 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs @@ -154,9 +154,6 @@ impl ChildBySource for ItemScope { } ModuleDefId::StaticId(id) => insert_item_loc(db, map, file_id, id, keys::STATIC), ModuleDefId::TraitId(id) => insert_item_loc(db, map, file_id, id, keys::TRAIT), - ModuleDefId::TraitAliasId(id) => { - insert_item_loc(db, map, file_id, id, keys::TRAIT_ALIAS) - } ModuleDefId::AdtId(adt) => match adt { AdtId::StructId(id) => insert_item_loc(db, map, file_id, id, keys::STRUCT), AdtId::UnionId(id) => insert_item_loc(db, map, file_id, id, keys::UNION), diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs index 71ee0f6938960..44df4d8fc8c80 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs @@ -89,8 +89,8 @@ use either::Either; use hir_def::{ AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, - Lookup, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, - UnionId, UseId, VariantId, + Lookup, MacroId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, UnionId, + UseId, VariantId, dyn_map::{ DynMap, keys::{self, Key}, @@ -252,12 +252,6 @@ impl SourceToDefCtx<'_, '_> { pub(super) fn trait_to_def(&mut self, src: InFile<&ast::Trait>) -> Option { self.to_def(src, keys::TRAIT) } - pub(super) fn trait_alias_to_def( - &mut self, - src: InFile<&ast::TraitAlias>, - ) -> Option { - self.to_def(src, keys::TRAIT_ALIAS) - } pub(super) fn impl_to_def(&mut self, src: InFile<&ast::Impl>) -> Option { self.to_def(src, keys::IMPL) } @@ -555,9 +549,6 @@ impl SourceToDefCtx<'_, '_> { } ast::Item::Enum(it) => this.enum_to_def(InFile::new(file_id, it)).map(Into::into), ast::Item::Trait(it) => this.trait_to_def(InFile::new(file_id, it)).map(Into::into), - ast::Item::TraitAlias(it) => { - this.trait_alias_to_def(InFile::new(file_id, it)).map(Into::into) - } ast::Item::TypeAlias(it) => { this.type_alias_to_def(InFile::new(file_id, it)).map(Into::into) } @@ -636,9 +627,6 @@ impl SourceToDefCtx<'_, '_> { ast::Item::TypeAlias(it) => ChildContainer::GenericDefId( self.type_alias_to_def(container.with_value(it))?.into(), ), - ast::Item::TraitAlias(it) => ChildContainer::GenericDefId( - self.trait_alias_to_def(container.with_value(it))?.into(), - ), ast::Item::Struct(it) => { let def = 
self.struct_to_def(container.with_value(it))?; let is_in_body = it.field_list().is_some_and(|it| {
diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
index d25fb1d8cdb7e..539b25387aef0 100644
--- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
@@ -10,10 +10,11 @@ use std::iter::{self, once};
 use crate::{
     Adt, AssocItem, BindingMode, BuiltinAttr, BuiltinType, Callable, Const, DeriveHelper, Field,
     Function, GenericSubstitution, Local, Macro, ModuleDef, Static, Struct, ToolModule, Trait,
-    TraitAlias, TupleField, Type, TypeAlias, Variant,
+    TupleField, Type, TypeAlias, Variant,
     db::HirDatabase,
     semantics::{PathResolution, PathResolutionPerNs},
 };
+use base_db::salsa;
 use either::Either;
 use hir_def::{
     AdtId, AssocItemId, CallableDefId, ConstId, DefWithBodyId, FieldId, FunctionId, GenericDefId,
@@ -1061,8 +1062,7 @@ impl<'db> SourceAnalyzer<'db> {
         // in this case we have to check for inert/builtin attributes and tools and prioritize
         // resolution of attributes over other namespaces
         if let Some(name_ref) = path.as_single_name_ref() {
-            let builtin =
-                BuiltinAttr::by_name(db, self.resolver.krate().into(), &name_ref.text());
+            let builtin = BuiltinAttr::builtin(&name_ref.text());
             if builtin.is_some() {
                 return builtin.map(|it| (PathResolution::BuiltinAttr(it), None));
             }
@@ -1282,7 +1282,7 @@ impl<'db> SourceAnalyzer<'db> {
         {
             let mut is_unsafe = false;
             let mut walk_expr = |expr_id| {
-                unsafe_operations(db, infer, def, body, expr_id, &mut |inside_unsafe_block| {
+                unsafe_operations(db, infer, def, body, expr_id, &mut |_, inside_unsafe_block| {
                     is_unsafe |= inside_unsafe_block == InsideUnsafeBlock::No
                 })
             };
@@ -1587,19 +1587,20 @@ fn resolve_hir_path_(
         TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
         TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
         TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
-        TypeNs::TraitAliasId(it) => PathResolution::Def(TraitAlias::from(it).into()),
         TypeNs::ModuleId(it) => PathResolution::Def(ModuleDef::Module(it.into())),
     };
     match unresolved {
         Some(unresolved) => resolver
             .generic_def()
             .and_then(|def| {
-                hir_ty::associated_type_shorthand_candidates(
-                    db,
-                    def,
-                    res.in_type_ns()?,
-                    |name, id| (name == unresolved.name).then_some(id),
-                )
+                salsa::attach(db, || {
+                    hir_ty::associated_type_shorthand_candidates(
+                        db,
+                        def,
+                        res.in_type_ns()?,
+                        |name, _| name == unresolved.name,
+                    )
+                })
             })
             .map(TypeAlias::from)
             .map(Into::into)
@@ -1737,7 +1738,6 @@ fn resolve_hir_path_qualifier(
         TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
         TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
         TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
-        TypeNs::TraitAliasId(it) => PathResolution::Def(TraitAlias::from(it).into()),
         TypeNs::ModuleId(it) => PathResolution::Def(ModuleDef::Module(it.into())),
     };
     match unresolved {
@@ -1748,7 +1748,7 @@ fn resolve_hir_path_qualifier(
                     db,
                     def,
                     res.in_type_ns()?,
-                    |name, id| (name == unresolved.name).then_some(id),
+                    |name, _| name == unresolved.name,
                 )
             })
             .map(TypeAlias::from)
diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
index dca10193e29bf..d8c624e5c6896 100644
--- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
@@ -148,9 +148,6 @@ impl<'a> SymbolCollector<'a> {
                     let trait_do_not_complete = this.push_decl(id, name, false, None);
                     this.collect_from_trait(id, trait_do_not_complete);
                 }
-                ModuleDefId::TraitAliasId(id) => {
-                    this.push_decl(id, name, false, None);
-                }
                 ModuleDefId::TypeAliasId(id) => {
                     this.push_decl(id, name, false, None);
                 }
diff --git a/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs b/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs
index 78f534d014b90..e56f9e91e3f33 100644
--- a/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs
@@ -1,6 +1,6 @@
 //! Type tree for term search
 
-use hir_def::ImportPathConfig;
+use hir_def::FindPathConfig;
 use hir_expand::mod_path::ModPath;
 use hir_ty::{
     db::HirDatabase,
@@ -18,7 +18,7 @@ use crate::{
 fn mod_item_path(
     sema_scope: &SemanticsScope<'_>,
     def: &ModuleDef,
-    cfg: ImportPathConfig,
+    cfg: FindPathConfig,
 ) -> Option {
     let db = sema_scope.db;
     let m = sema_scope.module();
@@ -29,7 +29,7 @@ fn mod_item_path(
 fn mod_item_path_str(
     sema_scope: &SemanticsScope<'_>,
     def: &ModuleDef,
-    cfg: ImportPathConfig,
+    cfg: FindPathConfig,
     edition: Edition,
 ) -> Result {
     let path = mod_item_path(sema_scope, def, cfg);
@@ -103,7 +103,7 @@ impl<'db> Expr<'db> {
         &self,
         sema_scope: &SemanticsScope<'db>,
         many_formatter: &mut dyn FnMut(&Type<'db>) -> String,
-        cfg: ImportPathConfig,
+        cfg: FindPathConfig,
         display_target: DisplayTarget,
     ) -> Result {
         let db = sema_scope.db;
@@ -380,7 +380,7 @@ impl<'db> Expr<'db> {
 fn container_name(
     container: AssocItemContainer,
     sema_scope: &SemanticsScope<'_>,
-    cfg: ImportPathConfig,
+    cfg: FindPathConfig,
     edition: Edition,
     display_target: DisplayTarget,
 ) -> Result {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs b/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs
index 57ced8d8534b2..597d035ebd857 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs
@@ -4,8 +4,12 @@
 //! module, and we use to statically check that we only produce snippet
 //! assists if we are allowed to.
 
-use hir::ImportPathConfig;
-use ide_db::{SnippetCap, assists::ExprFillDefaultMode, imports::insert_use::InsertUseConfig};
+use hir::FindPathConfig;
+use ide_db::{
+    SnippetCap,
+    assists::ExprFillDefaultMode,
+    imports::{import_assets::ImportPathConfig, insert_use::InsertUseConfig},
+};
 
 use crate::AssistKind;
 
@@ -31,7 +35,15 @@ impl AssistConfig {
             prefer_no_std: self.prefer_no_std,
             prefer_prelude: self.prefer_prelude,
             prefer_absolute: self.prefer_absolute,
-            allow_unstable: true,
+        }
+    }
+
+    pub fn find_path_confg(&self, allow_unstable: bool) -> FindPathConfig {
+        FindPathConfig {
+            prefer_no_std: self.prefer_no_std,
+            prefer_prelude: self.prefer_prelude,
+            prefer_absolute: self.prefer_absolute,
+            allow_unstable,
         }
     }
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
index 11201afb8a7f2..7e03eb30304bf 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
@@ -2418,6 +2418,55 @@ pub struct MyStruct;
 
 impl other_file_2::Trait for MyStruct {
     $0type Iter;
+}"#,
+        );
+    }
+
+    #[test]
+    fn test_qualify_ident_pat_in_default_members() {
+        check_assist(
+            add_missing_default_members,
+            r#"
+//- /lib.rs crate:b new_source_root:library
+pub enum State {
+    Active,
+    Inactive,
+}
+
+use State::*;
+
+pub trait Checker {
+    fn check(&self) -> State;
+
+    fn is_active(&self) -> bool {
+        match self.check() {
+            Active => true,
+            Inactive => false,
+        }
+    }
+}
+//- /main.rs crate:a deps:b
+struct MyChecker;
+
+impl b::Checker for MyChecker {
+    fn check(&self) -> b::State {
+        todo!();
+    }$0
+}"#,
+            r#"
+struct MyChecker;
+
+impl b::Checker for MyChecker {
+    fn check(&self) -> b::State {
+        todo!();
+    }
+
+    $0fn is_active(&self) -> bool {
+        match self.check() {
+            b::State::Active => true,
+            b::State::Inactive => false,
+        }
+    }
 }"#,
         );
     }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
index 1ece7ddab101e..4d3212c515f28 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -1,7 +1,7 @@
 use std::iter::{self, Peekable};
 
 use either::Either;
-use hir::{Adt, AsAssocItem, Crate, HasAttrs, ImportPathConfig, ModuleDef, Semantics, sym};
+use hir::{Adt, AsAssocItem, Crate, FindPathConfig, HasAttrs, ModuleDef, Semantics, sym};
 use ide_db::RootDatabase;
 use ide_db::assists::ExprFillDefaultMode;
 use ide_db::syntax_helpers::suggest_name;
@@ -76,12 +76,11 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
         .filter(|pat| !matches!(pat, Pat::WildcardPat(_)))
         .collect();
 
-    let cfg = ctx.config.import_path_config();
-
     let make = SyntaxFactory::with_mappings();
     let scope = ctx.sema.scope(expr.syntax())?;
     let module = scope.module();
 
+    let cfg = ctx.config.find_path_confg(ctx.sema.is_nightly(scope.krate()));
     let self_ty = if ctx.config.prefer_self_ty {
         scope
             .containing_function()
@@ -498,7 +497,7 @@ fn build_pat(
     make: &SyntaxFactory,
     module: hir::Module,
     var: ExtendedVariant,
-    cfg: ImportPathConfig,
+    cfg: FindPathConfig,
 ) -> Option {
     let db = ctx.db();
     match var {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs
b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs index 9b9f0c4522ed2..7119d5b9c23eb 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs @@ -65,11 +65,13 @@ fn add_vis(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { if field.visibility().is_some() { return None; } + check_is_not_variant(&field)?; (vis_offset(field.syntax()), field_name.syntax().text_range()) } else if let Some(field) = ctx.find_node_at_offset::() { if field.visibility().is_some() { return None; } + check_is_not_variant(&field)?; (vis_offset(field.syntax()), field.syntax().text_range()) } else { return None; @@ -134,6 +136,11 @@ fn change_vis(acc: &mut Assists, vis: ast::Visibility) -> Option<()> { None } +fn check_is_not_variant(field: &impl AstNode) -> Option<()> { + let kind = field.syntax().parent()?.parent()?.kind(); + (kind != SyntaxKind::VARIANT).then_some(()) +} + #[cfg(test)] mod tests { use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; @@ -239,6 +246,13 @@ mod tests { ); } + #[test] + fn not_applicable_for_enum_variant_fields() { + check_assist_not_applicable(change_visibility, r"pub enum Foo { Foo1($0i32) }"); + + check_assist_not_applicable(change_visibility, r"pub enum Foo { Foo1 { $0n: i32 } }"); + } + #[test] fn change_visibility_target() { check_assist_target(change_visibility, "$0fn foo() {}", "fn"); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs index f73b8c4fd0f19..80445578fcef9 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs @@ -13,12 +13,7 @@ use ide_db::{ use itertools::Itertools; use syntax::{ AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T, - ast::{ - self, HasName, - edit::IndentLevel, - edit_in_place::{AttrsOwnerEdit, Indent}, - make, - }, + ast::{self, HasName, edit::IndentLevel, edit_in_place::Indent, make}, }; use crate::{ @@ -334,8 +329,6 @@ fn augment_references_with_imports( ) -> Vec { let mut visited_modules = FxHashSet::default(); - let cfg = ctx.config.import_path_config(); - let edition = target_module.krate().edition(ctx.db()); references .into_iter() @@ -350,22 +343,27 @@ fn augment_references_with_imports( { visited_modules.insert(ref_module); - let import_scope = ImportScope::find_insert_use_container(name.syntax(), &ctx.sema); - let path = ref_module - .find_use_path( - ctx.sema.db, - ModuleDef::Module(*target_module), - ctx.config.insert_use.prefix_kind, - cfg, - ) - .map(|mod_path| { - make::path_concat( - mod_path_to_ast(&mod_path, edition), - make::path_from_text("Bool"), - ) - }); + ImportScope::find_insert_use_container(name.syntax(), &ctx.sema).and_then( + |import_scope| { + let cfg = + ctx.config.find_path_confg(ctx.sema.is_nightly(target_module.krate())); + let path = ref_module + .find_use_path( + ctx.sema.db, + ModuleDef::Module(*target_module), + ctx.config.insert_use.prefix_kind, + cfg, + ) + .map(|mod_path| { + make::path_concat( + mod_path_to_ast(&mod_path, edition), + make::path_from_text("Bool"), + ) + })?; - import_scope.zip(path) + Some((import_scope, path)) + }, + ) } else { None }; @@ -506,18 +504,6 @@ fn node_to_insert_before(target_node: SyntaxNode) -> SyntaxNode { } fn make_bool_enum(make_pub: bool) -> ast::Enum { 
- let enum_def = make::enum_( - if make_pub { Some(make::visibility_pub()) } else { None }, - make::name("Bool"), - None, - None, - make::variant_list(vec![ - make::variant(None, make::name("True"), None, None), - make::variant(None, make::name("False"), None, None), - ]), - ) - .clone_for_update(); - let derive_eq = make::attr_outer(make::meta_token_tree( make::ext::ident_path("derive"), make::token_tree( @@ -529,11 +515,19 @@ fn make_bool_enum(make_pub: bool) -> ast::Enum { NodeOrToken::Token(make::tokens::ident("Eq")), ], ), - )) - .clone_for_update(); - enum_def.add_attr(derive_eq); - - enum_def + )); + make::enum_( + [derive_eq], + if make_pub { Some(make::visibility_pub()) } else { None }, + make::name("Bool"), + None, + None, + make::variant_list(vec![ + make::variant(None, make::name("True"), None, None), + make::variant(None, make::name("False"), None, None), + ]), + ) + .clone_for_update() } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs index 916bb67ebb405..3dd435d9423b2 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs @@ -235,6 +235,7 @@ pub(crate) fn convert_closure_to_fn(acc: &mut Assists, ctx: &AssistContext<'_>) Some(make::ret_type(make::ty(&ret_ty))) }; let mut fn_ = make::fn_( + None, None, closure_name_or_default.clone(), closure_type_params, @@ -804,6 +805,7 @@ impl A { ); } + #[ignore = "FIXME(next-solver): Fix async closures"] #[test] fn replaces_async_closure_with_async_fn() { check_assist( @@ -1065,7 +1067,7 @@ fn foo() { r#" fn foo() { let (mut a, b) = (0.1, "abc"); - fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&'static str) { + fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&str) { *a = 1.2; let c = *b; } @@ -1097,7 +1099,7 @@ fn foo() { r#" fn foo() { let (mut a, b) = (0.1, "abc"); - fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&'static str) { + fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&str) { let _: &mut bool = p2; *a = 1.2; let c = *b; @@ -1135,7 +1137,7 @@ fn foo() { r#" fn foo() { let (mut a, b) = (0.1, "abc"); - fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&'static str) { + fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&str) { let _: &mut bool = p2; *a = 1.2; let c = *b; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs index f1cc3d90b9c56..7d8b763d8b87b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs @@ -96,6 +96,7 @@ pub(crate) fn convert_from_to_tryfrom(acc: &mut Assists, ctx: &AssistContext<'_> } let error_type = ast::AssocItem::TypeAlias(make::ty_alias( + None, "Error", None, None, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_integer_literal.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_integer_literal.rs index 846f4e9b258ae..bc76ade97f69d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_integer_literal.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_integer_literal.rs @@ -14,6 +14,9 @@ use crate::{AssistContext, AssistId, Assists, GroupLabel}; // const _: i32 = 0b1010; 
// ``` pub(crate) fn convert_integer_literal(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + if !ctx.has_empty_selection() { + return None; + } let literal = ctx.find_node_at_offset::()?; let literal = match literal.kind() { ast::LiteralKind::IntNumber(it) => it, @@ -265,4 +268,9 @@ mod tests { 111111111111111111111111111111111111111111111111111111111111111111111111$0;"; check_assist_not_applicable(convert_integer_literal, before); } + + #[test] + fn convert_non_empty_selection_literal() { + check_assist_not_applicable(convert_integer_literal, "const _: i32 = $00b1010$0;"); + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs index 3d9cde0e0a67c..3a464a3dc6aae 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs @@ -43,7 +43,7 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) - return None; } - let cfg = ctx.config.import_path_config(); + let cfg = ctx.config.find_path_confg(ctx.sema.is_nightly(module.krate())); let src_type_path = { let src_type_path = src_type.syntax().descendants().find_map(ast::Path::cast)?; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs index 9126e869b9a05..1a6d176c9054c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs @@ -1,7 +1,7 @@ use ide_db::defs::{Definition, NameRefClass}; use syntax::{ AstNode, SyntaxNode, - ast::{self, HasName, Name, syntax_factory::SyntaxFactory}, + ast::{self, HasName, Name, edit::AstNodeEdit, syntax_factory::SyntaxFactory}, syntax_editor::SyntaxEditor, }; @@ -45,7 +45,7 @@ pub(crate) fn convert_match_to_let_else(acc: &mut Assists, ctx: &AssistContext<' return None; } - let diverging_arm_expr = match diverging_arm.expr()? 
{ + let diverging_arm_expr = match diverging_arm.expr()?.dedent(1.into()) { ast::Expr::BlockExpr(block) if block.modifier().is_none() && block.label().is_none() => { block.to_string() } @@ -150,7 +150,12 @@ fn rename_variable(pat: &ast::Pat, extracted: &[Name], binding: ast::Pat) -> Syn } } editor.add_mappings(make.finish_with_mappings()); - editor.finish().new_root().clone() + let new_node = editor.finish().new_root().clone(); + if let Some(pat) = ast::Pat::cast(new_node.clone()) { + pat.dedent(1.into()).syntax().clone() + } else { + new_node + } } #[cfg(test)] @@ -209,6 +214,53 @@ fn foo(opt: Option) -> Result { ); } + #[test] + fn indent_level() { + check_assist( + convert_match_to_let_else, + r#" +//- minicore: option +enum Foo { + A(u32), + B(u32), + C(String), +} + +fn foo(opt: Option) -> Result { + let mut state = 2; + let va$0lue = match opt { + Some( + Foo::A(it) + | Foo::B(it) + ) => it, + _ => { + state = 3; + return Err(()) + }, + }; +} + "#, + r#" +enum Foo { + A(u32), + B(u32), + C(String), +} + +fn foo(opt: Option) -> Result { + let mut state = 2; + let Some( + Foo::A(value) + | Foo::B(value) + ) = opt else { + state = 3; + return Err(()) + }; +} + "#, + ); + } + #[test] fn should_not_be_applicable_if_extracting_arm_is_not_an_identity_expr() { cov_mark::check_count!(extracting_arm_is_not_an_identity_expr, 2); @@ -489,9 +541,9 @@ fn f() { r#" fn f() { let Some(x) = Some(()) else {//comment - println!("nope"); - return - }; + println!("nope"); + return + }; } "#, ); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs index 247c1011589bb..80ffb4db3e84d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs @@ -184,8 +184,6 @@ fn augment_references_with_imports( ) -> Vec<(ast::NameLike, Option<(ImportScope, ast::Path)>)> { let mut visited_modules = FxHashSet::default(); - let cfg = ctx.config.import_path_config(); - references .iter() .filter_map(|FileReference { name, .. 
}| { @@ -201,6 +199,7 @@ fn augment_references_with_imports( { visited_modules.insert(ref_module); + let cfg = ctx.config.find_path_confg(ctx.sema.is_nightly(ref_module.krate())); let import_scope = ImportScope::find_insert_use_container(new_name.syntax(), &ctx.sema); let path = ref_module diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs index b8c647ac8b71d..397327cb4ff8c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs @@ -86,9 +86,8 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option { let needs_braces = use_tree.path().is_some() && names_to_import.len() != 1; if needs_braces { - ted::replace(star, expanded.syntax()) + editor.replace(star, expanded.syntax()) } else { let without_braces = expanded .syntax() .children_with_tokens() .filter(|child| !matches!(child.kind(), T!['{'] | T!['}'])) .collect(); - ted::replace_with_many(star, without_braces) + editor.replace_with_many(star, without_braces) } } None => never!(), } + builder.add_file_edits(ctx.vfs_file_id(), editor); } fn get_export_visibility_kind(use_item: &Use) -> VisibilityKind { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs index cdc0e967101a4..e3c7ea1b09391 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs @@ -7,8 +7,8 @@ use itertools::Itertools; use syntax::{ AstNode, AstToken, NodeOrToken, SyntaxKind::WHITESPACE, - T, - ast::{self, make, syntax_factory::SyntaxFactory}, + SyntaxToken, T, + ast::{self, TokenTree, make, syntax_factory::SyntaxFactory}, }; // Assist: extract_expressions_from_format_string @@ -58,10 +58,11 @@ pub(crate) fn extract_expressions_from_format_string( tt.syntax().text_range(), |edit| { // Extract existing arguments in macro - let tokens = tt.token_trees_and_tokens().collect_vec(); + let mut raw_tokens = tt.token_trees_and_tokens().skip(1).collect_vec(); + let format_string_index = format_str_index(&raw_tokens, &fmt_string); + let tokens = raw_tokens.split_off(format_string_index); let existing_args = if let [ - _opening_bracket, NodeOrToken::Token(_format_string), _args_start_comma, tokens @ .., @@ -90,9 +91,11 @@ pub(crate) fn extract_expressions_from_format_string( // Start building the new args let mut existing_args = existing_args.into_iter(); - let mut new_tt_bits = vec![NodeOrToken::Token(make::tokens::literal(&new_fmt))]; + let mut new_tt_bits = raw_tokens; let mut placeholder_indexes = vec![]; + new_tt_bits.push(NodeOrToken::Token(make::tokens::literal(&new_fmt))); + for arg in extracted_args { if matches!(arg, Arg::Expr(_) | Arg::Placeholder) { // insert ", " before each arg @@ -150,7 +153,9 @@ pub(crate) fn extract_expressions_from_format_string( } // Add the final tabstop after the format literal - if let Some(NodeOrToken::Token(literal)) = new_tt.token_trees_and_tokens().nth(1) { + if let Some(NodeOrToken::Token(literal)) = + new_tt.token_trees_and_tokens().nth(1 + format_string_index) + { let annotation = edit.make_tabstop_after(cap); editor.add_annotation(literal, 
annotation); } @@ -163,6 +168,17 @@ pub(crate) fn extract_expressions_from_format_string( Some(()) } +fn format_str_index( + raw_tokens: &[NodeOrToken], + fmt_string: &ast::String, +) -> usize { + let fmt_string = fmt_string.syntax(); + raw_tokens + .iter() + .position(|tt| tt.as_token().is_some_and(|tt| tt == fmt_string)) + .unwrap_or_default() +} + #[cfg(test)] mod tests { use super::*; @@ -186,6 +202,24 @@ fn main() { ); } + #[test] + fn multiple_middle_arg_on_write() { + check_assist( + extract_expressions_from_format_string, + r#" +//- minicore: write +fn main() { + write!(writer(), "{} {x + 1:b} {}$0", y + 2, 2); +} +"#, + r#" +fn main() { + write!(writer(), "{} {:b} {}"$0, y + 2, x + 1, 2); +} +"#, + ); + } + #[test] fn single_arg() { check_assist( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs index 890b8dd64126e..90a5139dd32c1 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs @@ -209,11 +209,12 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op FamousDefs(&ctx.sema, module.krate()).core_ops_ControlFlow(); if let Some(control_flow_enum) = control_flow_enum { + let cfg = ctx.config.find_path_confg(ctx.sema.is_nightly(module.krate())); let mod_path = module.find_use_path( ctx.sema.db, ModuleDef::from(control_flow_enum), ctx.config.insert_use.prefix_kind, - ctx.config.import_path_config(), + cfg, ); if let Some(mod_path) = mod_path { @@ -1641,6 +1642,7 @@ fn format_function( let (generic_params, where_clause) = make_generic_params_and_where_clause(ctx, fun); make::fn_( + None, None, fun_name, generic_params, @@ -5042,7 +5044,7 @@ fn main() { fun_name(bar); } -fn $0fun_name(bar: &'static str) { +fn $0fun_name(bar: &str) { m!(bar); } "#, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs index c6a6b97df8245..dad19bfb8a2c8 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs @@ -1,6 +1,5 @@ -use std::iter; +use std::ops::RangeInclusive; -use either::Either; use hir::{HasSource, ModuleSource}; use ide_db::{ FileId, FxHashMap, FxHashSet, @@ -82,7 +81,15 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti curr_parent_module = ast::Module::cast(mod_syn_opt); } - let mut module = extract_target(&node, ctx.selection_trimmed())?; + let selection_range = ctx.selection_trimmed(); + let (mut module, module_text_range) = if let Some(item) = ast::Item::cast(node.clone()) { + let module = extract_single_target(&item); + (module, node.text_range()) + } else { + let (module, range) = extract_child_target(&node, selection_range)?; + let module_text_range = range.start().text_range().cover(range.end().text_range()); + (module, module_text_range) + }; if module.body_items.is_empty() { return None; } @@ -92,7 +99,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti acc.add( AssistId::refactor_extract("extract_module"), "Extract Module", - module.text_range, + module_text_range, |builder| { //This takes place in three steps: // @@ -110,17 +117,17 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti //for change_visibility 
and usages for first point mentioned above in the process let (usages_to_be_processed, record_fields, use_stmts_to_be_inserted) = - module.get_usages_and_record_fields(ctx); + module.get_usages_and_record_fields(ctx, module_text_range); builder.edit_file(ctx.vfs_file_id()); use_stmts_to_be_inserted.into_iter().for_each(|(_, use_stmt)| { builder.insert(ctx.selection_trimmed().end(), format!("\n{use_stmt}")); }); - let import_paths_to_be_removed = module.resolve_imports(curr_parent_module, ctx); + let import_items = module.resolve_imports(curr_parent_module, ctx); module.change_visibility(record_fields); - let module_def = generate_module_def(&impl_parent, &mut module, old_item_indent); + let module_def = generate_module_def(&impl_parent, module, old_item_indent).to_string(); let mut usages_to_be_processed_for_cur_file = vec![]; for (file_id, usages) in usages_to_be_processed { @@ -157,15 +164,12 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti builder.insert(impl_.syntax().text_range().end(), format!("\n\n{module_def}")); } else { - for import_path_text_range in import_paths_to_be_removed { - if module.text_range.intersect(import_path_text_range).is_some() { - module.text_range = module.text_range.cover(import_path_text_range); - } else { - builder.delete(import_path_text_range); + for import_item in import_items { + if !module_text_range.contains_range(import_item) { + builder.delete(import_item); } } - - builder.replace(module.text_range, module_def) + builder.replace(module_text_range, module_def) } }, ) @@ -173,38 +177,50 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti fn generate_module_def( parent_impl: &Option, - module: &mut Module, + module: Module, old_indent: IndentLevel, -) -> String { - let (items_to_be_processed, new_item_indent) = if parent_impl.is_some() { - (Either::Left(module.body_items.iter()), old_indent + 2) +) -> ast::Module { + let Module { name, body_items, use_items } = module; + let items = if let Some(self_ty) = parent_impl.as_ref().and_then(|imp| imp.self_ty()) { + let assoc_items = body_items + .into_iter() + .map(|item| item.syntax().clone()) + .filter_map(ast::AssocItem::cast) + .map(|it| it.indent(IndentLevel(1))) + .collect_vec(); + let assoc_item_list = make::assoc_item_list(Some(assoc_items)); + let impl_ = make::impl_(None, None, None, self_ty.clone(), None, Some(assoc_item_list)); + // Add the import for enum/struct corresponding to given impl block + let use_impl = make_use_stmt_of_node_with_super(self_ty.syntax()); + let mut module_body_items = use_items; + module_body_items.insert(0, use_impl); + module_body_items.push(ast::Item::Impl(impl_)); + module_body_items } else { - (Either::Right(module.use_items.iter().chain(module.body_items.iter())), old_indent + 1) + [use_items, body_items].concat() }; - let mut body = items_to_be_processed - .map(|item| item.indent(IndentLevel(1))) - .map(|item| format!("{new_item_indent}{item}")) - .join("\n\n"); + let items = items.into_iter().map(|it| it.reset_indent().indent(IndentLevel(1))).collect_vec(); + let module_body = make::item_list(Some(items)); - if let Some(self_ty) = parent_impl.as_ref().and_then(|imp| imp.self_ty()) { - let impl_indent = old_indent + 1; - body = format!("{impl_indent}impl {self_ty} {{\n{body}\n{impl_indent}}}"); + let module_name = make::name(name); + make::mod_(module_name, Some(module_body)).indent(old_indent) +} - // Add the import for enum/struct corresponding to given impl block - 
module.make_use_stmt_of_node_with_super(self_ty.syntax()); - for item in module.use_items.iter() { - body = format!("{impl_indent}{item}\n\n{body}"); - } - } +fn make_use_stmt_of_node_with_super(node_syntax: &SyntaxNode) -> ast::Item { + let super_path = make::ext::ident_path("super"); + let node_path = make::ext::ident_path(&node_syntax.to_string()); + let use_ = make::use_( + None, + None, + make::use_tree(make::join_paths(vec![super_path, node_path]), None, None, false), + ); - let module_name = module.name; - format!("mod {module_name} {{\n{body}\n{old_indent}}}") + ast::Item::from(use_) } #[derive(Debug)] struct Module { - text_range: TextRange, name: &'static str, /// All items except use items. body_items: Vec, @@ -214,22 +230,37 @@ struct Module { use_items: Vec, } -fn extract_target(node: &SyntaxNode, selection_range: TextRange) -> Option { +fn extract_single_target(node: &ast::Item) -> Module { + let (body_items, use_items) = if matches!(node, ast::Item::Use(_)) { + (Vec::new(), vec![node.clone()]) + } else { + (vec![node.clone()], Vec::new()) + }; + let name = "modname"; + Module { name, body_items, use_items } +} + +fn extract_child_target( + node: &SyntaxNode, + selection_range: TextRange, +) -> Option<(Module, RangeInclusive)> { let selected_nodes = node .children() .filter(|node| selection_range.contains_range(node.text_range())) - .chain(iter::once(node.clone())); - let (use_items, body_items) = selected_nodes .filter_map(ast::Item::cast) - .partition(|item| matches!(item, ast::Item::Use(..))); - - Some(Module { text_range: selection_range, name: "modname", body_items, use_items }) + .collect_vec(); + let start = selected_nodes.first()?.syntax().clone(); + let end = selected_nodes.last()?.syntax().clone(); + let (use_items, body_items): (Vec, Vec) = + selected_nodes.into_iter().partition(|item| matches!(item, ast::Item::Use(..))); + Some((Module { name: "modname", body_items, use_items }, start..=end)) } impl Module { fn get_usages_and_record_fields( &self, ctx: &AssistContext<'_>, + replace_range: TextRange, ) -> (FxHashMap>, Vec, FxHashMap) { let mut adt_fields = Vec::new(); @@ -247,7 +278,7 @@ impl Module { ast::Adt(it) => { if let Some( nod ) = ctx.sema.to_def(&it) { let node_def = Definition::Adt(nod); - self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs, &mut use_stmts_to_be_inserted); + self.expand_and_group_usages_file_wise(ctx, replace_range,node_def, &mut refs, &mut use_stmts_to_be_inserted); //Enum Fields are not allowed to explicitly specify pub, it is implied match it { @@ -281,30 +312,30 @@ impl Module { ast::TypeAlias(it) => { if let Some( nod ) = ctx.sema.to_def(&it) { let node_def = Definition::TypeAlias(nod); - self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs, &mut use_stmts_to_be_inserted); + self.expand_and_group_usages_file_wise(ctx,replace_range, node_def, &mut refs, &mut use_stmts_to_be_inserted); } }, ast::Const(it) => { if let Some( nod ) = ctx.sema.to_def(&it) { let node_def = Definition::Const(nod); - self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs, &mut use_stmts_to_be_inserted); + self.expand_and_group_usages_file_wise(ctx,replace_range, node_def, &mut refs, &mut use_stmts_to_be_inserted); } }, ast::Static(it) => { if let Some( nod ) = ctx.sema.to_def(&it) { let node_def = Definition::Static(nod); - self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs, &mut use_stmts_to_be_inserted); + self.expand_and_group_usages_file_wise(ctx,replace_range, node_def, &mut refs, &mut 
use_stmts_to_be_inserted); } }, ast::Fn(it) => { if let Some( nod ) = ctx.sema.to_def(&it) { let node_def = Definition::Function(nod); - self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs, &mut use_stmts_to_be_inserted); + self.expand_and_group_usages_file_wise(ctx,replace_range, node_def, &mut refs, &mut use_stmts_to_be_inserted); } }, ast::Macro(it) => { if let Some(nod) = ctx.sema.to_def(&it) { - self.expand_and_group_usages_file_wise(ctx, Definition::Macro(nod), &mut refs, &mut use_stmts_to_be_inserted); + self.expand_and_group_usages_file_wise(ctx,replace_range, Definition::Macro(nod), &mut refs, &mut use_stmts_to_be_inserted); } }, _ => (), @@ -318,6 +349,7 @@ impl Module { fn expand_and_group_usages_file_wise( &self, ctx: &AssistContext<'_>, + replace_range: TextRange, node_def: Definition, refs_in_files: &mut FxHashMap>, use_stmts_to_be_inserted: &mut FxHashMap, @@ -327,7 +359,7 @@ impl Module { syntax::NodeOrToken::Node(node) => node, syntax::NodeOrToken::Token(tok) => tok.parent().unwrap(), // won't panic }; - let out_of_sel = |node: &SyntaxNode| !self.text_range.contains_range(node.text_range()); + let out_of_sel = |node: &SyntaxNode| !replace_range.contains_range(node.text_range()); let mut use_stmts_set = FxHashSet::default(); for (file_id, refs) in node_def.usages(&ctx.sema).all() { @@ -527,7 +559,8 @@ impl Module { // mod -> ust_stmt transversal // true | false -> super import insertion // true | true -> super import insertion - self.make_use_stmt_of_node_with_super(use_node); + let super_use_node = make_use_stmt_of_node_with_super(use_node); + self.use_items.insert(0, super_use_node); } None => {} } @@ -556,7 +589,8 @@ impl Module { use_tree_paths = Some(use_tree_str); } else if def_in_mod && def_out_sel { - self.make_use_stmt_of_node_with_super(use_node); + let super_use_node = make_use_stmt_of_node_with_super(use_node); + self.use_items.insert(0, super_use_node); } } @@ -579,12 +613,12 @@ impl Module { | Definition::Const(_) | Definition::Static(_) | Definition::Trait(_) - | Definition::TraitAlias(_) | Definition::TypeAlias(_) ); if (def_out_sel || !is_item) && use_stmt_not_in_sel { let use_ = make::use_( + None, None, make::use_tree(make::join_paths(use_tree_paths), None, None, false), ); @@ -595,19 +629,6 @@ impl Module { import_path_to_be_removed } - fn make_use_stmt_of_node_with_super(&mut self, node_syntax: &SyntaxNode) -> ast::Item { - let super_path = make::ext::ident_path("super"); - let node_path = make::ext::ident_path(&node_syntax.to_string()); - let use_ = make::use_( - None, - make::use_tree(make::join_paths(vec![super_path, node_path]), None, None, false), - ); - - let item = ast::Item::from(use_); - self.use_items.insert(0, item.clone()); - item - } - fn process_use_stmt_for_import_resolve( &self, use_stmt: Option, @@ -1422,10 +1443,10 @@ $0fn foo(x: B) {}$0 struct B {} mod modname { - use super::B; - use super::A; + use super::B; + impl A { pub(crate) fn foo(x: B) {} } @@ -1737,4 +1758,27 @@ fn main() { "#, ); } + + #[test] + fn test_miss_select_item() { + check_assist( + extract_module, + r#" +mod foo { + mod $0bar { + fn foo(){}$0 + } +} +"#, + r#" +mod foo { + mod modname { + pub(crate) mod bar { + fn foo(){} + } + } +} +"#, + ) + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs index c56d0b3de5d6a..20ebd8f09c73f 100644 --- 
a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs @@ -400,11 +400,12 @@ fn process_references( let segment = builder.make_mut(segment); let scope_node = builder.make_syntax_mut(scope_node); if !visited_modules.contains(&module) { + let cfg = ctx.config.find_path_confg(ctx.sema.is_nightly(module.krate())); let mod_path = module.find_use_path( ctx.sema.db, *enum_module_def, ctx.config.insert_use.prefix_kind, - ctx.config.import_path_config(), + cfg, ); if let Some(mut mod_path) = mod_path { mod_path.pop_segment(); @@ -478,7 +479,7 @@ macro_rules! foo { }; } -struct TheVariant{ the_field: u8 } +struct TheVariant { the_field: u8 } enum TheEnum { TheVariant(TheVariant), @@ -502,7 +503,7 @@ enum Foo { } "#, r#" -struct Bar{ node: Box } +struct Bar { node: Box } enum Foo { Bar(Bar), @@ -519,7 +520,7 @@ enum Foo { } "#, r#" -struct Bar{ node: Box, a: Arc> } +struct Bar { node: Box, a: Arc> } enum Foo { Bar(Bar), @@ -560,7 +561,7 @@ enum A { One(One) }"#, check_assist( extract_struct_from_enum_variant, "enum A { $0One { foo: u32, bar: u32 } }", - r#"struct One{ foo: u32, bar: u32 } + r#"struct One { foo: u32, bar: u32 } enum A { One(One) }"#, ); @@ -571,7 +572,7 @@ enum A { One(One) }"#, check_assist( extract_struct_from_enum_variant, "enum A { $0One { foo: u32 } }", - r#"struct One{ foo: u32 } + r#"struct One { foo: u32 } enum A { One(One) }"#, ); @@ -582,7 +583,7 @@ enum A { One(One) }"#, check_assist( extract_struct_from_enum_variant, r"enum En { Var { a: T$0 } }", - r#"struct Var{ a: T } + r#"struct Var { a: T } enum En { Var(Var) }"#, ); @@ -599,7 +600,7 @@ enum Enum { Variant{ field: u32$0 } }"#, r#" #[derive(Debug)] #[derive(Clone)] -struct Variant{ field: u32 } +struct Variant { field: u32 } #[derive(Debug)] #[derive(Clone)] @@ -618,7 +619,7 @@ enum Enum { } }"#, r#" -struct Variant{ +struct Variant { field: u32 } @@ -642,7 +643,7 @@ mod indenting { }"#, r#" mod indenting { - struct Variant{ + struct Variant { field: u32 } @@ -668,7 +669,7 @@ enum A { } }"#, r#" -struct One{ +struct One { // leading comment /// doc comment #[an_attr] @@ -700,7 +701,7 @@ enum A { } }"#, r#" -struct One{ +struct One { // comment /// doc #[attr] @@ -747,7 +748,7 @@ enum A { /* comment */ // other /// comment -struct One{ +struct One { a: u32 } @@ -789,7 +790,7 @@ enum A { extract_struct_from_enum_variant, "enum A { $0One{ a: u32, pub(crate) b: u32, pub(super) c: u32, d: u32 } }", r#" -struct One{ a: u32, pub(crate) b: u32, pub(super) c: u32, d: u32 } +struct One { a: u32, pub(crate) b: u32, pub(super) c: u32, d: u32 } enum A { One(One) }"#, ); @@ -850,7 +851,7 @@ pub enum A { One(One) }"#, extract_struct_from_enum_variant, "pub(in something) enum A { $0One{ a: u32, b: u32 } }", r#" -pub(in something) struct One{ pub(in something) a: u32, pub(in something) b: u32 } +pub(in something) struct One { pub(in something) a: u32, pub(in something) b: u32 } pub(in something) enum A { One(One) }"#, ); @@ -862,7 +863,7 @@ pub(in something) enum A { One(One) }"#, extract_struct_from_enum_variant, "pub(crate) enum A { $0One{ a: u32, b: u32, c: u32 } }", r#" -pub(crate) struct One{ pub(crate) a: u32, pub(crate) b: u32, pub(crate) c: u32 } +pub(crate) struct One { pub(crate) a: u32, pub(crate) b: u32, pub(crate) c: u32 } pub(crate) enum A { One(One) }"#, ); @@ -933,7 +934,7 @@ fn f() { } "#, r#" -struct V{ i: i32, j: i32 } +struct V { i: i32, j: i32 } enum E { V(V) @@ -1027,7 +1028,7 @@ 
fn f() { "#, r#" //- /main.rs -struct V{ i: i32, j: i32 } +struct V { i: i32, j: i32 } enum E { V(V) @@ -1057,7 +1058,7 @@ fn foo() { } "#, r#" -struct One{ a: u32, b: u32 } +struct One { a: u32, b: u32 } enum A { One(One) } @@ -1114,7 +1115,7 @@ enum X<'a, 'b, 'x> { } "#, r#" -struct A<'a, 'x>{ a: &'a &'x mut () } +struct A<'a, 'x> { a: &'a &'x mut () } enum X<'a, 'b, 'x> { A(A<'a, 'x>), @@ -1136,7 +1137,7 @@ enum X<'b, T, V, const C: usize> { } "#, r#" -struct A<'b, T, const C: usize>{ a: T, b: X<'b>, c: [u8; C] } +struct A<'b, T, const C: usize> { a: T, b: X<'b>, c: [u8; C] } enum X<'b, T, V, const C: usize> { A(A<'b, T, C>), @@ -1158,7 +1159,7 @@ enum X<'a, 'b> { } "#, r#" -struct C{ c: () } +struct C { c: () } enum X<'a, 'b> { A { a: &'a () }, @@ -1180,7 +1181,7 @@ enum En { } "#, r#" -struct A{ a: T } +struct A { a: T } enum En { A(A), diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs index 79f22381952ae..7f93506685e18 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs @@ -69,8 +69,9 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> edit.replace(ty.syntax(), new_ty.syntax()); // Insert new alias - let ty_alias = make::ty_alias("Type", generic_params, None, None, Some((ty, None))) - .clone_for_update(); + let ty_alias = + make::ty_alias(None, "Type", generic_params, None, None, Some((ty, None))) + .clone_for_update(); if let Some(cap) = ctx.config.snippet_cap && let Some(name) = ty_alias.name() diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs index c9c1969b9e023..bd88e8b09ced0 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs @@ -200,7 +200,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op } ExtractionKind::Constant => { let ast_ty = make.ty(&ty_string); - ast::Item::Const(make.item_const(None, pat_name, ast_ty, initializer)) + ast::Item::Const(make.item_const(None, None, pat_name, ast_ty, initializer)) .into() } ExtractionKind::Static => { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs index 3badc17d01af4..55de537debf3d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs @@ -136,10 +136,6 @@ fn target_data_for_def( target_name = Some(t.name(db)); offset_target_and_file_id(db, t)? } - hir::ModuleDef::TraitAlias(t) => { - target_name = Some(t.name(db)); - offset_target_and_file_id(db, t)? - } hir::ModuleDef::TypeAlias(t) => { target_name = Some(t.name(db)); offset_target_and_file_id(db, t)? 
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs index 2c81e2883a34a..d8a2e038d33c2 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs @@ -155,6 +155,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' let ret_type = method_source.ret_type(); let f = make::fn_( + None, vis, fn_name, type_params, @@ -195,6 +196,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' let assoc_item_list = make::assoc_item_list(Some(vec![item])); let impl_def = make::impl_( + None, ty_params, ty_args, make::ty_path(make::ext::ident_path(name)), diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs index e96250f3c50a5..e87dde5b8e427 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs @@ -20,7 +20,6 @@ use syntax::{ HasGenericParams, HasName, HasTypeBounds, HasVisibility as astHasVisibility, Path, WherePred, edit::{self, AstNodeEdit}, - edit_in_place::AttrsOwnerEdit, make, }, ted::{self, Position}, @@ -266,6 +265,7 @@ fn generate_impl( let bound_params = bound_def.generic_param_list(); let delegate = make::impl_trait( + None, delegee.is_unsafe(db), bound_params.clone(), bound_params.map(|params| params.to_generic_args()), @@ -379,6 +379,7 @@ fn generate_impl( let path_type = transform_impl(ctx, ast_strukt, &old_impl, &transform_args, path_type)?; // 3) Generate delegate trait impl let delegate = make::impl_trait( + None, trait_.is_unsafe(db), trait_gen_params, trait_gen_args, @@ -652,8 +653,7 @@ fn process_assoc_item( qual_path_ty: ast::Path, base_name: &str, ) -> Option { - let attrs = item.attrs(); - let assoc = match item { + match item { AssocItem::Const(c) => const_assoc_item(c, qual_path_ty), AssocItem::Fn(f) => func_assoc_item(f, qual_path_ty, base_name), AssocItem::MacroCall(_) => { @@ -662,18 +662,7 @@ fn process_assoc_item( None } AssocItem::TypeAlias(ta) => ty_assoc_item(ta, qual_path_ty), - }; - if let Some(assoc) = &assoc { - attrs.for_each(|attr| { - assoc.add_attr(attr.clone()); - // fix indentations - if let Some(tok) = attr.syntax().next_sibling_or_token() { - let pos = Position::after(tok); - ted::insert(pos, make::tokens::whitespace(" ")); - } - }) } - assoc } fn const_assoc_item(item: syntax::ast::Const, qual_path_ty: ast::Path) -> Option { @@ -687,6 +676,7 @@ fn const_assoc_item(item: syntax::ast::Const, qual_path_ty: ast::Path) -> Option // make::path_qualified(qual_path_ty, path_expr_segment.as_single_segment().unwrap()); let qualified_path = qualified_path(qual_path_ty, path_expr_segment); let inner = make::item_const( + item.attrs(), item.visibility(), item.name()?, item.ty()?, @@ -755,6 +745,7 @@ fn func_assoc_item( let body = make::block_expr(vec![], Some(call.into())).clone_for_update(); let func = make::fn_( + item.attrs(), item.visibility(), item.name()?, item.generic_param_list(), @@ -779,13 +770,14 @@ fn ty_assoc_item(item: syntax::ast::TypeAlias, qual_path_ty: Path) -> Option::t; + + #[cfg(not(test))] + type t = ::t; +} +"#, + ); + } + #[test] fn assoc_items_attributes_mutably_cloned() { 
check_assist( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs index 55a09c5d775d2..a1fc2c6023597 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs @@ -57,9 +57,9 @@ fn generate_record_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( }; let module = ctx.sema.to_def(&strukt)?.module(ctx.db()); + let cfg = ctx.config.find_path_confg(ctx.sema.is_nightly(module.krate())); let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?; - let trait_path = - module.find_path(ctx.db(), ModuleDef::Trait(trait_), ctx.config.import_path_config())?; + let trait_path = module.find_path(ctx.db(), ModuleDef::Trait(trait_), cfg)?; let field_type = field.ty()?; let field_name = field.name()?; @@ -99,9 +99,9 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<() }; let module = ctx.sema.to_def(&strukt)?.module(ctx.db()); + let cfg = ctx.config.find_path_confg(ctx.sema.is_nightly(module.krate())); let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?; - let trait_path = - module.find_path(ctx.db(), ModuleDef::Trait(trait_), ctx.config.import_path_config())?; + let trait_path = module.find_path(ctx.db(), ModuleDef::Trait(trait_), cfg)?; let field_type = field.ty()?; let target = field.syntax().text_range(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs index 73a69c82fbcdd..06fef4af22382 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs @@ -1,6 +1,8 @@ use syntax::{ + SyntaxKind::{ATTR, COMMENT, WHITESPACE}, T, - ast::{self, AstNode, HasAttrs, edit_in_place::AttrsOwnerEdit, make}, + ast::{self, AstNode, HasAttrs, edit::IndentLevel, make}, + syntax_editor::{Element, Position}, }; use crate::{AssistContext, AssistId, Assists}; @@ -48,8 +50,20 @@ pub(crate) fn generate_derive(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt )) .clone_for_update(); - let nominal = edit.make_mut(nominal); - nominal.add_attr(derive.clone()); + let mut editor = edit.make_editor(nominal.syntax()); + let indent = IndentLevel::from_node(nominal.syntax()); + let after_attrs_and_comments = nominal + .syntax() + .children_with_tokens() + .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR)) + .map_or(Position::first_child_of(nominal.syntax()), Position::before); + editor.insert_all( + after_attrs_and_comments, + vec![ + derive.syntax().syntax_element(), + make::tokens::whitespace(&format!("\n{indent}")).syntax_element(), + ], + ); let delimiter = derive .meta() @@ -58,8 +72,9 @@ pub(crate) fn generate_derive(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt .expect("failed to get token tree out of Meta") .r_paren_token() .expect("make::attr_outer was expected to have a R_PAREN"); - - edit.add_tabstop_before_token(cap, delimiter); + let tabstop_before = edit.make_tabstop_before(cap); + editor.add_annotation(delimiter, tabstop_before); + edit.add_file_edits(ctx.vfs_file_id(), editor); } Some(_) => { // Just move the cursor. 
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_fn_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_fn_type_alias.rs index 3c327a63b0f0b..0b7eca2290f62 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_fn_type_alias.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_fn_type_alias.rs @@ -94,6 +94,7 @@ pub(crate) fn generate_fn_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) // Insert new alias let ty_alias = make::ty_alias( + None, &alias_name, generic_params, None, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs index af949a0649899..d88b0f34b7969 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs @@ -1,3 +1,4 @@ +use hir::next_solver::{DbInterner, TypingMode}; use ide_db::{RootDatabase, famous_defs::FamousDefs}; use syntax::ast::{self, AstNode, HasName}; @@ -80,17 +81,20 @@ fn existing_from_impl( sema: &'_ hir::Semantics<'_, RootDatabase>, variant: &ast::Variant, ) -> Option<()> { + let db = sema.db; let variant = sema.to_def(variant)?; - let enum_ = variant.parent_enum(sema.db); - let krate = enum_.module(sema.db).krate(); - + let krate = variant.module(db).krate(); let from_trait = FamousDefs(sema, krate).core_convert_From()?; + let interner = DbInterner::new_with(db, Some(krate.base()), None); + use hir::next_solver::infer::DbInternerInferExt; + let infcx = interner.infer_ctxt().build(TypingMode::non_body_analysis()); - let enum_type = enum_.ty(sema.db); - - let wrapped_type = variant.fields(sema.db).first()?.ty(sema.db); - - if enum_type.impls_trait(sema.db, from_trait, &[wrapped_type]) { Some(()) } else { None } + let variant = variant.instantiate_infer(&infcx); + let enum_ = variant.parent_enum(sema.db); + let field_ty = variant.fields(sema.db).first()?.ty(sema.db); + let enum_ty = enum_.ty(sema.db); + tracing::debug!(?enum_, ?field_ty, ?enum_ty); + enum_ty.impls_trait(infcx, from_trait, &[field_ty]).then_some(()) } #[cfg(test)] @@ -119,15 +123,19 @@ impl From for A { ); } + // FIXME(next-solver): it would be nice to not be *required* to resolve the + // path in order to properly generate assists #[test] fn test_generate_from_impl_for_enum_complicated_path() { check_assist( generate_from_impl_for_enum, r#" //- minicore: from +mod foo { pub mod bar { pub mod baz { pub struct Boo; } } } enum A { $0One(foo::bar::baz::Boo) } "#, r#" +mod foo { pub mod bar { pub mod baz { pub struct Boo; } } } enum A { One(foo::bar::baz::Boo) } impl From for A { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs index 613b32fcc1653..a9cf2c1bae1a4 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs @@ -189,7 +189,7 @@ fn add_func_to_accumulator( ))); // FIXME: adt may have generic params. 
- let impl_ = make::impl_(None, None, name, None, None).clone_for_update(); + let impl_ = make::impl_(None, None, None, name, None, None).clone_for_update(); func.indent(IndentLevel(1)); impl_.get_or_create_assoc_item_list().add_item(func.into()); @@ -316,7 +316,7 @@ impl FunctionBuilder { let current_module = ctx.sema.scope(call.syntax())?.module(); let visibility = calculate_necessary_visibility(current_module, target_module, ctx); - let fn_name = make::name(&name.text()); + let fn_name = make::name(name.ident_token()?.text()); let mut necessary_generic_params = FxHashSet::default(); necessary_generic_params.extend(receiver_ty.generic_params(ctx.db())); let params = fn_args( @@ -364,10 +364,13 @@ impl FunctionBuilder { Visibility::Crate => Some(make::visibility_pub_crate()), Visibility::Pub => Some(make::visibility_pub()), }; + let type_params = + self.generic_param_list.filter(|list| list.generic_params().next().is_some()); let fn_def = make::fn_( + None, visibility, self.fn_name, - self.generic_param_list, + type_params, self.where_clause, self.params, self.fn_body, @@ -2414,6 +2417,33 @@ impl Foo { ) } + #[test] + fn create_method_with_unused_generics() { + check_assist( + generate_function, + r#" +struct Foo(S); +impl Foo { + fn foo(&self) { + self.bar()$0; + } +} +"#, + r#" +struct Foo(S); +impl Foo { + fn foo(&self) { + self.bar(); + } + + fn bar(&self) ${0:-> _} { + todo!() + } +} +"#, + ) + } + #[test] fn create_function_with_async() { check_assist( @@ -3130,4 +3160,32 @@ fn main() { "#, ) } + + #[test] + fn no_generate_method_by_keyword() { + check_assist_not_applicable( + generate_function, + r#" +fn main() { + s.super$0(); +} + "#, + ); + check_assist_not_applicable( + generate_function, + r#" +fn main() { + s.Self$0(); +} + "#, + ); + check_assist_not_applicable( + generate_function, + r#" +fn main() { + s.self$0(); +} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs index 807b9194b2df7..e42d0ed1b00b0 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs @@ -263,6 +263,7 @@ fn generate_getter_from_info( let body = make::block_expr([], Some(body)); make::fn_( + None, strukt.visibility(), fn_name, None, @@ -299,6 +300,7 @@ fn generate_setter_from_info(info: &AssistInfo, record_field_info: &RecordFieldI // Make the setter fn make::fn_( + None, strukt.visibility(), fn_name, None, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs index b38ee6f7dce8e..77eb8efc6f6af 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs @@ -174,6 +174,7 @@ pub(crate) fn generate_impl_trait(acc: &mut Assists, ctx: &AssistContext<'_>) -> let make_impl_ = |body| { make::impl_trait( + None, trait_.unsafe_token().is_some(), None, trait_gen_args.clone(), diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs index 351f134612f00..9760fd62aab4f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs 
@@ -77,10 +77,11 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option let item_in_ns = hir::ItemInNs::from(hir::ModuleDef::from(ty.as_adt()?)); + let cfg = ctx.config.find_path_confg(ctx.sema.is_nightly(current_module.krate())); let type_path = current_module.find_path( ctx.sema.db, item_for_path_search(ctx.sema.db, item_in_ns)?, - ctx.config.import_path_config(), + cfg, )?; let edition = current_module.krate().edition(ctx.db()); @@ -134,6 +135,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option let ret_type = make::ret_type(make::ty_path(make::ext::ident_path("Self"))); let fn_ = make::fn_( + None, strukt.visibility(), make::name("new"), None, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs index 4e95ceb2e853e..cad14d929648a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs @@ -1,4 +1,5 @@ use ast::make; +use hir::next_solver::{DbInterner, TypingMode}; use hir::{HasCrate, ModuleDef, Semantics}; use ide_db::{ RootDatabase, famous_defs::FamousDefs, helpers::mod_path_to_ast, @@ -6,13 +7,12 @@ use ide_db::{ }; use syntax::{ TokenText, - ast::{self, AstNode, HasGenericParams, HasName, edit, edit_in_place::Indent}, + ast::{self, AstNode, HasAttrs, HasGenericParams, HasName, edit, edit_in_place::Indent}, }; use crate::{ AssistId, assist_context::{AssistContext, Assists}, - utils::add_cfg_attrs_to, }; // Assist: generate_single_field_struct_from @@ -48,6 +48,7 @@ pub(crate) fn generate_single_field_struct_from( let strukt_name = ctx.find_node_at_offset::()?; let adt = ast::Adt::cast(strukt_name.syntax().parent()?)?; let ast::Adt::Struct(strukt) = adt else { + tracing::debug!(?adt); return None; }; @@ -58,10 +59,12 @@ pub(crate) fn generate_single_field_struct_from( let constructors = make_constructors(ctx, module, &types); if constructors.iter().filter(|expr| expr.is_none()).count() != 1 { + tracing::debug!(?constructors); return None; } let main_field_i = constructors.iter().position(Option::is_none)?; if from_impl_exists(&strukt, main_field_i, &ctx.sema).is_some() { + tracing::debug!(?strukt, ?main_field_i); return None; } @@ -89,6 +92,7 @@ pub(crate) fn generate_single_field_struct_from( let body = make::block_expr([], Some(constructor)); let fn_ = make::fn_( + None, None, make::name("from"), None, @@ -110,8 +114,12 @@ pub(crate) fn generate_single_field_struct_from( .clone_for_update(); fn_.indent(1.into()); + let cfg_attrs = strukt + .attrs() + .filter(|attr| attr.as_simple_call().is_some_and(|(name, _arg)| name == "cfg")); let impl_ = make::impl_trait( + cfg_attrs, false, None, trait_gen_args, @@ -128,8 +136,6 @@ pub(crate) fn generate_single_field_struct_from( impl_.get_or_create_assoc_item_list().add_item(fn_.into()); - add_cfg_attrs_to(&strukt, &impl_); - impl_.reindent_to(indent); builder.insert(strukt.syntax().text_range().end(), format!("\n\n{indent}{impl_}")); @@ -163,6 +169,7 @@ fn make_constructors( types: &[ast::Type], ) -> Vec> { let (db, sema) = (ctx.db(), &ctx.sema); + let cfg = ctx.config.find_path_confg(ctx.sema.is_nightly(module.krate())); types .iter() .map(|ty| { @@ -173,11 +180,7 @@ fn make_constructors( let item_in_ns = ModuleDef::Adt(ty.as_adt()?).into(); let edition = module.krate().edition(db); - let 
ty_path = module.find_path( - db, - item_for_path_search(db, item_in_ns)?, - ctx.config.import_path_config(), - )?; + let ty_path = module.find_path(db, item_for_path_search(db, item_in_ns)?, cfg)?; use_trivial_constructor(db, mod_path_to_ast(&ty_path, edition), &ty, edition) }) @@ -198,6 +201,7 @@ fn get_fields(strukt: &ast::Struct) -> Option<(Option>, Vec it.get_or_create_where_clause(), ast::Trait(it) => it.get_or_create_where_clause(), - ast::TraitAlias(it) => it.get_or_create_where_clause(), ast::Impl(it) => it.get_or_create_where_clause(), ast::Enum(it) => it.get_or_create_where_clause(), ast::Struct(it) => it.get_or_create_where_clause(), diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs index 644d1f6cafefc..6b50718424c72 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs @@ -40,28 +40,34 @@ pub(crate) fn move_guard_to_arm_body(acc: &mut Assists, ctx: &AssistContext<'_>) return None; } let space_before_guard = guard.syntax().prev_sibling_or_token(); + let space_after_arrow = match_arm.fat_arrow_token()?.next_sibling_or_token(); - let guard_condition = guard.condition()?; + let guard_condition = guard.condition()?.reset_indent(); let arm_expr = match_arm.expr()?; - let if_expr = - make::expr_if(guard_condition, make::block_expr(None, Some(arm_expr.clone())), None) - .indent(arm_expr.indent_level()); + let then_branch = make::block_expr(None, Some(arm_expr.reset_indent().indent(1.into()))); + let if_expr = make::expr_if(guard_condition, then_branch, None).indent(arm_expr.indent_level()); let target = guard.syntax().text_range(); acc.add( AssistId::refactor_rewrite("move_guard_to_arm_body"), "Move guard to arm body", target, - |edit| { - match space_before_guard { - Some(element) if element.kind() == WHITESPACE => { - edit.delete(element.text_range()); - } - _ => (), - }; + |builder| { + let mut edit = builder.make_editor(match_arm.syntax()); + if let Some(element) = space_before_guard + && element.kind() == WHITESPACE + { + edit.delete(element); + } + if let Some(element) = space_after_arrow + && element.kind() == WHITESPACE + { + edit.replace(element, make::tokens::single_space()); + } - edit.delete(guard.syntax().text_range()); - edit.replace_ast(arm_expr, if_expr.into()); + edit.delete(guard.syntax()); + edit.replace(arm_expr.syntax(), if_expr.syntax()); + builder.add_file_edits(ctx.vfs_file_id(), edit); }, ) } @@ -298,6 +304,44 @@ fn main() { ); } + #[test] + fn move_multiline_guard_to_arm_body_works() { + check_assist( + move_guard_to_arm_body, + r#" +fn main() { + match 92 { + x $0if true + && true + && true => + { + { + false + } + }, + _ => true + } +} +"#, + r#" +fn main() { + match 92 { + x => if true + && true + && true { + { + { + false + } + } + }, + _ => true + } +} +"#, + ); + } + #[test] fn move_guard_to_arm_body_works_complex_match() { check_assist( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/normalize_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/normalize_import.rs index bba28b5fc8af5..36da1d1788247 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/normalize_import.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/normalize_import.rs @@ -109,8 +109,8 @@ mod tests { #[test] fn test_order() { check_assist_variations!( - "foo::{*, Qux, bar::{Quux, Bar}, baz, FOO_BAZ, self, Baz}", 
- "foo::{self, bar::{Bar, Quux}, baz, Baz, Qux, FOO_BAZ, *}" + "foo::{*, Qux, bar::{Quux, Bar}, baz, FOO_BAZ, self, Baz, v10, v9, r#aaa}", + "foo::{self, Baz, FOO_BAZ, Qux, r#aaa, bar::{Bar, Quux}, baz, v9, v10, *}" ); } @@ -145,17 +145,17 @@ fn main() { #[test] fn test_redundant_braces() { - check_assist_variations!("foo::{bar::{baz, Qux}}", "foo::bar::{baz, Qux}"); + check_assist_variations!("foo::{bar::{baz, Qux}}", "foo::bar::{Qux, baz}"); check_assist_variations!("foo::{bar::{self}}", "foo::bar::{self}"); check_assist_variations!("foo::{bar::{*}}", "foo::bar::*"); check_assist_variations!("foo::{bar::{Qux as Quux}}", "foo::bar::Qux as Quux"); check_assist_variations!( "foo::bar::{{FOO_BAZ, Qux, self}, {*, baz}}", - "foo::bar::{self, baz, Qux, FOO_BAZ, *}" + "foo::bar::{self, FOO_BAZ, Qux, baz, *}" ); check_assist_variations!( "foo::bar::{{{FOO_BAZ}, {{Qux}, {self}}}, {{*}, {baz}}}", - "foo::bar::{self, baz, Qux, FOO_BAZ, *}" + "foo::bar::{self, FOO_BAZ, Qux, baz, *}" ); } @@ -163,11 +163,11 @@ fn main() { fn test_merge() { check_assist_variations!( "foo::{*, bar, {FOO_BAZ, qux}, bar::{*, baz}, {Quux}}", - "foo::{bar::{self, baz, *}, qux, Quux, FOO_BAZ, *}" + "foo::{FOO_BAZ, Quux, bar::{self, baz, *}, qux, *}" ); check_assist_variations!( "foo::{*, bar, {FOO_BAZ, qux}, bar::{*, baz}, {Quux, bar::{baz::Foo}}}", - "foo::{bar::{self, baz::{self, Foo}, *}, qux, Quux, FOO_BAZ, *}" + "foo::{FOO_BAZ, Quux, bar::{self, baz::{self, Foo}, *}, qux, *}" ); } @@ -229,15 +229,15 @@ use { check_assist_not_applicable_variations!("foo::bar"); check_assist_not_applicable_variations!("foo::bar::*"); check_assist_not_applicable_variations!("foo::bar::Qux as Quux"); - check_assist_not_applicable_variations!("foo::bar::{self, baz, Qux, FOO_BAZ, *}"); + check_assist_not_applicable_variations!("foo::bar::{self, FOO_BAZ, Qux, baz, *}"); check_assist_not_applicable_variations!( - "foo::{self, bar::{Bar, Quux}, baz, Baz, Qux, FOO_BAZ, *}" + "foo::{self, Baz, FOO_BAZ, Qux, bar::{Bar, Quux}, baz, *}" ); check_assist_not_applicable_variations!( - "foo::{bar::{self, baz, *}, qux, Quux, FOO_BAZ, *}" + "foo::{FOO_BAZ, Quux, bar::{self, baz, *}, qux, *}" ); check_assist_not_applicable_variations!( - "foo::{bar::{self, baz::{self, Foo}, *}, qux, Quux, FOO_BAZ, *}" + "foo::{bar::{self, FOO_BAZ, Quux, baz::{self, Foo}, *}, qux, *}" ); } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs index 603be4d66733d..547d3686e3909 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs @@ -88,7 +88,7 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>) } } - let item = make.item_const(None, make.name(&name), make.ty(&ty), initializer); + let item = make.item_const(None, None, make.name(&name), make.ty(&ty), initializer); if let Some((cap, name)) = ctx.config.snippet_cap.zip(item.name()) { let tabstop = edit.make_tabstop_before(cap); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs index 985121780b1ab..e4494f0492ece 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs @@ -45,10 +45,11 @@ pub(crate) fn 
qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> let current_edition = current_module.krate().edition(ctx.db()); let target_module_def = ModuleDef::from(resolved_call); let item_in_ns = ItemInNs::from(target_module_def); + let cfg = ctx.config.find_path_confg(ctx.sema.is_nightly(current_module.krate())); let receiver_path = current_module.find_path( ctx.sema.db, item_for_path_search(ctx.sema.db, item_in_ns)?, - ctx.config.import_path_config(), + cfg, )?; let qualify_candidate = QualifyCandidate::ImplMethod(ctx.sema.db, call, resolved_call); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs index 9356d02706c93..414f6746d4404 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs @@ -112,6 +112,16 @@ fn compute_dbg_replacement( } } } + // dbg!(2, 'x', &x, x, ...); + exprs if ast::ExprStmt::can_cast(parent.kind()) && exprs.iter().all(pure_expr) => { + let mut replace = vec![parent.clone().into()]; + if let Some(prev_sibling) = parent.prev_sibling_or_token() + && prev_sibling.kind() == syntax::SyntaxKind::WHITESPACE + { + replace.push(prev_sibling); + } + (replace, None) + } // dbg!(expr0) [expr] => { // dbg!(expr, &parent); @@ -163,6 +173,20 @@ fn compute_dbg_replacement( }) } +fn pure_expr(expr: &ast::Expr) -> bool { + match_ast! { + match (expr.syntax()) { + ast::Literal(_) => true, + ast::RefExpr(it) => { + matches!(it.expr(), Some(ast::Expr::PathExpr(p)) + if p.path().and_then(|p| p.as_single_name_ref()).is_some()) + }, + ast::PathExpr(it) => it.path().and_then(|it| it.as_single_name_ref()).is_some(), + _ => false, + } + } +} + fn replace_nested_dbgs(expanded: ast::Expr) -> ast::Expr { if let ast::Expr::MacroExpr(mac) = &expanded { // Special-case when `expanded` itself is `dbg!()` since we cannot replace the whole tree @@ -231,6 +255,32 @@ mod tests { check("dbg!{$01 + 1}", "1 + 1"); } + #[test] + fn test_remove_simple_dbg_statement() { + check_assist( + remove_dbg, + r#" +fn foo() { + let n = 2; + $0dbg!(3); + dbg!(2.6); + dbg!(1, 2.5); + dbg!('x'); + dbg!(&n); + dbg!(n); + // needless comment + dbg!("foo");$0 +} +"#, + r#" +fn foo() { + let n = 2; + // needless comment +} +"#, + ); + } + #[test] fn test_remove_dbg_not_applicable() { check_assist_not_applicable(remove_dbg, "fn main() {$0vec![1, 2, 3]}"); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_arith_op.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_arith_op.rs index 6b385a03625b7..440ab4d4604f2 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_arith_op.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_arith_op.rs @@ -102,6 +102,9 @@ fn is_primitive_int(ctx: &AssistContext<'_>, expr: &ast::Expr) -> bool { /// Extract the operands of an arithmetic expression (e.g. 
`1 + 2` or `1.checked_add(2)`) fn parse_binary_op(ctx: &AssistContext<'_>) -> Option<(ast::Expr, ArithOp, ast::Expr)> { + if !ctx.has_empty_selection() { + return None; + } let expr = ctx.find_node_at_offset::()?; let op = match expr.op_kind() { @@ -163,7 +166,7 @@ impl ArithKind { #[cfg(test)] mod tests { - use crate::tests::check_assist; + use crate::tests::{check_assist, check_assist_not_applicable}; use super::*; @@ -220,6 +223,18 @@ fn main() { fn main() { let x = 1.wrapping_add(2); } +"#, + ) + } + + #[test] + fn replace_arith_not_applicable_with_non_empty_selection() { + check_assist_not_applicable( + replace_arith_with_checked, + r#" +fn main() { + let x = 1 $0+$0 2; +} "#, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index 175f261317058..25c5593007bcb 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -71,6 +71,7 @@ pub(crate) fn replace_derive_with_manual_impl( let current_module = ctx.sema.scope(adt.syntax())?.module(); let current_crate = current_module.krate(); let current_edition = current_crate.edition(ctx.db()); + let cfg = ctx.config.find_path_confg(ctx.sema.is_nightly(current_crate)); let found_traits = items_locator::items_with_name( ctx.db(), @@ -84,7 +85,7 @@ pub(crate) fn replace_derive_with_manual_impl( }) .flat_map(|trait_| { current_module - .find_path(ctx.sema.db, hir::ModuleDef::Trait(trait_), ctx.config.import_path_config()) + .find_path(ctx.sema.db, hir::ModuleDef::Trait(trait_), cfg) .as_ref() .map(|path| mod_path_to_ast(path, current_edition)) .zip(Some(trait_)) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs index 15d3db5e749f0..dd244375dc91e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs @@ -8,7 +8,7 @@ use ide_db::{ ty_filter::TryEnum, }; use syntax::{ - AstNode, T, TextRange, + AstNode, Edition, T, TextRange, ast::{self, HasName, edit::IndentLevel, edit_in_place::Indent, syntax_factory::SyntaxFactory}, }; @@ -187,7 +187,7 @@ fn make_else_arm( // Assist: replace_match_with_if_let // -// Replaces a binary `match` with a wildcard pattern and no guards with an `if let` expression. +// Replaces a binary `match` with a wildcard pattern with an `if let` expression. 
// // ``` // enum Action { Move { distance: u32 }, Stop } @@ -225,18 +225,24 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<' let mut arms = match_arm_list.arms(); let (first_arm, second_arm) = (arms.next()?, arms.next()?); - if arms.next().is_some() || first_arm.guard().is_some() || second_arm.guard().is_some() { + if arms.next().is_some() || second_arm.guard().is_some() { + return None; + } + if first_arm.guard().is_some() && ctx.edition() < Edition::Edition2024 { return None; } - let (if_let_pat, then_expr, else_expr) = pick_pattern_and_expr_order( + let (if_let_pat, guard, then_expr, else_expr) = pick_pattern_and_expr_order( &ctx.sema, first_arm.pat()?, second_arm.pat()?, first_arm.expr()?, second_arm.expr()?, + first_arm.guard(), + second_arm.guard(), )?; let scrutinee = match_expr.expr()?; + let guard = guard.and_then(|it| it.condition()); let let_ = match &if_let_pat { ast::Pat::LiteralPat(p) @@ -277,6 +283,11 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<' } _ => make.expr_let(if_let_pat, scrutinee).into(), }; + let condition = if let Some(guard) = guard { + make.expr_bin(condition, ast::BinaryOp::LogicOp(ast::LogicOp::And), guard).into() + } else { + condition + }; let then_expr = then_expr.clone_for_update(); then_expr.reindent_to(IndentLevel::single()); let then_block = make_block_expr(then_expr); @@ -303,18 +314,23 @@ fn pick_pattern_and_expr_order( pat2: ast::Pat, expr: ast::Expr, expr2: ast::Expr, -) -> Option<(ast::Pat, ast::Expr, ast::Expr)> { + guard: Option, + guard2: Option, +) -> Option<(ast::Pat, Option, ast::Expr, ast::Expr)> { + if guard.is_some() && guard2.is_some() { + return None; + } let res = match (pat, pat2) { (ast::Pat::WildcardPat(_), _) => return None, - (pat, ast::Pat::WildcardPat(_)) => (pat, expr, expr2), - (pat, _) if is_empty_expr(&expr2) => (pat, expr, expr2), - (_, pat) if is_empty_expr(&expr) => (pat, expr2, expr), + (pat, ast::Pat::WildcardPat(_)) => (pat, guard, expr, expr2), + (pat, _) if is_empty_expr(&expr2) => (pat, guard, expr, expr2), + (_, pat) if is_empty_expr(&expr) => (pat, guard, expr2, expr), (pat, pat2) => match (binds_name(sema, &pat), binds_name(sema, &pat2)) { (true, true) => return None, - (true, false) => (pat, expr, expr2), - (false, true) => (pat2, expr2, expr), - _ if is_sad_pat(sema, &pat) => (pat2, expr2, expr), - (false, false) => (pat, expr, expr2), + (true, false) => (pat, guard, expr, expr2), + (false, true) => (pat2, guard2, expr2, expr), + _ if is_sad_pat(sema, &pat) => (pat2, guard2, expr2, expr), + (false, false) => (pat, guard, expr, expr2), }, }; Some(res) @@ -1849,6 +1865,30 @@ fn main() { code() } } +"#, + ) + } + + #[test] + fn test_replace_match_with_if_let_chain() { + check_assist( + replace_match_with_if_let, + r#" +fn main() { + match$0 Some(0) { + Some(n) if n % 2 == 0 && n != 6 => (), + _ => code(), + } +} +"#, + r#" +fn main() { + if let Some(n) = Some(0) && n % 2 == 0 && n != 6 { + () + } else { + code() + } +} "#, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs index 3cd7b58f4ddd4..df7057835c346 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs @@ -7,11 +7,8 @@ use ide_db::{ }; use syntax::{ AstNode, - ast::{ - self, HasGenericParams, 
HasName, HasTypeBounds, Name, NameLike, PathType, - make::impl_trait_type, - }, - match_ast, ted, + ast::{self, HasGenericParams, HasName, HasTypeBounds, Name, NameLike, PathType, make}, + match_ast, }; use crate::{AssistContext, AssistId, Assists}; @@ -74,26 +71,31 @@ pub(crate) fn replace_named_generic_with_impl( "Replace named generic with impl trait", target, |edit| { - let type_param = edit.make_mut(type_param); - let fn_ = edit.make_mut(fn_); - - let path_types_to_replace = path_types_to_replace - .into_iter() - .map(|param| edit.make_mut(param)) - .collect::>(); + let mut editor = edit.make_editor(type_param.syntax()); // remove trait from generic param list if let Some(generic_params) = fn_.generic_param_list() { - generic_params.remove_generic_param(ast::GenericParam::TypeParam(type_param)); - if generic_params.generic_params().count() == 0 { - ted::remove(generic_params.syntax()); + let params: Vec = generic_params + .clone() + .generic_params() + .filter(|it| it.syntax() != type_param.syntax()) + .collect(); + if params.is_empty() { + editor.delete(generic_params.syntax()); + } else { + let new_generic_param_list = make::generic_param_list(params); + editor.replace( + generic_params.syntax(), + new_generic_param_list.syntax().clone_for_update(), + ); } } - let new_bounds = impl_trait_type(type_bound_list); + let new_bounds = make::impl_trait_type(type_bound_list); for path_type in path_types_to_replace.iter().rev() { - ted::replace(path_type.syntax(), new_bounds.clone_for_update().syntax()); + editor.replace(path_type.syntax(), new_bounds.clone_for_update().syntax()); } + edit.add_file_edits(ctx.vfs_file_id(), editor); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs index 9f742131e5cb4..5fc4d7a6170de 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs @@ -63,12 +63,9 @@ pub(crate) fn replace_qualified_name_with_use( ); let path_to_qualifier = starts_with_name_ref .then(|| { - ctx.sema.scope(original_path.syntax())?.module().find_use_path( - ctx.sema.db, - module, - ctx.config.insert_use.prefix_kind, - ctx.config.import_path_config(), - ) + let mod_ = ctx.sema.scope(original_path.syntax())?.module(); + let cfg = ctx.config.find_path_confg(ctx.sema.is_nightly(mod_.krate())); + mod_.find_use_path(ctx.sema.db, module, ctx.config.insert_use.prefix_kind, cfg) }) .flatten(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs index 6527d3706e217..209d3f08eb888 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs @@ -55,7 +55,7 @@ pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< path.gen_source_code( &scope, &mut formatter, - ctx.config.import_path_config(), + ctx.config.find_path_confg(ctx.sema.is_nightly(scope.module().krate())), scope.krate().to_display_target(ctx.db()), ) .ok() diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs index eed070cb07dd6..aed66d3d8344c 100644 --- 
a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs @@ -132,12 +132,9 @@ pub(crate) fn desugar_async_into_impl_future( let scope = ctx.sema.scope(function.syntax())?; let module = scope.module(); + let cfg = ctx.config.find_path_confg(ctx.sema.is_nightly(module.krate())); let future_trait = FamousDefs(&ctx.sema, scope.krate()).core_future_Future()?; - let trait_path = module.find_path( - ctx.db(), - ModuleDef::Trait(future_trait), - ctx.config.import_path_config(), - )?; + let trait_path = module.find_path(ctx.db(), ModuleDef::Trait(future_trait), cfg)?; let edition = scope.krate().edition(ctx.db()); let trait_path = trait_path.display(ctx.db(), edition); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs index 504e12f93df61..bf1546986ed27 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs @@ -1,6 +1,6 @@ use ide_db::assists::AssistId; use syntax::{ - AstNode, T, + AstNode, SyntaxToken, T, ast::{self, syntax_factory::SyntaxFactory}, }; @@ -39,7 +39,7 @@ pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>) let makro = ctx.find_node_at_offset::()?; let cursor_offset = ctx.offset(); - let semicolon = makro.semicolon_token(); + let semicolon = macro_semicolon(&makro); let token_tree = makro.token_tree()?; let ltoken = token_tree.left_delimiter_token()?; @@ -95,6 +95,14 @@ pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>) ) } +fn macro_semicolon(makro: &ast::MacroCall) -> Option { + makro.semicolon_token().or_else(|| { + let macro_expr = ast::MacroExpr::cast(makro.syntax().parent()?)?; + let expr_stmt = ast::ExprStmt::cast(macro_expr.syntax().parent()?)?; + expr_stmt.semicolon_token() + }) +} + #[cfg(test)] mod tests { use crate::tests::{check_assist, check_assist_not_applicable}; @@ -119,7 +127,29 @@ macro_rules! sth { sth!{ } "#, - ) + ); + + check_assist( + toggle_macro_delimiter, + r#" +macro_rules! sth { + () => {}; +} + +fn foo() { + sth!$0( ); +} + "#, + r#" +macro_rules! 
sth { + () => {}; +} + +fn foo() { + sth!{ } +} + "#, + ); } #[test] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_imports.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_imports.rs index c066f41ca47b7..accb5c28d6ed3 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_imports.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_imports.rs @@ -1,9 +1,6 @@ use syntax::{ AstNode, SyntaxKind, - ast::{ - self, HasAttrs, HasVisibility, edit::IndentLevel, edit_in_place::AttrsOwnerEdit, make, - syntax_factory::SyntaxFactory, - }, + ast::{self, HasAttrs, HasVisibility, edit::IndentLevel, make, syntax_factory::SyntaxFactory}, syntax_editor::{Element, Position, Removable}, }; @@ -46,13 +43,10 @@ pub(crate) fn unmerge_imports(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt acc.add(AssistId::refactor_rewrite("unmerge_imports"), label, target, |builder| { let make = SyntaxFactory::with_mappings(); let new_use = make.use_( + use_.attrs(), use_.visibility(), make.use_tree(path, tree.use_tree_list(), tree.rename(), tree.star_token().is_some()), ); - // Add any attributes that are present on the use tree - use_.attrs().for_each(|attr| { - new_use.add_attr(attr.clone_for_update()); - }); let mut editor = builder.make_editor(use_.syntax()); // Remove the use tree from the current use item diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs index 1f89a3d5f17c9..a58b1da621c7c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs @@ -1,3 +1,4 @@ +use hir::AsAssocItem; use syntax::{ TextRange, ast::{self, AstNode, HasArgList, prec::ExprPrecedence}, @@ -43,6 +44,7 @@ pub(crate) fn unqualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) let qualifier = path.qualifier()?; let method_name = path.segment()?.name_ref()?; + let scope = ctx.sema.scope(path.syntax())?; let res = ctx.sema.resolve_path(&path)?; let hir::PathResolution::Def(hir::ModuleDef::Function(fun)) = res else { return None }; if !fun.has_self_param(ctx.sema.db) { @@ -78,7 +80,14 @@ pub(crate) fn unqualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) edit.insert(close, ")"); } edit.replace(replace_comma, format!(".{method_name}(")); - add_import(qualifier, ctx, edit); + + if let Some(fun) = fun.as_assoc_item(ctx.db()) + && let Some(trait_) = fun.container_or_implemented_trait(ctx.db()) + && !scope.can_use_trait_methods(trait_) + { + // Only add an import for trait methods that are not already imported. 
+ add_import(qualifier, ctx, edit); + } }, ) } @@ -235,4 +244,111 @@ impl S { fn assoc(S: S, S: S) {} } fn f() { S::assoc$0(S, S); }"#, ); } + + #[test] + fn inherent_method() { + check_assist( + unqualify_method_call, + r#" +mod foo { + pub struct Bar; + impl Bar { + pub fn bar(self) {} + } +} + +fn baz() { + foo::Bar::b$0ar(foo::Bar); +} + "#, + r#" +mod foo { + pub struct Bar; + impl Bar { + pub fn bar(self) {} + } +} + +fn baz() { + foo::Bar.bar(); +} + "#, + ); + } + + #[test] + fn trait_method_in_impl() { + check_assist( + unqualify_method_call, + r#" +mod foo { + pub trait Bar { + pub fn bar(self) {} + } +} + +struct Baz; +impl foo::Bar for Baz { + fn bar(self) { + foo::Bar::b$0ar(Baz); + } +} + "#, + r#" +mod foo { + pub trait Bar { + pub fn bar(self) {} + } +} + +struct Baz; +impl foo::Bar for Baz { + fn bar(self) { + Baz.bar(); + } +} + "#, + ); + } + + #[test] + fn trait_method_already_imported() { + check_assist( + unqualify_method_call, + r#" +mod foo { + pub struct Foo; + pub trait Bar { + pub fn bar(self) {} + } + impl Bar for Foo { + pub fn bar(self) {} + } +} + +use foo::Bar; + +fn baz() { + foo::Bar::b$0ar(foo::Foo); +} + "#, + r#" +mod foo { + pub struct Foo; + pub trait Bar { + pub fn bar(self) {} + } + impl Bar for Foo { + pub fn bar(self) {} + } +} + +use foo::Bar; + +fn baz() { + foo::Foo.bar(); +} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs index cda2ad43278ac..c0637a7470f3e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs @@ -1,11 +1,11 @@ mod generated; use expect_test::expect; -use hir::Semantics; +use hir::{Semantics, db::HirDatabase, setup_tracing}; use ide_db::{ EditionedFileId, FileRange, RootDatabase, SnippetCap, assists::ExprFillDefaultMode, - base_db::SourceDatabase, + base_db::{SourceDatabase, salsa}, imports::insert_use::{ImportGranularity, InsertUseConfig}, source_change::FileSystemEdit, }; @@ -16,7 +16,7 @@ use test_utils::{assert_eq_text, extract_offset}; use crate::{ Assist, AssistConfig, AssistContext, AssistKind, AssistResolveStrategy, Assists, SingleResolve, - assists, handlers::Handler, + handlers::Handler, }; pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig { @@ -103,6 +103,18 @@ pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig { prefer_self_ty: false, }; +fn assists( + db: &RootDatabase, + config: &AssistConfig, + resolve: AssistResolveStrategy, + range: ide_db::FileRange, +) -> Vec { + salsa::attach(db, || { + HirDatabase::zalsa_register_downcaster(db); + crate::assists(db, config, resolve, range) + }) +} + pub(crate) fn with_single_file(text: &str) -> (RootDatabase, EditionedFileId) { RootDatabase::with_single_file(text) } @@ -168,6 +180,7 @@ pub(crate) fn check_assist_import_one( // There is no way to choose what assist within a group you want to test against, // so this is here to allow you choose. 
+#[track_caller] pub(crate) fn check_assist_by_label( assist: Handler, #[rust_analyzer::rust_fixture] ra_fixture_before: &str, @@ -305,6 +318,7 @@ fn check_with_config( expected: ExpectedResult<'_>, assist_label: Option<&str>, ) { + let _tracing = setup_tracing(); let (mut db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before); db.enable_proc_attr_macros(); let text_without_caret = db.file_text(file_with_caret_id.file_id(&db)).text(&db).to_string(); @@ -318,7 +332,10 @@ fn check_with_config( _ => AssistResolveStrategy::All, }; let mut acc = Assists::new(&ctx, resolve); - handler(&mut acc, &ctx); + salsa::attach(&db, || { + HirDatabase::zalsa_register_downcaster(&db); + handler(&mut acc, &ctx); + }); let mut res = acc.finish(); let assist = match assist_label { @@ -453,7 +470,6 @@ pub fn test_some_range(a: int) -> bool { let expected = labels(&assists); expect![[r#" - Convert integer base Extract into... Replace if let with match "#]] @@ -486,7 +502,6 @@ pub fn test_some_range(a: int) -> bool { let expected = labels(&assists); expect![[r#" - Convert integer base Extract into... Replace if let with match "#]] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs index 91aac9cf7b608..20e0302b57d70 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs @@ -736,8 +736,11 @@ fn generate_impl_inner( generic_params.as_ref().map(|params| params.to_generic_args().clone_for_update()); let ty = make::ty_path(make::ext::ident_path(&adt.name().unwrap().text())); - let impl_ = match trait_ { + let cfg_attrs = + adt.attrs().filter(|attr| attr.as_simple_call().is_some_and(|(name, _arg)| name == "cfg")); + match trait_ { Some(trait_) => make::impl_trait( + cfg_attrs, is_unsafe, None, None, @@ -750,26 +753,9 @@ fn generate_impl_inner( adt.where_clause(), body, ), - None => make::impl_(generic_params, generic_args, ty, adt.where_clause(), body), - } - .clone_for_update(); - - // Copy any cfg attrs from the original adt - add_cfg_attrs_to(adt, &impl_); - - impl_ -} - -pub(crate) fn add_cfg_attrs_to(from: &T, to: &U) -where - T: HasAttrs, - U: AttrsOwnerEdit, -{ - let cfg_attrs = - from.attrs().filter(|attr| attr.as_simple_call().is_some_and(|(name, _arg)| name == "cfg")); - for attr in cfg_attrs { - to.add_attr(attr.clone_for_update()); + None => make::impl_(cfg_attrs, generic_params, generic_args, ty, adt.where_clause(), body), } + .clone_for_update() } pub(crate) fn add_method_to_adt( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs index 11d26228ba201..e36e0e5704581 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs @@ -654,7 +654,7 @@ fn enum_variants_with_paths( if let Some(path) = ctx.module.find_path( ctx.db, hir::ModuleDef::from(variant), - ctx.config.import_path_config(ctx.is_nightly), + ctx.config.find_path_config(ctx.is_nightly), ) { // Variants with trivial paths are already added by the existing completion logic, // so we should avoid adding these twice diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs index f75123324f377..8b4f315ac5733 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs +++ 
b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs @@ -2,7 +2,7 @@ use std::ops::ControlFlow; -use hir::{Complete, HasContainer, ItemContainer, MethodCandidateCallback, Name}; +use hir::{Complete, Function, HasContainer, ItemContainer, MethodCandidateCallback}; use ide_db::FxHashSet; use syntax::SmolStr; @@ -237,7 +237,10 @@ fn complete_methods( struct Callback<'a, F> { ctx: &'a CompletionContext<'a>, f: F, - seen_methods: FxHashSet, + // We deliberately deduplicate by function ID and not name, because while inherent methods cannot be + // duplicated, trait methods can. And it is still useful to show all of them (even when there + // is also an inherent method, especially considering that it may be private, and filtered later). + seen_methods: FxHashSet, } impl MethodCandidateCallback for Callback<'_, F> @@ -247,9 +250,7 @@ fn complete_methods( // We don't want to exclude inherent trait methods - that is, methods of traits available from // `where` clauses or `dyn Trait`. fn on_inherent_method(&mut self, func: hir::Function) -> ControlFlow<()> { - if func.self_param(self.ctx.db).is_some() - && self.seen_methods.insert(func.name(self.ctx.db)) - { + if func.self_param(self.ctx.db).is_some() && self.seen_methods.insert(func) { (self.f)(func); } ControlFlow::Continue(()) @@ -265,9 +266,7 @@ fn complete_methods( return ControlFlow::Continue(()); } - if func.self_param(self.ctx.db).is_some() - && self.seen_methods.insert(func.name(self.ctx.db)) - { + if func.self_param(self.ctx.db).is_some() && self.seen_methods.insert(func) { (self.f)(func); } @@ -1384,14 +1383,15 @@ fn baz() { fn skip_iter() { check_no_kw( r#" - //- minicore: iterator + //- minicore: iterator, clone, builtin_impls fn foo() { [].$0 } "#, expect![[r#" - me clone() (as Clone) fn(&self) -> Self - me into_iter() (as IntoIterator) fn(self) -> ::IntoIter + me clone() (as Clone) fn(&self) -> Self + me fmt(…) (use core::fmt::Debug) fn(&self, &mut Formatter<'_>) -> Result<(), Error> + me into_iter() (as IntoIterator) fn(self) -> ::IntoIter "#]], ); check_no_kw( @@ -1501,7 +1501,9 @@ fn main() { bar.$0 } "#, - expect![[r#""#]], + expect![[r#" + me foo() fn(self: Bar) + "#]], ); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs index 2133291b1de15..a7df0ab3863b1 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs @@ -61,6 +61,8 @@ pub(crate) fn complete_expr_path( after_if_expr, in_condition, incomplete_let, + after_incomplete_let, + in_value, ref ref_expr_parent, after_amp, ref is_func_update, @@ -139,9 +141,8 @@ pub(crate) fn complete_expr_path( Qualified::With { resolution: None, .. } => {} Qualified::With { resolution: Some(resolution), .. } => { // Add associated types on type parameters and `Self`. 
- ctx.scope.assoc_type_shorthand_candidates(resolution, |_, alias| { + ctx.scope.assoc_type_shorthand_candidates(resolution, |alias| { acc.add_type_alias(ctx, alias); - None::<()> }); match resolution { hir::PathResolution::Def(hir::ModuleDef::Module(module)) => { @@ -254,7 +255,7 @@ pub(crate) fn complete_expr_path( .find_path( ctx.db, hir::ModuleDef::from(strukt), - ctx.config.import_path_config(ctx.is_nightly), + ctx.config.find_path_config(ctx.is_nightly), ) .filter(|it| it.len() > 1); @@ -276,7 +277,7 @@ pub(crate) fn complete_expr_path( .find_path( ctx.db, hir::ModuleDef::from(un), - ctx.config.import_path_config(ctx.is_nightly), + ctx.config.find_path_config(ctx.is_nightly), ) .filter(|it| it.len() > 1); @@ -361,10 +362,16 @@ pub(crate) fn complete_expr_path( add_keyword("loop", "loop {\n $0\n}"); if in_match_guard { add_keyword("if", "if $0"); + } else if in_value { + add_keyword("if", "if $1 {\n $2\n} else {\n $0\n}"); } else { add_keyword("if", "if $1 {\n $0\n}"); } - add_keyword("if let", "if let $1 = $2 {\n $0\n}"); + if in_value { + add_keyword("if let", "if let $1 = $2 {\n $3\n} else {\n $0\n}"); + } else { + add_keyword("if let", "if let $1 = $2 {\n $0\n}"); + } add_keyword("for", "for $1 in $2 {\n $0\n}"); add_keyword("true", "true"); add_keyword("false", "false"); @@ -379,8 +386,11 @@ pub(crate) fn complete_expr_path( add_keyword("let", "let $1 = $0;"); } - if after_if_expr { + if after_if_expr || after_incomplete_let { add_keyword("else", "else {\n $0\n}"); + } + + if after_if_expr { add_keyword("else if", "else if $1 {\n $0\n}"); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs index dad8a76de87df..d1e05a4359f19 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs @@ -257,7 +257,7 @@ fn import_on_the_fly( }; let user_input_lowercased = potential_import_name.to_lowercase(); - let import_cfg = ctx.config.import_path_config(ctx.is_nightly); + let import_cfg = ctx.config.import_path_config(); import_assets .search_for_imports(&ctx.sema, import_cfg, ctx.config.insert_use.prefix_kind) @@ -304,7 +304,7 @@ fn import_on_the_fly_pat_( ItemInNs::Values(def) => matches!(def, hir::ModuleDef::Const(_)), }; let user_input_lowercased = potential_import_name.to_lowercase(); - let cfg = ctx.config.import_path_config(ctx.is_nightly); + let cfg = ctx.config.import_path_config(); import_assets .search_for_imports(&ctx.sema, cfg, ctx.config.insert_use.prefix_kind) @@ -346,7 +346,7 @@ fn import_on_the_fly_method( let user_input_lowercased = potential_import_name.to_lowercase(); - let cfg = ctx.config.import_path_config(ctx.is_nightly); + let cfg = ctx.config.import_path_config(); import_assets .search_for_imports(&ctx.sema, cfg, ctx.config.insert_use.prefix_kind) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs index 64bb1fce6ba02..6162d98372839 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs @@ -238,6 +238,88 @@ fn main() { r#" fn main() { let x = if $1 { + $2 +} else { + $0 +}; + let y = 92; +} +"#, + ); + + check_edit( + "else", + r#" +fn main() { + let x = if true { + () + } $0 + let y = 92; +} +"#, + r#" +fn main() { + let x = if true 
{ + () + } else { + $0 +}; + let y = 92; +} +"#, + ); + + check_edit( + "else if", + r#" +fn main() { + let x = if true { + () + } $0 else {}; +} +"#, + r#" +fn main() { + let x = if true { + () + } else if $1 { + $0 +} else {}; +} +"#, + ); + + check_edit( + "else if", + r#" +fn main() { + let x = if true { + () + } $0 else if true {}; +} +"#, + r#" +fn main() { + let x = if true { + () + } else if $1 { + $0 +} else if true {}; +} +"#, + ); + + check_edit( + "else", + r#" +fn main() { + let x = 2 $0 + let y = 92; +} +"#, + r#" +fn main() { + let x = 2 else { $0 }; let y = 92; @@ -335,6 +417,120 @@ fn main() { ) } + #[test] + fn if_completion_in_parameter() { + check_edit( + "if", + r" +fn main() { + foo($0) +} +", + r" +fn main() { + foo(if $1 { + $2 +} else { + $0 +}) +} +", + ); + + check_edit( + "if", + r" +fn main() { + foo($0, 2) +} +", + r" +fn main() { + foo(if $1 { + $2 +} else { + $0 +}, 2) +} +", + ); + + check_edit( + "if", + r" +fn main() { + foo(2, $0) +} +", + r" +fn main() { + foo(2, if $1 { + $2 +} else { + $0 +}) +} +", + ); + + check_edit( + "if let", + r" +fn main() { + foo(2, $0) +} +", + r" +fn main() { + foo(2, if let $1 = $2 { + $3 +} else { + $0 +}) +} +", + ); + } + + #[test] + fn if_completion_in_let_statement() { + check_edit( + "if", + r" +fn main() { + let x = $0; +} +", + r" +fn main() { + let x = if $1 { + $2 +} else { + $0 +}; +} +", + ); + + check_edit( + "if let", + r" +fn main() { + let x = $0; +} +", + r" +fn main() { + let x = if let $1 = $2 { + $3 +} else { + $0 +}; +} +", + ); + } + #[test] fn completes_let_in_block() { check_edit( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs index 0058611a61539..d355fdbe0739c 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs @@ -63,7 +63,7 @@ pub(crate) fn complete_postfix( None => return, }; - let cfg = ctx.config.import_path_config(ctx.is_nightly); + let cfg = ctx.config.find_path_config(ctx.is_nightly); if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() && receiver_ty.impls_trait(ctx.db, drop_trait, &[]) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs index 7c38c7d8ce44f..3112462cda4e8 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs @@ -37,9 +37,7 @@ pub(crate) fn complete_type_path( true } // Type things are fine - ScopeDef::ModuleDef( - BuiltinType(_) | Adt(_) | Module(_) | Trait(_) | TraitAlias(_) | TypeAlias(_), - ) + ScopeDef::ModuleDef(BuiltinType(_) | Adt(_) | Module(_) | Trait(_) | TypeAlias(_)) | ScopeDef::AdtSelfType(_) | ScopeDef::Unknown | ScopeDef::GenericParam(TypeParam(_)) => location.complete_types(), @@ -79,9 +77,8 @@ pub(crate) fn complete_type_path( Qualified::With { resolution: None, .. } => {} Qualified::With { resolution: Some(resolution), .. } => { // Add associated types on type parameters and `Self`. 
- ctx.scope.assoc_type_shorthand_candidates(resolution, |_, alias| { + ctx.scope.assoc_type_shorthand_candidates(resolution, |alias| { acc.add_type_alias(ctx, alias); - None::<()> }); match resolution { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs index 844fce5ef8019..b7367cb62f099 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs @@ -4,8 +4,11 @@ //! module, and we use to statically check that we only produce snippet //! completions if we are allowed to. -use hir::ImportPathConfig; -use ide_db::{SnippetCap, imports::insert_use::InsertUseConfig}; +use hir::FindPathConfig; +use ide_db::{ + SnippetCap, + imports::{import_assets::ImportPathConfig, insert_use::InsertUseConfig}, +}; use crate::{CompletionFieldsToResolve, snippet::Snippet}; @@ -59,12 +62,20 @@ impl CompletionConfig<'_> { .flat_map(|snip| snip.prefix_triggers.iter().map(move |trigger| (&**trigger, snip))) } - pub fn import_path_config(&self, allow_unstable: bool) -> ImportPathConfig { - ImportPathConfig { + pub fn find_path_config(&self, allow_unstable: bool) -> FindPathConfig { + FindPathConfig { prefer_no_std: self.prefer_no_std, prefer_prelude: self.prefer_prelude, prefer_absolute: self.prefer_absolute, allow_unstable, } } + + pub fn import_path_config(&self) -> ImportPathConfig { + ImportPathConfig { + prefer_no_std: self.prefer_no_std, + prefer_prelude: self.prefer_prelude, + prefer_absolute: self.prefer_absolute, + } + } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs index cfd7f80d40b30..007475688d209 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs @@ -147,6 +147,8 @@ pub(crate) struct PathExprCtx<'db> { /// Whether this expression is the direct condition of an if or while expression pub(crate) in_condition: bool, pub(crate) incomplete_let: bool, + pub(crate) after_incomplete_let: bool, + pub(crate) in_value: bool, pub(crate) ref_expr_parent: Option, pub(crate) after_amp: bool, /// The surrounding RecordExpression we are completing a functional update @@ -525,7 +527,6 @@ impl CompletionContext<'_> { hir::ModuleDef::Const(it) => self.is_visible(it), hir::ModuleDef::Static(it) => self.is_visible(it), hir::ModuleDef::Trait(it) => self.is_visible(it), - hir::ModuleDef::TraitAlias(it) => self.is_visible(it), hir::ModuleDef::TypeAlias(it) => self.is_visible(it), hir::ModuleDef::Macro(it) => self.is_visible(it), hir::ModuleDef::BuiltinType(_) => Visible::Yes, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs index 2eabf99fc697e..b33a547dee972 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs @@ -559,6 +559,7 @@ fn expected_type_and_name<'db>( token: &SyntaxToken, name_like: &ast::NameLike, ) -> (Option>, Option) { + let token = prev_special_biased_token_at_trivia(token.clone()); let mut node = match token.parent() { Some(it) => it, None => return (None, None), @@ -629,6 +630,17 @@ fn expected_type_and_name<'db>( .map(TypeInfo::original); (ty, None) }, + ast::BinExpr(it) => { + if let Some(ast::BinaryOp::Assignment { op: None }) = it.op_kind() { + let ty = it.lhs() 
+ .and_then(|lhs| sema.type_of_expr(&lhs)) + .or_else(|| it.rhs().and_then(|rhs| sema.type_of_expr(&rhs))) + .map(TypeInfo::original); + (ty, None) + } else { + (None, None) + } + }, ast::ArgList(_) => { cov_mark::hit!(expected_type_fn_param); ActiveParameter::at_token( @@ -712,6 +724,18 @@ fn expected_type_and_name<'db>( let def = sema.to_def(&it); (def.map(|def| def.ret_type(sema.db)), None) }, + ast::ReturnExpr(it) => { + let fn_ = sema.ancestors_with_macros(it.syntax().clone()) + .find_map(Either::::cast); + let ty = fn_.and_then(|f| match f { + Either::Left(f) => Some(sema.to_def(&f)?.ret_type(sema.db)), + Either::Right(f) => { + let ty = sema.type_of_expr(&f.into())?.original.as_callable(sema.db)?; + Some(ty.return_type()) + }, + }); + (ty, None) + }, ast::ClosureExpr(it) => { let ty = sema.type_of_expr(&it.into()); ty.and_then(|ty| ty.original.as_callable(sema.db)) @@ -923,20 +947,39 @@ fn classify_name_ref<'db>( None } }; - let after_if_expr = |node: SyntaxNode| { - let prev_expr = (|| { - let node = match node.parent().and_then(ast::ExprStmt::cast) { - Some(stmt) => stmt.syntax().clone(), - None => node, - }; - let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?; + let prev_expr = |node: SyntaxNode| { + let node = match node.parent().and_then(ast::ExprStmt::cast) { + Some(stmt) => stmt.syntax().clone(), + None => node, + }; + let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?; - ast::ExprStmt::cast(prev_sibling.clone()) - .and_then(|it| it.expr()) - .or_else(|| ast::Expr::cast(prev_sibling)) - })(); + match_ast! { + match prev_sibling { + ast::ExprStmt(stmt) => stmt.expr().filter(|_| stmt.semicolon_token().is_none()), + ast::LetStmt(stmt) => stmt.initializer().filter(|_| stmt.semicolon_token().is_none()), + ast::Expr(expr) => Some(expr), + _ => None, + } + } + }; + let after_if_expr = |node: SyntaxNode| { + let prev_expr = prev_expr(node); matches!(prev_expr, Some(ast::Expr::IfExpr(_))) }; + let after_incomplete_let = |node: SyntaxNode| { + prev_expr(node).and_then(|it| it.syntax().parent()).and_then(ast::LetStmt::cast) + }; + let before_else_kw = |node: &SyntaxNode| { + node.parent() + .and_then(ast::ExprStmt::cast) + .filter(|stmt| stmt.semicolon_token().is_none()) + .and_then(|stmt| non_trivia_sibling(stmt.syntax().clone().into(), Direction::Next)) + .and_then(NodeOrToken::into_node) + .filter(|next| next.kind() == SyntaxKind::ERROR) + .and_then(|next| next.first_token()) + .is_some_and(|token| token.kind() == SyntaxKind::ELSE_KW) + }; // We do not want to generate path completions when we are sandwiched between an item decl signature and its body. // ex. trait Foo $0 {} @@ -1025,9 +1068,6 @@ fn classify_name_ref<'db>( sema.source(trait_)?.value.generic_param_list() } } - hir::ModuleDef::TraitAlias(trait_) => { - sema.source(trait_)?.value.generic_param_list() - } hir::ModuleDef::TypeAlias(ty_) => { sema.source(ty_)?.value.generic_param_list() } @@ -1162,19 +1202,23 @@ fn classify_name_ref<'db>( Some(res) }; - let is_in_condition = |it: &ast::Expr| { + fn is_in_condition(it: &ast::Expr) -> bool { (|| { let parent = it.syntax().parent()?; if let Some(expr) = ast::WhileExpr::cast(parent.clone()) { Some(expr.condition()? == *it) - } else if let Some(expr) = ast::IfExpr::cast(parent) { + } else if let Some(expr) = ast::IfExpr::cast(parent.clone()) { Some(expr.condition()? 
== *it) + } else if let Some(expr) = ast::BinExpr::cast(parent) + && expr.op_token()?.kind() == T![&&] + { + Some(is_in_condition(&expr.into())) } else { None } })() .unwrap_or(false) - }; + } let make_path_kind_expr = |expr: ast::Expr| { let it = expr.syntax(); @@ -1235,10 +1279,15 @@ fn classify_name_ref<'db>( }; let is_func_update = func_update_record(it); let in_condition = is_in_condition(&expr); + let after_incomplete_let = after_incomplete_let(it.clone()).is_some(); + let incomplete_expr_stmt = + it.parent().and_then(ast::ExprStmt::cast).map(|it| it.semicolon_token().is_none()); let incomplete_let = it .parent() .and_then(ast::LetStmt::cast) - .is_some_and(|it| it.semicolon_token().is_none()); + .is_some_and(|it| it.semicolon_token().is_none()) + || after_incomplete_let && incomplete_expr_stmt.unwrap_or(true) && !before_else_kw(it); + let in_value = it.parent().and_then(Either::::cast).is_some(); let impl_ = fetch_immediate_impl(sema, original_file, expr.syntax()); let in_match_guard = match it.parent().and_then(ast::MatchArm::cast) { @@ -1259,7 +1308,9 @@ fn classify_name_ref<'db>( is_func_update, innermost_ret_ty, self_param, + in_value, incomplete_let, + after_incomplete_let, impl_, in_match_guard, }, @@ -1856,3 +1907,26 @@ fn next_non_trivia_sibling(ele: SyntaxElement) -> Option { } None } + +fn prev_special_biased_token_at_trivia(mut token: SyntaxToken) -> SyntaxToken { + while token.kind().is_trivia() + && let Some(prev) = token.prev_token() + && let T![=] + | T![+=] + | T![/=] + | T![*=] + | T![%=] + | T![>>=] + | T![<<=] + | T![-=] + | T![|=] + | T![&=] + | T![^=] + | T![return] + | T![break] + | T![continue] = prev.kind() + { + token = prev + } + token +} diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs index 75c20968e1e5f..445afa75f3f4f 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs @@ -1,3 +1,4 @@ +use base_db::salsa; use expect_test::{Expect, expect}; use hir::HirDisplay; @@ -9,11 +10,16 @@ use crate::{ fn check_expected_type_and_name(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { let (db, pos) = position(ra_fixture); let config = TEST_CONFIG; - let (completion_context, _analysis) = CompletionContext::new(&db, pos, &config).unwrap(); + let (completion_context, _analysis) = + salsa::attach(&db, || CompletionContext::new(&db, pos, &config).unwrap()); let ty = completion_context .expected_type - .map(|t| t.display_test(&db, completion_context.krate.to_display_target(&db)).to_string()) + .map(|t| { + salsa::attach(&db, || { + t.display_test(&db, completion_context.krate.to_display_target(&db)).to_string() + }) + }) .unwrap_or("?".to_owned()); let name = @@ -434,3 +440,89 @@ fn f(thing: u32) -> &u32 { expect!["ty: u32, name: ?"], ); } + +#[test] +fn expected_type_assign() { + check_expected_type_and_name( + r#" +enum State { Stop } +fn foo() { + let x: &mut State = &mut State::Stop; + x = $0; +} +"#, + expect![[r#"ty: &'_ mut State, name: ?"#]], + ); +} + +#[test] +fn expected_type_deref_assign() { + check_expected_type_and_name( + r#" +enum State { Stop } +fn foo() { + let x: &mut State = &mut State::Stop; + match x { + State::Stop => { + *x = $0; + }, + } +} +"#, + expect![[r#"ty: State, name: ?"#]], + ); +} + +#[test] +fn expected_type_deref_assign_at_block_end() { + check_expected_type_and_name( + r#" +enum State { Stop } +fn foo() { + let x: &mut 
State = &mut State::Stop; + match x { + State::Stop => { + *x = $0 + }, + } +} +"#, + expect![[r#"ty: State, name: ?"#]], + ); +} + +#[test] +fn expected_type_return_expr() { + check_expected_type_and_name( + r#" +enum State { Stop } +fn foo() -> State { + let _: i32 = if true { + 8 + } else { + return $0; + }; +} +"#, + expect![[r#"ty: State, name: ?"#]], + ); +} + +#[test] +fn expected_type_return_expr_in_closure() { + check_expected_type_and_name( + r#" +enum State { Stop } +fn foo() { + let _f: fn() -> State = || { + let _: i32 = if true { + 8 + } else { + return $0; + }; + }; +} +"#, + expect![[r#"ty: State, name: ?"#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs index f27cd07816657..5fb9dc93c93da 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs @@ -399,7 +399,6 @@ impl CompletionItemKind { SymbolKind::Struct => "st", SymbolKind::ToolModule => "tm", SymbolKind::Trait => "tt", - SymbolKind::TraitAlias => "tr", SymbolKind::TypeAlias => "ta", SymbolKind::TypeParam => "tp", SymbolKind::Union => "un", diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs index 3d7a4067c2cd0..dbf68dbe33afe 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs @@ -299,7 +299,7 @@ pub(crate) fn render_expr( .unwrap_or_else(|| String::from("...")) }; - let cfg = ctx.config.import_path_config(ctx.is_nightly); + let cfg = ctx.config.find_path_config(ctx.is_nightly); let label = expr.gen_source_code(&ctx.scope, &mut label_formatter, cfg, ctx.display_target).ok()?; @@ -486,10 +486,7 @@ fn render_resolution_path( | ScopeDef::Label(_) | ScopeDef::Unknown | ScopeDef::ModuleDef( - ModuleDef::Trait(_) - | ModuleDef::TraitAlias(_) - | ModuleDef::Module(_) - | ModuleDef::TypeAlias(_), + ModuleDef::Trait(_) | ModuleDef::Module(_) | ModuleDef::TypeAlias(_), ) => (), }; @@ -542,9 +539,6 @@ fn res_to_kind(resolution: ScopeDef) -> CompletionItemKind { ScopeDef::ModuleDef(Const(..)) => CompletionItemKind::SymbolKind(SymbolKind::Const), ScopeDef::ModuleDef(Static(..)) => CompletionItemKind::SymbolKind(SymbolKind::Static), ScopeDef::ModuleDef(Trait(..)) => CompletionItemKind::SymbolKind(SymbolKind::Trait), - ScopeDef::ModuleDef(TraitAlias(..)) => { - CompletionItemKind::SymbolKind(SymbolKind::TraitAlias) - } ScopeDef::ModuleDef(TypeAlias(..)) => CompletionItemKind::SymbolKind(SymbolKind::TypeAlias), ScopeDef::ModuleDef(BuiltinType(..)) => CompletionItemKind::BuiltinType, ScopeDef::GenericParam(param) => CompletionItemKind::SymbolKind(match param { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs index 9dc0c0234dc56..d326098f94071 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs @@ -164,7 +164,7 @@ impl Snippet { } fn import_edits(ctx: &CompletionContext<'_>, requires: &[ModPath]) -> Option> { - let import_cfg = ctx.config.import_path_config(ctx.is_nightly); + let import_cfg = ctx.config.find_path_config(ctx.is_nightly); let resolve = |import| { let item = ctx.scope.resolve_mod_path(import).next()?; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs 
b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs index fdc3d9a13bc92..809a26bf5de47 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs @@ -24,9 +24,9 @@ mod type_pos; mod use_tree; mod visibility; -use base_db::SourceDatabase; +use base_db::{SourceDatabase, salsa}; use expect_test::Expect; -use hir::PrefixKind; +use hir::{PrefixKind, setup_tracing}; use ide_db::{ FilePosition, RootDatabase, SnippetCap, imports::insert_use::{ImportGranularity, InsertUseConfig}, @@ -120,6 +120,8 @@ fn completion_list_with_config_raw( include_keywords: bool, trigger_character: Option, ) -> Vec { + let _tracing = setup_tracing(); + // filter out all but one built-in type completion for smaller test outputs let items = get_all_items(config, ra_fixture, trigger_character); items @@ -241,7 +243,7 @@ pub(crate) fn check_edit_with_config( let ra_fixture_after = trim_indent(ra_fixture_after); let (db, position) = position(ra_fixture_before); let completions: Vec = - crate::completions(&db, &config, position, None).unwrap(); + salsa::attach(&db, || crate::completions(&db, &config, position, None).unwrap()); let (completion,) = completions .iter() .filter(|it| it.lookup() == what) @@ -304,7 +306,7 @@ pub(crate) fn get_all_items( trigger_character: Option, ) -> Vec { let (db, position) = position(code); - let res = crate::completions(&db, &config, position, trigger_character) + let res = salsa::attach(&db, || crate::completions(&db, &config, position, trigger_character)) .map_or_else(Vec::default, Into::into); // validate res.iter().for_each(|it| { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs index 33f729f016645..5cc72ef845bf5 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs @@ -450,6 +450,155 @@ fn completes_in_let_initializer() { ) } +#[test] +fn completes_let_else() { + check( + r#"fn main() { let _ = 2 $0 }"#, + expect![[r#" + fn main() fn() + bt u32 u32 + kw async + kw const + kw crate:: + kw else + kw enum + kw extern + kw false + kw fn + kw for + kw if + kw if let + kw impl + kw impl for + kw let + kw letm + kw loop + kw match + kw mod + kw return + kw self:: + kw static + kw struct + kw trait + kw true + kw type + kw union + kw unsafe + kw use + kw while + kw while let + sn macro_rules + sn pd + sn ppd + "#]], + ); + + check( + r#"fn main() { let _ = 2 el$0 }"#, + expect![[r#" + fn main() fn() + bt u32 u32 + kw async + kw const + kw crate:: + kw else + kw enum + kw extern + kw false + kw fn + kw for + kw if + kw if let + kw impl + kw impl for + kw let + kw letm + kw loop + kw match + kw mod + kw return + kw self:: + kw static + kw struct + kw trait + kw true + kw type + kw union + kw unsafe + kw use + kw while + kw while let + sn macro_rules + sn pd + sn ppd + "#]], + ); + + check_edit( + "else", + r#" +fn main() { + let _ = 2 $0 +} +"#, + r#" +fn main() { + let _ = 2 else { + $0 +}; +} +"#, + ); + + check_edit( + "else", + r#" +fn main() { + let _ = 2 el$0 +} +"#, + r#" +fn main() { + let _ = 2 else { + $0 +}; +} +"#, + ); + + check_edit( + "else", + r#" +fn main() { + let _ = 2 $0; +} +"#, + r#" +fn main() { + let _ = 2 else { + $0 +}; +} +"#, + ); + + check_edit( + "else", + r#" +fn main() { + let _ = 2 el$0; +} +"#, + r#" +fn main() { + let _ = 2 else { + $0 +}; +} +"#, + ); +} + #[test] fn 
completes_after_ref_expr() { check( @@ -1210,6 +1359,353 @@ fn foo() { if foo {} el$0 { let x = 92; } } sn ppd "#]], ); + check( + r#" +fn foo() { let x = if foo {} $0 } +"#, + expect![[r#" + fn foo() fn() + bt u32 u32 + kw async + kw const + kw crate:: + kw else + kw else if + kw enum + kw extern + kw false + kw fn + kw for + kw if + kw if let + kw impl + kw impl for + kw let + kw letm + kw loop + kw match + kw mod + kw return + kw self:: + kw static + kw struct + kw trait + kw true + kw type + kw union + kw unsafe + kw use + kw while + kw while let + sn macro_rules + sn pd + sn ppd + "#]], + ); + check( + r#" +fn foo() { let x = if foo {} el$0 } +"#, + expect![[r#" + fn foo() fn() + lc x () + bt u32 u32 + kw async + kw const + kw crate:: + kw else + kw else if + kw enum + kw extern + kw false + kw fn + kw for + kw if + kw if let + kw impl + kw impl for + kw let + kw letm + kw loop + kw match + kw mod + kw return + kw self:: + kw static + kw struct + kw trait + kw true + kw type + kw union + kw unsafe + kw use + kw while + kw while let + sn macro_rules + sn pd + sn ppd + "#]], + ); + check( + r#" +fn foo() { let x = if foo {} $0 let y = 92; } +"#, + expect![[r#" + fn foo() fn() + bt u32 u32 + kw async + kw const + kw crate:: + kw else + kw else if + kw enum + kw extern + kw false + kw fn + kw for + kw if + kw if let + kw impl + kw impl for + kw let + kw letm + kw loop + kw match + kw mod + kw return + kw self:: + kw static + kw struct + kw trait + kw true + kw type + kw union + kw unsafe + kw use + kw while + kw while let + sn macro_rules + sn pd + sn ppd + "#]], + ); + check( + r#" +fn foo() { let x = if foo {} el$0 let y = 92; } +"#, + expect![[r#" + fn foo() fn() + lc x () + bt u32 u32 + kw async + kw const + kw crate:: + kw else + kw else if + kw enum + kw extern + kw false + kw fn + kw for + kw if + kw if let + kw impl + kw impl for + kw let + kw letm + kw loop + kw match + kw mod + kw return + kw self:: + kw static + kw struct + kw trait + kw true + kw type + kw union + kw unsafe + kw use + kw while + kw while let + sn macro_rules + sn pd + sn ppd + "#]], + ); + check( + r#" +fn foo() { let x = if foo {} $0; } +"#, + expect![[r#" + fn foo() fn() + bt u32 u32 + kw async + kw const + kw crate:: + kw else + kw else if + kw enum + kw extern + kw false + kw fn + kw for + kw if + kw if let + kw impl + kw impl for + kw let + kw letm + kw loop + kw match + kw mod + kw return + kw self:: + kw static + kw struct + kw trait + kw true + kw type + kw union + kw unsafe + kw use + kw while + kw while let + sn macro_rules + sn pd + sn ppd + "#]], + ); + check( + r#" +fn foo() { let x = if foo {} el$0; } +"#, + expect![[r#" + fn foo() fn() + lc x () + bt u32 u32 + kw async + kw const + kw crate:: + kw else + kw else if + kw enum + kw extern + kw false + kw fn + kw for + kw if + kw if let + kw impl + kw impl for + kw let + kw letm + kw loop + kw match + kw mod + kw return + kw self:: + kw static + kw struct + kw trait + kw true + kw type + kw union + kw unsafe + kw use + kw while + kw while let + sn macro_rules + sn pd + sn ppd + "#]], + ); + check( + r#" +fn foo() { let x = if foo {} $0; let y = 92; } +"#, + expect![[r#" + fn foo() fn() + bt u32 u32 + kw async + kw const + kw crate:: + kw else + kw else if + kw enum + kw extern + kw false + kw fn + kw for + kw if + kw if let + kw impl + kw impl for + kw let + kw letm + kw loop + kw match + kw mod + kw return + kw self:: + kw static + kw struct + kw trait + kw true + kw type + kw union + kw unsafe + kw use + kw while + kw while let + sn macro_rules + 
sn pd + sn ppd + "#]], + ); + check( + r#" +fn foo() { let x = if foo {} $0 else {}; } +"#, + expect![[r#" + fn foo fn() + bt u32 u32 + kw async + kw const + kw crate:: + kw else + kw else if + kw enum + kw extern + kw false + kw fn + kw for + kw if + kw if let + kw impl + kw impl for + kw let + kw letm + kw loop + kw match + kw mod + kw return + kw self:: + kw static + kw struct + kw trait + kw true + kw type + kw union + kw unsafe + kw use + kw while + kw while let + sn macro_rules + sn pd + sn ppd + "#]], + ); } #[test] @@ -1515,7 +2011,7 @@ fn main() { en Enum Enum fn function() fn() fn main() fn() - lc variable &'static str + lc variable &str ma helper!(…) macro_rules! helper ma m!(…) macro_rules! m ma makro!(…) macro_rules! makro @@ -1990,6 +2486,7 @@ fn bar() { md rust_2024 (use core::prelude::rust_2024) tt Clone tt Copy + tt FromIterator tt IntoIterator tt Iterator ta Result (use core::fmt::Result) @@ -2275,3 +2772,53 @@ fn foo() { "#]], ); } + +#[test] +fn let_in_condition() { + check_edit("let", r#"fn f() { if $0 {} }"#, r#"fn f() { if let $1 = $0 {} }"#); +} + +#[test] +fn let_in_let_chain() { + check_edit("let", r#"fn f() { if true && $0 {} }"#, r#"fn f() { if true && let $1 = $0 {} }"#); +} + +#[test] +fn private_inherent_and_public_trait() { + check( + r#" +struct Foo; + +mod private { + impl super::Foo { + fn method(&self) {} + } +} + +trait Trait { + fn method(&self) {} +} +impl Trait for Foo {} + +fn main() { + Foo.$0 +} + "#, + expect![[r#" + me method() (as Trait) fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn const const {} + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs index 27c91bc7c4558..d78a3731011e8 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs @@ -1,3 +1,4 @@ +use base_db::salsa; use expect_test::{Expect, expect}; use crate::{ @@ -19,25 +20,29 @@ fn check_with_config( let (ctx, analysis) = crate::context::CompletionContext::new(&db, position, &config).unwrap(); let mut acc = crate::completions::Completions::default(); - if let CompletionAnalysis::Name(NameContext { kind: NameKind::IdentPat(pat_ctx), .. }) = - &analysis - { - crate::completions::flyimport::import_on_the_fly_pat(&mut acc, &ctx, pat_ctx); - } - if let CompletionAnalysis::NameRef(name_ref_ctx) = &analysis { - match &name_ref_ctx.kind { - NameRefKind::Path(path) => { - crate::completions::flyimport::import_on_the_fly_path(&mut acc, &ctx, path); - } - NameRefKind::DotAccess(dot_access) => { - crate::completions::flyimport::import_on_the_fly_dot(&mut acc, &ctx, dot_access); - } - NameRefKind::Pattern(pattern) => { - crate::completions::flyimport::import_on_the_fly_pat(&mut acc, &ctx, pattern); + salsa::attach(ctx.db, || { + if let CompletionAnalysis::Name(NameContext { kind: NameKind::IdentPat(pat_ctx), .. 
}) = + &analysis + { + crate::completions::flyimport::import_on_the_fly_pat(&mut acc, &ctx, pat_ctx); + } + if let CompletionAnalysis::NameRef(name_ref_ctx) = &analysis { + match &name_ref_ctx.kind { + NameRefKind::Path(path) => { + crate::completions::flyimport::import_on_the_fly_path(&mut acc, &ctx, path); + } + NameRefKind::DotAccess(dot_access) => { + crate::completions::flyimport::import_on_the_fly_dot( + &mut acc, &ctx, dot_access, + ); + } + NameRefKind::Pattern(pattern) => { + crate::completions::flyimport::import_on_the_fly_pat(&mut acc, &ctx, pattern); + } + _ => (), } - _ => (), } - } + }); expect.assert_eq(&super::render_completion_list(Vec::from(acc))); } @@ -114,7 +119,7 @@ fn main() { } "#, r#" -use dep::{some_module::{SecondStruct, ThirdStruct}, FirstStruct}; +use dep::{FirstStruct, some_module::{SecondStruct, ThirdStruct}}; fn main() { ThirdStruct diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs index 148203107c4cf..84ddff8f617ac 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs @@ -677,6 +677,7 @@ fn bar() -> Bar { expect![[r#" fn foo() (as Foo) fn() -> Self ex Bar + ex Bar::foo() ex bar() "#]], ); @@ -706,6 +707,7 @@ fn bar() -> Bar { fn bar() fn() fn foo() (as Foo) fn() -> Self ex Bar + ex Bar::foo() ex bar() "#]], ); @@ -734,6 +736,7 @@ fn bar() -> Bar { expect![[r#" fn foo() (as Foo) fn() -> Self ex Bar + ex Bar::foo() ex bar() "#]], ); diff --git a/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs b/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs index 9edfc113f764c..4fb7d142ed5f1 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs @@ -125,7 +125,6 @@ pub fn generic_def_for_node( hir::PathResolution::Def(hir::ModuleDef::Adt(it)) => it.into(), hir::PathResolution::Def(hir::ModuleDef::Function(it)) => it.into(), hir::PathResolution::Def(hir::ModuleDef::Trait(it)) => it.into(), - hir::PathResolution::Def(hir::ModuleDef::TraitAlias(it)) => it.into(), hir::PathResolution::Def(hir::ModuleDef::TypeAlias(it)) => it.into(), hir::PathResolution::Def(hir::ModuleDef::Variant(it)) => { variant = Some(it); diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs index 2a4fcf6a2e5f7..c051fd863de6f 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs @@ -16,7 +16,7 @@ use hir::{ ExternCrateDecl, Field, Function, GenericDef, GenericParam, GenericSubstitution, HasContainer, HasVisibility, HirDisplay, Impl, InlineAsmOperand, ItemContainer, Label, Local, Macro, Module, ModuleDef, Name, PathResolution, Semantics, Static, StaticLifetime, Struct, ToolModule, Trait, - TraitAlias, TupleField, TypeAlias, Variant, VariantDef, Visibility, + TupleField, TypeAlias, Variant, VariantDef, Visibility, }; use span::Edition; use stdx::{format_to, impl_from}; @@ -40,7 +40,6 @@ pub enum Definition { Const(Const), Static(Static), Trait(Trait), - TraitAlias(TraitAlias), TypeAlias(TypeAlias), SelfType(Impl), GenericParam(GenericParam), @@ -83,7 +82,6 @@ impl Definition { Definition::Const(it) => it.module(db), Definition::Static(it) => it.module(db), Definition::Trait(it) => it.module(db), - Definition::TraitAlias(it) => it.module(db), Definition::TypeAlias(it) => 
it.module(db), Definition::Variant(it) => it.module(db), Definition::SelfType(it) => it.module(db), @@ -122,7 +120,6 @@ impl Definition { Definition::Const(it) => container_to_definition(it.container(db)), Definition::Static(it) => container_to_definition(it.container(db)), Definition::Trait(it) => container_to_definition(it.container(db)), - Definition::TraitAlias(it) => container_to_definition(it.container(db)), Definition::TypeAlias(it) => container_to_definition(it.container(db)), Definition::Variant(it) => Some(Adt::Enum(it.parent_enum(db)).into()), Definition::SelfType(it) => Some(it.module(db).into()), @@ -151,7 +148,6 @@ impl Definition { Definition::Const(it) => it.visibility(db), Definition::Static(it) => it.visibility(db), Definition::Trait(it) => it.visibility(db), - Definition::TraitAlias(it) => it.visibility(db), Definition::TypeAlias(it) => it.visibility(db), Definition::Variant(it) => it.visibility(db), Definition::ExternCrateDecl(it) => it.visibility(db), @@ -185,7 +181,6 @@ impl Definition { Definition::Const(it) => it.name(db)?, Definition::Static(it) => it.name(db), Definition::Trait(it) => it.name(db), - Definition::TraitAlias(it) => it.name(db), Definition::TypeAlias(it) => it.name(db), Definition::BuiltinType(it) => it.name(), Definition::TupleField(it) => it.name(), @@ -230,7 +225,6 @@ impl Definition { Definition::Const(it) => it.docs_with_rangemap(db), Definition::Static(it) => it.docs_with_rangemap(db), Definition::Trait(it) => it.docs_with_rangemap(db), - Definition::TraitAlias(it) => it.docs_with_rangemap(db), Definition::TypeAlias(it) => { it.docs_with_rangemap(db).or_else(|| { // docs are missing, try to fall back to the docs of the aliased item. @@ -265,8 +259,8 @@ impl Definition { Definition::ExternCrateDecl(it) => it.docs_with_rangemap(db), Definition::BuiltinAttr(it) => { - let name = it.name(db); - let AttributeTemplate { word, list, name_value_str } = it.template(db)?; + let name = it.name(); + let AttributeTemplate { word, list, name_value_str } = it.template()?; let mut docs = "Valid forms are:".to_owned(); if word { format_to!(docs, "\n - #\\[{}]", name.display(db, display_target.edition)); @@ -321,7 +315,6 @@ impl Definition { Definition::Const(it) => it.display(db, display_target).to_string(), Definition::Static(it) => it.display(db, display_target).to_string(), Definition::Trait(it) => it.display(db, display_target).to_string(), - Definition::TraitAlias(it) => it.display(db, display_target).to_string(), Definition::TypeAlias(it) => it.display(db, display_target).to_string(), Definition::BuiltinType(it) => { it.name().display(db, display_target.edition).to_string() @@ -355,7 +348,7 @@ impl Definition { Definition::Label(it) => it.name(db).display(db, display_target.edition).to_string(), Definition::ExternCrateDecl(it) => it.display(db, display_target).to_string(), Definition::BuiltinAttr(it) => { - format!("#[{}]", it.name(db).display(db, display_target.edition)) + format!("#[{}]", it.name().display(db, display_target.edition)) } Definition::ToolModule(it) => { it.name(db).display(db, display_target.edition).to_string() @@ -370,7 +363,7 @@ impl Definition { } } -fn find_std_module( +pub fn find_std_module( famous_defs: &FamousDefs<'_, '_>, name: &str, edition: Edition, @@ -589,7 +582,6 @@ impl<'db> NameClass<'db> { ast::Item::Module(it) => Definition::Module(sema.to_def(&it)?), ast::Item::Static(it) => Definition::Static(sema.to_def(&it)?), ast::Item::Trait(it) => Definition::Trait(sema.to_def(&it)?), - ast::Item::TraitAlias(it) => 
Definition::TraitAlias(sema.to_def(&it)?), ast::Item::TypeAlias(it) => Definition::TypeAlias(sema.to_def(&it)?), ast::Item::Enum(it) => Definition::Adt(hir::Adt::Enum(sema.to_def(&it)?)), ast::Item::Struct(it) => Definition::Adt(hir::Adt::Struct(sema.to_def(&it)?)), @@ -895,7 +887,7 @@ impl<'db> NameRefClass<'db> { } impl_from!( - Field, Module, Function, Adt, Variant, Const, Static, Trait, TraitAlias, TypeAlias, BuiltinType, Local, + Field, Module, Function, Adt, Variant, Const, Static, Trait, TypeAlias, BuiltinType, Local, GenericParam, Label, Macro, ExternCrateDecl for Definition ); @@ -975,7 +967,6 @@ impl From for Definition { ModuleDef::Const(it) => Definition::Const(it), ModuleDef::Static(it) => Definition::Static(it), ModuleDef::Trait(it) => Definition::Trait(it), - ModuleDef::TraitAlias(it) => Definition::TraitAlias(it), ModuleDef::TypeAlias(it) => Definition::TypeAlias(it), ModuleDef::Macro(it) => Definition::Macro(it), ModuleDef::BuiltinType(it) => Definition::BuiltinType(it), @@ -1017,7 +1008,6 @@ impl From for Definition { GenericDef::Function(it) => it.into(), GenericDef::Adt(it) => it.into(), GenericDef::Trait(it) => it.into(), - GenericDef::TraitAlias(it) => it.into(), GenericDef::TypeAlias(it) => it.into(), GenericDef::Impl(it) => it.into(), GenericDef::Const(it) => it.into(), @@ -1033,7 +1023,6 @@ impl TryFrom for GenericDef { Definition::Function(it) => Ok(it.into()), Definition::Adt(it) => Ok(it.into()), Definition::Trait(it) => Ok(it.into()), - Definition::TraitAlias(it) => Ok(it.into()), Definition::TypeAlias(it) => Ok(it.into()), Definition::SelfType(it) => Ok(it.into()), Definition::Const(it) => Ok(it.into()), diff --git a/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs b/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs index 30c355f8b3f93..cab19aadfd010 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs @@ -195,8 +195,7 @@ macro_rules! impl_has_docs { } impl_has_docs![ - Variant, Field, Static, Const, Trait, TraitAlias, TypeAlias, Macro, Function, Adt, Module, - Impl, Crate, + Variant, Field, Static, Const, Trait, TypeAlias, Macro, Function, Adt, Module, Impl, Crate, ]; macro_rules! 
impl_has_docs_enum { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs index 8e687385086fc..8eea2b81bab6d 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs @@ -210,6 +210,10 @@ impl FamousDefs<'_, '_> { fn find_lang_crate(&self, origin: LangCrateOrigin) -> Option { let krate = self.1; let db = self.0.db; + if krate.origin(db) == CrateOrigin::Lang(origin) { + return Some(krate); + } + let res = krate .dependencies(db) .into_iter() diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs index 9f35988924b92..0c235c8d9a57a 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs @@ -3,7 +3,7 @@ use std::ops::ControlFlow; use hir::{ - AsAssocItem, AssocItem, AssocItemContainer, Complete, Crate, HasCrate, ImportPathConfig, + AsAssocItem, AssocItem, AssocItemContainer, Complete, Crate, FindPathConfig, HasCrate, ItemInNs, ModPath, Module, ModuleDef, Name, PathResolution, PrefixKind, ScopeDef, Semantics, SemanticsScope, Trait, TyFingerprint, Type, db::HirDatabase, }; @@ -19,6 +19,17 @@ use crate::{ items_locator::{self, AssocSearchMode, DEFAULT_QUERY_SEARCH_LIMIT}, }; +#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)] +pub struct ImportPathConfig { + /// If true, prefer to unconditionally use imports of the `core` and `alloc` crate + /// over the std. + pub prefer_no_std: bool, + /// If true, prefer import paths containing a prelude module. + pub prefer_prelude: bool, + /// If true, prefer abs path (starting with `::`) where it is available. 
+ pub prefer_absolute: bool, +} + /// A candidate for import, derived during various IDE activities: /// * completion with imports on the fly proposals /// * completion edit resolve requests @@ -296,6 +307,12 @@ impl<'db> ImportAssets<'db> { Some(it) => it, None => return >::default().into_iter(), }; + let cfg = FindPathConfig { + prefer_no_std: cfg.prefer_no_std, + prefer_prelude: cfg.prefer_prelude, + prefer_absolute: cfg.prefer_absolute, + allow_unstable: sema.is_nightly(scope.krate()), + }; let db = sema.db; let krate = self.module_with_candidate.krate(); let scope_definitions = self.scope_definitions(sema); @@ -475,8 +492,6 @@ fn validate_resolvable( } // FIXME ModuleDef::Trait(_) => return None, - // FIXME - ModuleDef::TraitAlias(_) => return None, ModuleDef::TypeAlias(alias) => alias.ty(db), ModuleDef::BuiltinType(builtin) => builtin.ty(db), ModuleDef::Adt(adt) => adt.ty(db), @@ -700,7 +715,7 @@ fn get_mod_path( item_to_search: ItemInNs, module_with_candidate: &Module, prefixed: Option, - cfg: ImportPathConfig, + cfg: FindPathConfig, ) -> Option { if let Some(prefix_kind) = prefixed { module_with_candidate.find_use_path(db, item_to_search, prefix_kind, cfg) diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs index 08cd8f28608ca..b174adfd7e448 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs @@ -194,7 +194,7 @@ fn insert_use_with_alias_option( use_tree = use_tree.clone_for_update(); use_tree.wrap_in_tree_list(); } - let use_item = make::use_(None, use_tree).clone_for_update(); + let use_item = make::use_(None, None, use_tree).clone_for_update(); for attr in scope.required_cfgs.iter().map(|attr| attr.syntax().clone_subtree().clone_for_update()) { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs index 4a00854f01a5f..3350e1c3d207f 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs @@ -782,18 +782,18 @@ fn merge_groups_long_last_list() { fn merge_groups_long_full_nested() { check_crate( "std::foo::bar::Baz", - r"use std::foo::bar::{Qux, quux::{Fez, Fizz}};", - r"use std::foo::bar::{quux::{Fez, Fizz}, Baz, Qux};", + r"use std::foo::bar::{quux::{Fez, Fizz}, Qux};", + r"use std::foo::bar::{Baz, Qux, quux::{Fez, Fizz}};", ); check_crate( "std::foo::bar::r#Baz", - r"use std::foo::bar::{Qux, quux::{Fez, Fizz}};", - r"use std::foo::bar::{quux::{Fez, Fizz}, r#Baz, Qux};", + r"use std::foo::bar::{quux::{Fez, Fizz}, Qux};", + r"use std::foo::bar::{r#Baz, Qux, quux::{Fez, Fizz}};", ); check_one( "std::foo::bar::Baz", - r"use {std::foo::bar::{Qux, quux::{Fez, Fizz}}};", - r"use {std::foo::bar::{quux::{Fez, Fizz}, Baz, Qux}};", + r"use {std::foo::bar::{quux::{Fez, Fizz}}, Qux};", + r"use {Qux, std::foo::bar::{Baz, quux::{Fez, Fizz}}};", ); } @@ -811,13 +811,13 @@ use std::foo::bar::{Qux, quux::{Fez, Fizz}};", fn merge_groups_full_nested_deep() { check_crate( "std::foo::bar::quux::Baz", - r"use std::foo::bar::{Qux, quux::{Fez, Fizz}};", - r"use std::foo::bar::{quux::{Baz, Fez, Fizz}, Qux};", + r"use std::foo::bar::{quux::{Fez, Fizz}, Qux};", + r"use std::foo::bar::{Qux, quux::{Baz, Fez, Fizz}};", ); check_one( "std::foo::bar::quux::Baz", - r"use {std::foo::bar::{Qux, quux::{Fez, Fizz}}};", - r"use 
{std::foo::bar::{quux::{Baz, Fez, Fizz}, Qux}};", + r"use {std::foo::bar::{quux::{Fez, Fizz}}, Qux};", + r"use {Qux, std::foo::bar::quux::{Baz, Fez, Fizz}};", ); } @@ -988,8 +988,8 @@ use syntax::SyntaxKind::{self, *};", fn merge_glob_nested() { check_crate( "foo::bar::quux::Fez", - r"use foo::bar::{Baz, quux::*};", - r"use foo::bar::{quux::{Fez, *}, Baz};", + r"use foo::bar::{quux::*, Baz};", + r"use foo::bar::{Baz, quux::{Fez, *}};", ) } @@ -998,7 +998,7 @@ fn merge_nested_considers_first_segments() { check_crate( "hir_ty::display::write_bounds_like_dyn_trait", r"use hir_ty::{autoderef, display::{HirDisplayError, HirFormatter}, method_resolution};", - r"use hir_ty::{autoderef, display::{write_bounds_like_dyn_trait, HirDisplayError, HirFormatter}, method_resolution};", + r"use hir_ty::{autoderef, display::{HirDisplayError, HirFormatter, write_bounds_like_dyn_trait}, method_resolution};", ); } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs index 61962e593476c..4e779a7d858e5 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs @@ -3,7 +3,6 @@ use std::cmp::Ordering; use itertools::{EitherOrBoth, Itertools}; use parser::T; -use stdx::is_upper_snake_case; use syntax::{ Direction, SyntaxElement, algo, ast::{ @@ -543,12 +542,13 @@ fn use_tree_cmp_bin_search(lhs: &ast::UseTree, rhs: &ast::UseTree) -> Ordering { } } -/// Orders use trees following `rustfmt`'s algorithm for ordering imports, which is `self`, `super` -/// and `crate` first, then identifier imports with lowercase ones first and upper snake case -/// (e.g. UPPER_SNAKE_CASE) ones last, then glob imports, and at last list imports. +/// Orders use trees following `rustfmt`'s version sorting algorithm for ordering imports. /// -/// Example: `foo::{self, baz, foo, Baz, Qux, FOO_BAZ, *, {Bar}}` -/// Ref: . +/// Example: `foo::{self, Baz, FOO_BAZ, Qux, baz, foo, *, {Bar}}` +/// +/// Ref: +/// - +/// - pub(super) fn use_tree_cmp(a: &ast::UseTree, b: &ast::UseTree) -> Ordering { let a_is_simple_path = a.is_simple_path() && a.rename().is_none(); let b_is_simple_path = b.is_simple_path() && b.rename().is_none(); @@ -613,26 +613,9 @@ fn path_segment_cmp(a: &ast::PathSegment, b: &ast::PathSegment) -> Ordering { (Some(_), None) => Ordering::Greater, (None, Some(_)) => Ordering::Less, (Some(a_name), Some(b_name)) => { - // snake_case < UpperCamelCase < UPPER_SNAKE_CASE let a_text = a_name.as_str().trim_start_matches("r#"); let b_text = b_name.as_str().trim_start_matches("r#"); - if a_text.starts_with(char::is_lowercase) - && b_text.starts_with(char::is_uppercase) - { - return Ordering::Less; - } - if a_text.starts_with(char::is_uppercase) - && b_text.starts_with(char::is_lowercase) - { - return Ordering::Greater; - } - if !is_upper_snake_case(a_text) && is_upper_snake_case(b_text) { - return Ordering::Less; - } - if is_upper_snake_case(a_text) && !is_upper_snake_case(b_text) { - return Ordering::Greater; - } - a_text.cmp(b_text) + version_sort::version_sort(a_text, b_text) } } } @@ -740,3 +723,189 @@ fn remove_subtree_if_only_self(use_tree: &ast::UseTree) { _ => (), } } + +// Taken from rustfmt +// https://github.com/rust-lang/rustfmt/blob/0332da01486508710f2a542111e40513bfb215aa/src/sort.rs +mod version_sort { + // Original rustfmt code contains some clippy lints. + // Suppress them to minimize changes from upstream. 
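As a reading aid (not part of the patch), here is a small sketch of the orderings produced by the vendored `version_sort` defined below; the expected results follow from its chunking logic (underscores first, string chunks compared byte-wise, numeric chunks compared by value with a leading-zeros tie-break), and the sketch assumes it sits alongside the vendored module inside `merge_imports.rs`:

#[cfg(test)]
mod version_sort_examples {
    use std::cmp::Ordering;

    use super::version_sort::version_sort;

    #[test]
    fn ordering_samples() {
        // Numeric chunks compare by value, not lexicographically: 9 < 10.
        assert_eq!(version_sort("x9", "x10"), Ordering::Less);
        // A leading underscore sorts before any other character.
        assert_eq!(version_sort("_private", "aaa"), Ordering::Less);
        // Equal numeric values: the side with more leading zeros sorts first.
        assert_eq!(version_sort("v01", "v1"), Ordering::Less);
        // String chunks use byte-wise comparison, so uppercase sorts before lowercase.
        assert_eq!(version_sort("Baz", "baz"), Ordering::Less);
    }
}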
+ #![allow(clippy::all)] + + use std::cmp::Ordering; + + use itertools::{EitherOrBoth, Itertools}; + + struct VersionChunkIter<'a> { + ident: &'a str, + start: usize, + } + + impl<'a> VersionChunkIter<'a> { + pub(crate) fn new(ident: &'a str) -> Self { + Self { ident, start: 0 } + } + + fn parse_numeric_chunk( + &mut self, + mut chars: std::str::CharIndices<'a>, + ) -> Option> { + let mut end = self.start; + let mut is_end_of_chunk = false; + + while let Some((idx, c)) = chars.next() { + end = self.start + idx; + + if c.is_ascii_digit() { + continue; + } + + is_end_of_chunk = true; + break; + } + + let source = if is_end_of_chunk { + let value = &self.ident[self.start..end]; + self.start = end; + value + } else { + let value = &self.ident[self.start..]; + self.start = self.ident.len(); + value + }; + + let zeros = source.chars().take_while(|c| *c == '0').count(); + let value = source.parse::().ok()?; + + Some(VersionChunk::Number { value, zeros, source }) + } + + fn parse_str_chunk( + &mut self, + mut chars: std::str::CharIndices<'a>, + ) -> Option> { + let mut end = self.start; + let mut is_end_of_chunk = false; + + while let Some((idx, c)) = chars.next() { + end = self.start + idx; + + if c == '_' { + is_end_of_chunk = true; + break; + } + + if !c.is_ascii_digit() { + continue; + } + + is_end_of_chunk = true; + break; + } + + let source = if is_end_of_chunk { + let value = &self.ident[self.start..end]; + self.start = end; + value + } else { + let value = &self.ident[self.start..]; + self.start = self.ident.len(); + value + }; + + Some(VersionChunk::Str(source)) + } + } + + impl<'a> Iterator for VersionChunkIter<'a> { + type Item = VersionChunk<'a>; + + fn next(&mut self) -> Option { + let mut chars = self.ident[self.start..].char_indices(); + let (_, next) = chars.next()?; + + if next == '_' { + self.start = self.start + next.len_utf8(); + return Some(VersionChunk::Underscore); + } + + if next.is_ascii_digit() { + return self.parse_numeric_chunk(chars); + } + + self.parse_str_chunk(chars) + } + } + + /// Represents a chunk in the version-sort algorithm + #[derive(Debug, PartialEq, Eq)] + enum VersionChunk<'a> { + /// A single `_` in an identifier. Underscores are sorted before all other characters. + Underscore, + /// A &str chunk in the version sort. + Str(&'a str), + /// A numeric chunk in the version sort. Keeps track of the numeric value and leading zeros. + Number { value: usize, zeros: usize, source: &'a str }, + } + + /// Determine which side of the version-sort comparison had more leading zeros. + #[derive(Debug, PartialEq, Eq)] + enum MoreLeadingZeros { + Left, + Right, + Equal, + } + + pub(super) fn version_sort(a: &str, b: &str) -> Ordering { + let iter_a = VersionChunkIter::new(a); + let iter_b = VersionChunkIter::new(b); + let mut more_leading_zeros = MoreLeadingZeros::Equal; + + for either_or_both in iter_a.zip_longest(iter_b) { + match either_or_both { + EitherOrBoth::Left(_) => return std::cmp::Ordering::Greater, + EitherOrBoth::Right(_) => return std::cmp::Ordering::Less, + EitherOrBoth::Both(a, b) => match (a, b) { + (VersionChunk::Underscore, VersionChunk::Underscore) => { + continue; + } + (VersionChunk::Underscore, _) => return std::cmp::Ordering::Less, + (_, VersionChunk::Underscore) => return std::cmp::Ordering::Greater, + (VersionChunk::Str(ca), VersionChunk::Str(cb)) + | (VersionChunk::Str(ca), VersionChunk::Number { source: cb, .. }) + | (VersionChunk::Number { source: ca, .. 
}, VersionChunk::Str(cb)) => { + match ca.cmp(&cb) { + std::cmp::Ordering::Equal => { + continue; + } + order @ _ => return order, + } + } + ( + VersionChunk::Number { value: va, zeros: lza, .. }, + VersionChunk::Number { value: vb, zeros: lzb, .. }, + ) => match va.cmp(&vb) { + std::cmp::Ordering::Equal => { + if lza == lzb { + continue; + } + + if more_leading_zeros == MoreLeadingZeros::Equal && lza > lzb { + more_leading_zeros = MoreLeadingZeros::Left; + } else if more_leading_zeros == MoreLeadingZeros::Equal && lza < lzb { + more_leading_zeros = MoreLeadingZeros::Right; + } + continue; + } + order @ _ => return order, + }, + }, + } + } + + match more_leading_zeros { + MoreLeadingZeros::Equal => std::cmp::Ordering::Equal, + MoreLeadingZeros::Left => std::cmp::Ordering::Less, + MoreLeadingZeros::Right => std::cmp::Ordering::Greater, + } + } +} diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs index 49f7f63a04a42..44bccd86d8709 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs @@ -51,7 +51,7 @@ use salsa::Durability; use std::{fmt, mem::ManuallyDrop}; use base_db::{ - CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, Files, RootQueryDb, + CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, Files, Nonce, RootQueryDb, SourceDatabase, SourceRoot, SourceRootId, SourceRootInput, query_group, }; use hir::{ @@ -66,13 +66,9 @@ pub use rustc_hash::{FxHashMap, FxHashSet, FxHasher}; pub use ::line_index; /// `base_db` is normally also needed in places where `ide_db` is used, so this re-export is for convenience. -pub use base_db; +pub use base_db::{self, FxIndexMap, FxIndexSet}; pub use span::{self, FileId}; -pub type FxIndexSet = indexmap::IndexSet>; -pub type FxIndexMap = - indexmap::IndexMap>; - pub type FilePosition = FilePositionWrapper; pub type FileRange = FileRangeWrapper; @@ -87,6 +83,7 @@ pub struct RootDatabase { storage: ManuallyDrop>, files: Arc, crates_map: Arc, + nonce: Nonce, } impl std::panic::RefUnwindSafe for RootDatabase {} @@ -106,6 +103,7 @@ impl Clone for RootDatabase { storage: self.storage.clone(), files: self.files.clone(), crates_map: self.crates_map.clone(), + nonce: Nonce::new(), } } } @@ -169,6 +167,10 @@ impl SourceDatabase for RootDatabase { fn crates_map(&self) -> Arc { self.crates_map.clone() } + + fn nonce_and_revision(&self) -> (Nonce, salsa::Revision) { + (self.nonce, salsa::plumbing::ZalsaDatabase::zalsa(self).current_revision()) + } } impl Default for RootDatabase { @@ -183,6 +185,7 @@ impl RootDatabase { storage: ManuallyDrop::new(salsa::Storage::default()), files: Default::default(), crates_map: Default::default(), + nonce: Nonce::new(), }; // This needs to be here otherwise `CrateGraphBuilder` will panic. db.set_all_crates(Arc::new(Box::new([]))); @@ -273,7 +276,6 @@ pub enum SymbolKind { Struct, ToolModule, Trait, - TraitAlias, TypeAlias, TypeParam, Union, @@ -306,7 +308,6 @@ impl From for SymbolKind { hir::ModuleDef::Adt(hir::Adt::Enum(..)) => SymbolKind::Enum, hir::ModuleDef::Adt(hir::Adt::Union(..)) => SymbolKind::Union, hir::ModuleDef::Trait(..) => SymbolKind::Trait, - hir::ModuleDef::TraitAlias(..) => SymbolKind::TraitAlias, hir::ModuleDef::TypeAlias(..) => SymbolKind::TypeAlias, hir::ModuleDef::BuiltinType(..) 
=> SymbolKind::TypeAlias, } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs index 5d88afec50951..4a27035afd091 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs @@ -3,7 +3,7 @@ use crate::helpers::mod_path_to_ast; use either::Either; use hir::{ - AsAssocItem, HirDisplay, HirFileId, ImportPathConfig, ModuleDef, SemanticsScope, + AsAssocItem, FindPathConfig, HirDisplay, HirFileId, ModuleDef, SemanticsScope, prettify_macro_expansion, }; use itertools::Itertools; @@ -11,7 +11,7 @@ use rustc_hash::FxHashMap; use span::Edition; use syntax::{ NodeOrToken, SyntaxNode, - ast::{self, AstNode, HasGenericArgs, make}, + ast::{self, AstNode, HasGenericArgs, HasName, make}, syntax_editor::{self, SyntaxEditor}, }; @@ -315,32 +315,49 @@ impl Ctx<'_> { } fn transform_path(&self, path: &SyntaxNode) -> SyntaxNode { - fn find_child_paths(root_path: &SyntaxNode) -> Vec { - let mut result = Vec::new(); + fn find_child_paths_and_ident_pats( + root_path: &SyntaxNode, + ) -> Vec> { + let mut result: Vec> = Vec::new(); for child in root_path.children() { if let Some(child_path) = ast::Path::cast(child.clone()) { - result.push(child_path); + result.push(either::Left(child_path)); + } else if let Some(child_ident_pat) = ast::IdentPat::cast(child.clone()) { + result.push(either::Right(child_ident_pat)); } else { - result.extend(find_child_paths(&child)); + result.extend(find_child_paths_and_ident_pats(&child)); } } result } + let root_path = path.clone_subtree(); - let result = find_child_paths(&root_path); + + let result = find_child_paths_and_ident_pats(&root_path); let mut editor = SyntaxEditor::new(root_path.clone()); for sub_path in result { let new = self.transform_path(sub_path.syntax()); editor.replace(sub_path.syntax(), new); } + let update_sub_item = editor.finish().new_root().clone().clone_subtree(); - let item = find_child_paths(&update_sub_item); + let item = find_child_paths_and_ident_pats(&update_sub_item); let mut editor = SyntaxEditor::new(update_sub_item); for sub_path in item { - self.transform_path_(&mut editor, &sub_path); + self.transform_path_or_ident_pat(&mut editor, &sub_path); } editor.finish().new_root().clone() } + fn transform_path_or_ident_pat( + &self, + editor: &mut SyntaxEditor, + item: &Either, + ) -> Option<()> { + match item { + Either::Left(path) => self.transform_path_(editor, path), + Either::Right(ident_pat) => self.transform_ident_pat(editor, ident_pat), + } + } fn transform_path_(&self, editor: &mut SyntaxEditor, path: &ast::Path) -> Option<()> { if path.qualifier().is_some() { @@ -375,7 +392,7 @@ impl Ctx<'_> { parent.segment()?.name_ref()?, ) .and_then(|trait_ref| { - let cfg = ImportPathConfig { + let cfg = FindPathConfig { prefer_no_std: false, prefer_prelude: true, prefer_absolute: false, @@ -435,7 +452,7 @@ impl Ctx<'_> { return None; } - let cfg = ImportPathConfig { + let cfg = FindPathConfig { prefer_no_std: false, prefer_prelude: true, prefer_absolute: false, @@ -484,7 +501,7 @@ impl Ctx<'_> { if let Some(adt) = ty.as_adt() && let ast::Type::PathType(path_ty) = &ast_ty { - let cfg = ImportPathConfig { + let cfg = FindPathConfig { prefer_no_std: false, prefer_prelude: true, prefer_absolute: false, @@ -515,6 +532,34 @@ impl Ctx<'_> { } Some(()) } + + fn transform_ident_pat( + &self, + editor: &mut SyntaxEditor, + ident_pat: &ast::IdentPat, + ) -> Option<()> { + let name = ident_pat.name()?; + + let 
temp_path = make::path_from_text(&name.text()); + + let resolution = self.source_scope.speculative_resolve(&temp_path)?; + + match resolution { + hir::PathResolution::Def(def) if def.as_assoc_item(self.source_scope.db).is_none() => { + let cfg = FindPathConfig { + prefer_no_std: false, + prefer_prelude: true, + prefer_absolute: false, + allow_unstable: true, + }; + let found_path = self.target_module.find_path(self.source_scope.db, def, cfg)?; + let res = mod_path_to_ast(&found_path, self.target_edition).clone_for_update(); + editor.replace(ident_pat.syntax(), res.syntax()); + Some(()) + } + _ => None, + } + } } // FIXME: It would probably be nicer if we could get this via HIR (i.e. get the diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs index 424b27a398b20..a8800c142a22e 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs @@ -163,7 +163,6 @@ impl Definition { Definition::Const(it) => name_range(it, sema).and_then(syn_ctx_is_root), Definition::Static(it) => name_range(it, sema).and_then(syn_ctx_is_root), Definition::Trait(it) => name_range(it, sema).and_then(syn_ctx_is_root), - Definition::TraitAlias(it) => name_range(it, sema).and_then(syn_ctx_is_root), Definition::TypeAlias(it) => name_range(it, sema).and_then(syn_ctx_is_root), Definition::Local(it) => { name_range(it.primary_source(sema.db), sema).and_then(syn_ctx_is_root) diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index abd4dc8300b39..f1d076e874d5c 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -352,7 +352,6 @@ impl Definition { hir::GenericDef::Function(it) => it.source(db).map(|src| src.syntax().cloned()), hir::GenericDef::Adt(it) => it.source(db).map(|src| src.syntax().cloned()), hir::GenericDef::Trait(it) => it.source(db).map(|src| src.syntax().cloned()), - hir::GenericDef::TraitAlias(it) => it.source(db).map(|src| src.syntax().cloned()), hir::GenericDef::TypeAlias(it) => it.source(db).map(|src| src.syntax().cloned()), hir::GenericDef::Impl(it) => it.source(db).map(|src| src.syntax().cloned()), hir::GenericDef::Const(it) => it.source(db).map(|src| src.syntax().cloned()), diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs index 9c4e6f5cbf82f..76b647f8e9f2d 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs @@ -133,23 +133,27 @@ pub trait SymbolsDatabase: HirDatabase + SourceDatabase { fn library_symbols(db: &dyn SymbolsDatabase, source_root_id: SourceRootId) -> Arc { let _p = tracing::info_span!("library_symbols").entered(); - let mut symbol_collector = SymbolCollector::new(db); - - db.source_root_crates(source_root_id) - .iter() - .flat_map(|&krate| Crate::from(krate).modules(db)) - // we specifically avoid calling other SymbolsDatabase queries here, even though they do the same thing, - // as the index for a library is not going to really ever change, and we do not want to store each - // the module or crate indices for those in salsa unless we need to. - .for_each(|module| symbol_collector.collect(module)); - - Arc::new(SymbolIndex::new(symbol_collector.finish())) + // We call this without attaching because this runs in parallel, so we need to attach here. 
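+ // (`salsa::attach` attaches `db` to the current thread as the thread-local database for the duration of the closure.)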
+ salsa::attach(db, || { + let mut symbol_collector = SymbolCollector::new(db); + + db.source_root_crates(source_root_id) + .iter() + .flat_map(|&krate| Crate::from(krate).modules(db)) + // we specifically avoid calling other SymbolsDatabase queries here, even though they do the same thing, + // as the index for a library is not going to really ever change, and we do not want to store each + // the module or crate indices for those in salsa unless we need to. + .for_each(|module| symbol_collector.collect(module)); + + Arc::new(SymbolIndex::new(symbol_collector.finish())) + }) } fn module_symbols(db: &dyn SymbolsDatabase, module: Module) -> Arc { let _p = tracing::info_span!("module_symbols").entered(); - Arc::new(SymbolIndex::new(SymbolCollector::new_module(db, module))) + // We call this without attaching because this runs in parallel, so we need to attach here. + salsa::attach(db, || Arc::new(SymbolIndex::new(SymbolCollector::new_module(db, module)))) } pub fn crate_symbols(db: &dyn SymbolsDatabase, krate: Crate) -> Box<[Arc]> { @@ -357,7 +361,6 @@ impl Query { hir::ModuleDef::Adt(..) | hir::ModuleDef::TypeAlias(..) | hir::ModuleDef::BuiltinType(..) - | hir::ModuleDef::TraitAlias(..) | hir::ModuleDef::Trait(..) ); if non_type_for_type_only_query || !self.matches_assoc_mode(symbol.is_assoc) { diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs index 742d614bc5673..20bfcc2deecee 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs @@ -1,7 +1,7 @@ //! This diagnostic provides an assist for creating a struct definition from a JSON //! example. 
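For orientation (not part of the patch), a minimal before/after sketch of what this assist produces, using a hypothetical JSON snippet; the shape of the output (strings mapped to `String`, floats to `f64`, integers to `i64`, fields sorted alphabetically, `Root1` as the generated name) matches the test expectations updated further down in this hunk:

// JSON pasted in item position:
// { "name": "rust-analyzer", "score": 9.5, "stars": 42 }

// Struct offered by the quick fix:
struct Root1 { name: String, score: f64, stars: i64 }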
-use hir::{ImportPathConfig, PathResolution, Semantics}; +use hir::{FindPathConfig, PathResolution, Semantics}; use ide_db::text_edit::TextEdit; use ide_db::{ EditionedFileId, FileRange, FxHashMap, RootDatabase, @@ -141,7 +141,7 @@ pub(crate) fn json_in_items( let scope = scb.make_import_scope_mut(import_scope); let current_module = semantics_scope.module(); - let cfg = ImportPathConfig { + let cfg = FindPathConfig { prefer_no_std: config.prefer_no_std, prefer_prelude: config.prefer_prelude, prefer_absolute: config.prefer_absolute, @@ -233,7 +233,7 @@ mod tests { } #[derive(Serialize)] - struct Root1{ bar: f64, bay: i64, baz: (), r#box: bool, foo: String } + struct Root1 { bar: f64, bay: i64, baz: (), r#box: bool, foo: String } "#, ); @@ -252,9 +252,9 @@ mod tests { } "#, r#" - struct Value1{ } - struct Bar1{ kind: String, value: Value1 } - struct Root1{ bar: Bar1, foo: String } + struct Value1 { } + struct Bar1 { kind: String, value: Value1 } + struct Root1 { bar: Bar1, foo: String } "#, ); @@ -284,12 +284,12 @@ mod tests { } "#, r#" - struct Address1{ house: i64, street: String } - struct User1{ address: Address1, email: String } - struct AnotherUser1{ user: User1 } - struct Address2{ house: i64, street: String } - struct User2{ address: Address2, email: String } - struct Root1{ another_user: AnotherUser1, user: User2 } + struct Address1 { house: i64, street: String } + struct User1 { address: Address1, email: String } + struct AnotherUser1 { user: User1 } + struct Address2 { house: i64, street: String } + struct User2 { address: Address2, email: String } + struct Root1 { another_user: AnotherUser1, user: User2 } "#, ); @@ -326,9 +326,9 @@ mod tests { use serde::Deserialize; #[derive(Serialize, Deserialize)] - struct OfObject1{ x: i64, y: i64 } + struct OfObject1 { x: i64, y: i64 } #[derive(Serialize, Deserialize)] - struct Root1{ empty: Vec<_>, nested: Vec>>, of_object: Vec, of_string: Vec } + struct Root1 { empty: Vec<_>, nested: Vec>>, of_object: Vec, of_string: Vec } "#, ); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs index c39e00e178f81..6a1ecae651501 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs @@ -144,16 +144,13 @@ macro_rules! 
concat { () => {} } } #[test] - fn register_attr_and_tool() { - cov_mark::check!(register_attr); + fn register_tool() { cov_mark::check!(register_tool); check_diagnostics( r#" #![register_tool(tool)] -#![register_attr(attr)] #[tool::path] -#[attr] struct S; "#, ); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs index 893bfca6a1298..49f925e2e0c93 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -1,6 +1,6 @@ use either::Either; use hir::{ - AssocItem, HirDisplay, ImportPathConfig, InFile, Type, + AssocItem, FindPathConfig, HirDisplay, InFile, Type, db::{ExpandDatabase, HirDatabase}, sym, }; @@ -132,7 +132,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option) -> Y { fn no_false_positive_dyn_fn() { check_diagnostics( r#" -//- minicore: copy, fn +//- minicore: copy, fn, dispatch_from_dyn fn f(x: &mut &mut dyn Fn()) { x(); } @@ -166,7 +166,7 @@ struct X<'a> { field: &'a mut dyn Fn(), } -fn f(x: &mut X<'_>) { +fn g(x: &mut X<'_>) { (x.field)(); } "#, diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs index ac54ac0950f39..8613581292f75 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -526,8 +526,7 @@ fn main() { fn run(_t: Rate<5>) { } fn main() { - run(f()) // FIXME: remove this error - //^^^ error: expected Rate<5>, found Rate<_> + run(f()) } "#, ); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs index 8d42770269057..577c582a2080d 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs @@ -1,7 +1,7 @@ use std::ops::Not; use hir::{ - ClosureStyle, HirDisplay, ImportPathConfig, + ClosureStyle, FindPathConfig, HirDisplay, db::ExpandDatabase, term_search::{TermSearchConfig, TermSearchCtx, term_search}, }; @@ -73,7 +73,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole<'_>) -> Option() -> T { loop {} } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs index 690158989679b..358e0c43c2a93 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs @@ -1,5 +1,3 @@ -use std::iter; - use either::Either; use hir::{Adt, FileRange, HasSource, HirDisplay, InFile, Struct, Union, db::ExpandDatabase}; use ide_db::text_edit::TextEdit; @@ -194,17 +192,20 @@ fn add_field_to_struct_fix( Some(make::visibility_pub_crate()) }; // FIXME: Allow for choosing a visibility modifier see https://github.com/rust-lang/rust-analyzer/issues/11563 - let indent = IndentLevel::from_node(struct_syntax.value) + 1; + let indent = IndentLevel::from_node(struct_syntax.value); - let field = make::record_field(visibility, field_name, suggested_type).indent(indent); - let record_field_list = make::record_field_list(iter::once(field)); + let field = + 
make::record_field(visibility, field_name, suggested_type).indent(indent + 1); // A Unit Struct with no `;` is invalid syntax. We should not suggest this fix. let semi_colon = algo::skip_trivia_token(struct_syntax.value.last_token()?, Direction::Prev)?; if semi_colon.kind() != SyntaxKind::SEMICOLON { return None; } - src_change_builder.replace(semi_colon.text_range(), record_field_list.to_string()); + src_change_builder.replace( + semi_colon.text_range(), + format!(" {{\n{}{field},\n{indent}}}", indent + 1), + ); Some(Assist { id: AssistId::quick_fix("convert-unit-struct-to-record-struct"), @@ -230,7 +231,7 @@ fn record_field_layout( field_list: ast::RecordFieldList, struct_syntax: &SyntaxNode, ) -> Option<(TextSize, String)> { - let (offset, needs_comma, trailing_new_line, indent) = match field_list.fields().last() { + let (offset, needs_comma, indent) = match field_list.fields().last() { Some(record_field) => { let syntax = algo::skip_trivia_token(field_list.r_curly_token()?, Direction::Prev)?; @@ -239,19 +240,22 @@ fn record_field_layout( ( last_field_syntax.text_range().end(), syntax.kind() != SyntaxKind::COMMA, - false, last_field_indent, ) } // Empty Struct. Add a field right before the closing brace None => { let indent = IndentLevel::from_node(struct_syntax) + 1; - let offset = field_list.r_curly_token()?.text_range().start(); - (offset, false, true, indent) + let offset = field_list.l_curly_token()?.text_range().end(); + (offset, false, indent) } }; - let comma = if needs_comma { ",\n" } else { "" }; - let trailing_new_line = if trailing_new_line { "\n" } else { "" }; + let trailing_new_line = if !field_list.syntax().text().contains_char('\n') { + format!("\n{}", field_list.indent_level()) + } else { + String::new() + }; + let comma = if needs_comma { ",\n" } else { "\n" }; let record_field = make::record_field(visibility, name, suggested_type); Some((offset, format!("{comma}{indent}{record_field}{trailing_new_line}"))) @@ -377,18 +381,24 @@ fn foo() { fn unresolved_field_fix_on_unit() { check_fix( r#" + mod indent { struct Foo; fn foo() { Foo.bar$0; } + } "#, r#" - struct Foo{ bar: () } + mod indent { + struct Foo { + bar: (), + } fn foo() { Foo.bar; } + } "#, ); } @@ -396,6 +406,7 @@ fn foo() { fn unresolved_field_fix_on_empty() { check_fix( r#" + mod indent { struct Foo{ } @@ -403,8 +414,10 @@ fn foo() { let foo = Foo{}; foo.bar$0; } + } "#, r#" + mod indent { struct Foo{ bar: () } @@ -413,6 +426,32 @@ fn foo() { let foo = Foo{}; foo.bar; } + } + "#, + ); + + check_fix( + r#" + mod indent { + struct Foo {} + + fn foo() { + let foo = Foo{}; + foo.bar$0; + } + } + "#, + r#" + mod indent { + struct Foo { + bar: () + } + + fn foo() { + let foo = Foo{}; + foo.bar; + } + } "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs index dcca85d4db33e..bd5d134348e27 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs @@ -268,7 +268,7 @@ impl A { } fn main() { let a = A {a: 0, b: ""}; - A::::foo(); + A::::foo(); } "#, ); @@ -351,4 +351,26 @@ fn foo() { "#, ); } + + #[test] + fn iter_collect() { + check_diagnostics( + r#" +//- minicore: unsize, coerce_unsized, iterator, iterators, sized +struct Map(K, V); +impl FromIterator<(K, V)> for Map { + fn from_iter>(_iter: T) -> Self { + loop {} + } +} + +fn foo() -> Map { + [ + (123, &["abc", 
"def"] as _), + (456, &["ghi"] as _), + ].into_iter().collect() +} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs index 181993154e59f..1839ab1c58c1e 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs @@ -2,10 +2,11 @@ mod overly_long_real_world_cases; +use hir::setup_tracing; use ide_db::{ LineIndexDatabase, RootDatabase, assists::{AssistResolveStrategy, ExprFillDefaultMode}, - base_db::SourceDatabase, + base_db::{SourceDatabase, salsa}, }; use itertools::Itertools; use stdx::trim_indent; @@ -73,14 +74,16 @@ fn check_nth_fix_with_config( let after = trim_indent(ra_fixture_after); let (db, file_position) = RootDatabase::with_position(ra_fixture_before); - let diagnostic = super::full_diagnostics( - &db, - &config, - &AssistResolveStrategy::All, - file_position.file_id.file_id(&db), - ) - .pop() - .expect("no diagnostics"); + let diagnostic = salsa::attach(&db, || { + super::full_diagnostics( + &db, + &config, + &AssistResolveStrategy::All, + file_position.file_id.file_id(&db), + ) + .pop() + .expect("no diagnostics") + }); let fix = &diagnostic .fixes .unwrap_or_else(|| panic!("{:?} diagnostic misses fixes", diagnostic.code))[nth]; @@ -126,12 +129,14 @@ pub(crate) fn check_has_fix( let (db, file_position) = RootDatabase::with_position(ra_fixture_before); let mut conf = DiagnosticsConfig::test_sample(); conf.expr_fill_default = ExprFillDefaultMode::Default; - let fix = super::full_diagnostics( - &db, - &conf, - &AssistResolveStrategy::All, - file_position.file_id.file_id(&db), - ) + let fix = salsa::attach(&db, || { + super::full_diagnostics( + &db, + &conf, + &AssistResolveStrategy::All, + file_position.file_id.file_id(&db), + ) + }) .into_iter() .find(|d| { d.fixes @@ -165,12 +170,14 @@ pub(crate) fn check_has_fix( /// Checks that there's a diagnostic *without* fix at `$0`. 
pub(crate) fn check_no_fix(#[rust_analyzer::rust_fixture] ra_fixture: &str) { let (db, file_position) = RootDatabase::with_position(ra_fixture); - let diagnostic = super::full_diagnostics( - &db, - &DiagnosticsConfig::test_sample(), - &AssistResolveStrategy::All, - file_position.file_id.file_id(&db), - ) + let diagnostic = salsa::attach(&db, || { + super::full_diagnostics( + &db, + &DiagnosticsConfig::test_sample(), + &AssistResolveStrategy::All, + file_position.file_id.file_id(&db), + ) + }) .pop() .unwrap(); assert!(diagnostic.fixes.is_none(), "got a fix when none was expected: {diagnostic:?}"); @@ -198,12 +205,20 @@ pub(crate) fn check_diagnostics_with_config( config: DiagnosticsConfig, #[rust_analyzer::rust_fixture] ra_fixture: &str, ) { + let _tracing = setup_tracing(); + let (db, files) = RootDatabase::with_many_files(ra_fixture); let mut annotations = files .iter() .copied() .flat_map(|file_id| { - super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id.file_id(&db)) + salsa::attach(&db, || { + super::full_diagnostics( + &db, + &config, + &AssistResolveStrategy::All, + file_id.file_id(&db), + ) .into_iter() .map(|d| { let mut annotation = String::new(); @@ -221,6 +236,7 @@ pub(crate) fn check_diagnostics_with_config( annotation.push_str(&d.message); (d.range, annotation) }) + }) }) .map(|(diagnostic, annotation)| (diagnostic.file_id, (diagnostic.range, annotation))) .into_group_map(); @@ -272,15 +288,19 @@ fn test_disabled_diagnostics() { let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#); let file_id = file_id.file_id(&db); - let diagnostics = super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id); + let diagnostics = salsa::attach(&db, || { + super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id) + }); assert!(diagnostics.is_empty()); - let diagnostics = super::full_diagnostics( - &db, - &DiagnosticsConfig::test_sample(), - &AssistResolveStrategy::All, - file_id, - ); + let diagnostics = salsa::attach(&db, || { + super::full_diagnostics( + &db, + &DiagnosticsConfig::test_sample(), + &AssistResolveStrategy::All, + file_id, + ) + }); assert!(!diagnostics.is_empty()); } diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs index f21132c297ee8..595f0bb5fa8af 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs @@ -6,7 +6,7 @@ use crate::{ parsing::{Constraint, NodeKind, Placeholder, Var}, resolving::{ResolvedPattern, ResolvedRule, UfcsCallInfo}, }; -use hir::{FileRange, ImportPathConfig, Semantics}; +use hir::{FileRange, FindPathConfig, Semantics}; use ide_db::{FxHashMap, base_db::RootQueryDb}; use std::{cell::Cell, iter::Peekable}; use syntax::{ @@ -661,7 +661,7 @@ impl Match { .module(); for (path, resolved_path) in &template.resolved_paths { if let hir::PathResolution::Def(module_def) = resolved_path.resolution { - let cfg = ImportPathConfig { + let cfg = FindPathConfig { prefer_no_std: false, prefer_prelude: true, prefer_absolute: false, diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs index a4e2cfbaee27d..1d5f5adf2eefe 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs @@ -1,7 +1,7 @@ //! This module is responsible for resolving paths within rules. 
use hir::AsAssocItem; -use ide_db::FxHashMap; +use ide_db::{FxHashMap, base_db::salsa}; use parsing::Placeholder; use syntax::{ SmolStr, SyntaxKind, SyntaxNode, SyntaxToken, @@ -48,16 +48,20 @@ impl<'db> ResolvedRule<'db> { resolution_scope: &ResolutionScope<'db>, index: usize, ) -> Result, SsrError> { - let resolver = - Resolver { resolution_scope, placeholders_by_stand_in: rule.placeholders_by_stand_in }; - let resolved_template = match rule.template { - Some(template) => Some(resolver.resolve_pattern_tree(template)?), - None => None, - }; - Ok(ResolvedRule { - pattern: resolver.resolve_pattern_tree(rule.pattern)?, - template: resolved_template, - index, + salsa::attach(resolution_scope.scope.db, || { + let resolver = Resolver { + resolution_scope, + placeholders_by_stand_in: rule.placeholders_by_stand_in, + }; + let resolved_template = match rule.template { + Some(template) => Some(resolver.resolve_pattern_tree(template)?), + None => None, + }; + Ok(ResolvedRule { + pattern: resolver.resolve_pattern_tree(rule.pattern)?, + template: resolved_template, + index, + }) }) } diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs index 46b633b8a3250..875b4d9b06cec 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs @@ -2,7 +2,10 @@ use expect_test::{Expect, expect}; use hir::{FilePosition, FileRange}; use ide_db::{ EditionedFileId, FxHashSet, - base_db::{SourceDatabase, salsa::Durability}, + base_db::{ + SourceDatabase, + salsa::{self, Durability}, + }, }; use test_utils::RangeOrOffset; use triomphe::Arc; @@ -116,7 +119,7 @@ fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) { let rule: SsrRule = rule.parse().unwrap(); match_finder.add_rule(rule).unwrap(); } - let edits = match_finder.edits(); + let edits = salsa::attach(&db, || match_finder.edits()); if edits.is_empty() { panic!("No edits were made"); } @@ -155,8 +158,12 @@ fn assert_matches(pattern: &str, code: &str, expected: &[&str]) { ) .unwrap(); match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap(); - let matched_strings: Vec = - match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect(); + let matched_strings: Vec = salsa::attach(&db, || match_finder.matches()) + .flattened() + .matches + .iter() + .map(|m| m.matched_text()) + .collect(); if matched_strings != expected && !expected.is_empty() { print_match_debug_info(&match_finder, position.file_id, expected[0]); } diff --git a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs index 7a0405939d10c..f42cead3501d1 100644 --- a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs +++ b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs @@ -67,7 +67,7 @@ pub(crate) fn incoming_calls( let def = ast::Fn::cast(node).and_then(|fn_| sema.to_def(&fn_))?; // We should return def before check if it is a test, so that we // will not continue to search for outer fn in nested fns - def.try_to_nav(sema.db).map(|nav| (def, nav)) + def.try_to_nav(sema).map(|nav| (def, nav)) }); if let Some((def, nav)) = def_nav { @@ -122,10 +122,10 @@ pub(crate) fn outgoing_calls( if exclude_tests && it.is_test(db) { return None; } - it.try_to_nav(db) + it.try_to_nav(&sema) } - hir::CallableKind::TupleEnumVariant(it) => it.try_to_nav(db), - hir::CallableKind::TupleStruct(it) => it.try_to_nav(db), + hir::CallableKind::TupleEnumVariant(it) => 
it.try_to_nav(&sema), + hir::CallableKind::TupleStruct(it) => it.try_to_nav(&sema), _ => None, } .zip(Some(sema.original_range(expr.syntax()))) @@ -136,7 +136,7 @@ pub(crate) fn outgoing_calls( return None; } function - .try_to_nav(db) + .try_to_nav(&sema) .zip(Some(sema.original_range(expr.name_ref()?.syntax()))) } }?; diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs index a5d9a10d2e5fe..c197d559aa89a 100644 --- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs +++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs @@ -225,7 +225,6 @@ pub(crate) fn resolve_doc_path_for_def( Definition::Const(it) => it.resolve_doc_path(db, link, ns, is_inner_doc), Definition::Static(it) => it.resolve_doc_path(db, link, ns, is_inner_doc), Definition::Trait(it) => it.resolve_doc_path(db, link, ns, is_inner_doc), - Definition::TraitAlias(it) => it.resolve_doc_path(db, link, ns, is_inner_doc), Definition::TypeAlias(it) => it.resolve_doc_path(db, link, ns, is_inner_doc), Definition::Macro(it) => it.resolve_doc_path(db, link, ns, is_inner_doc), Definition::Field(it) => it.resolve_doc_path(db, link, ns, is_inner_doc), @@ -390,7 +389,8 @@ fn get_doc_links( let (mut web_url, mut local_url) = get_doc_base_urls(db, target, target_dir, sysroot); - if let Some(path) = mod_path_of_def(db, target) { + let append_mod = !matches!(def, Definition::Macro(m) if m.is_macro_export(db)); + if append_mod && let Some(path) = mod_path_of_def(db, target) { web_url = join_url(web_url, &path); local_url = join_url(local_url, &path); } @@ -670,11 +670,9 @@ fn filename_and_frag_for_def( None => String::from("index.html"), }, Definition::Trait(t) => { + // FIXME(trait-alias): url should be traitalias. for aliases format!("trait.{}.html", t.name(db).as_str()) } - Definition::TraitAlias(t) => { - format!("traitalias.{}.html", t.name(db).as_str()) - } Definition::TypeAlias(t) => { format!("type.{}.html", t.name(db).as_str()) } diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs index 6af156fa668f5..72436307d2cee 100644 --- a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs @@ -4,6 +4,7 @@ use expect_test::{Expect, expect}; use hir::Semantics; use ide_db::{ FilePosition, FileRange, RootDatabase, + base_db::salsa, defs::Definition, documentation::{DocsRangeMap, Documentation, HasDocs}, }; @@ -46,7 +47,8 @@ fn check_rewrite(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect let (analysis, position) = fixture::position(ra_fixture); let sema = &Semantics::new(&analysis.db); let (cursor_def, docs, range) = def_under_cursor(sema, &position); - let res = rewrite_links(sema.db, docs.as_str(), cursor_def, Some(range)); + let res = + salsa::attach(sema.db, || rewrite_links(sema.db, docs.as_str(), cursor_def, Some(range))); expect.assert_eq(&res) } @@ -63,9 +65,11 @@ fn check_doc_links(#[rust_analyzer::rust_fixture] ra_fixture: &str) { .flat_map(|(text_range, link, ns)| { let attr = range.map(text_range); let is_inner_attr = attr.map(|(_file, attr)| attr.is_inner_attr()).unwrap_or(false); - let def = resolve_doc_path_for_def(sema.db, cursor_def, &link, ns, is_inner_attr) - .unwrap_or_else(|| panic!("Failed to resolve {link}")); - def.try_to_nav(sema.db).unwrap().into_iter().zip(iter::repeat(link)) + let def = salsa::attach(sema.db, || { + resolve_doc_path_for_def(sema.db, cursor_def, &link, ns, is_inner_attr) + 
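Stepping back to the `get_doc_links` change above: it stops appending the module path for `#[macro_export]` macros, since such macros are exported and documented at the crate root regardless of where they are defined, which is exactly what the new `external_docs_macro_export` test checks. Below is a standalone sketch of the two resulting URL shapes; the `doc_url` helper and the `struct.Thing.html` entry are invented for the example.

```rust
// Build a docs URL, optionally inserting a module path segment.
fn doc_url(base: &str, module_path: Option<&str>, file: &str) -> String {
    match module_path {
        Some(path) => format!("{base}/{path}/{file}"),
        None => format!("{base}/{file}"),
    }
}

fn main() {
    // A macro defined in `foo::inner` but marked #[macro_export] is documented at the crate root.
    let exported = doc_url("https://docs.rs/foo/*/foo", None, "macro.my_macro.html");
    assert_eq!(exported, "https://docs.rs/foo/*/foo/macro.my_macro.html");

    // An ordinary item keeps its module path in the URL.
    let nested = doc_url("https://docs.rs/foo/*/foo", Some("inner"), "struct.Thing.html");
    assert_eq!(nested, "https://docs.rs/foo/*/foo/inner/struct.Thing.html");
}
```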
.unwrap_or_else(|| panic!("Failed to resolve {link}")) + }); + def.try_to_nav(sema).unwrap().into_iter().zip(iter::repeat(link)) }) .map(|(nav_target, link)| { let range = @@ -414,6 +418,30 @@ fn foo() { ) } +#[test] +fn external_docs_macro_export() { + check_external_docs( + r#" +//- /lib.rs crate:foo +pub mod inner { + #[macro_export] + macro_rules! my_macro { + () => {}; + } +} + +//- /main.rs crate:bar deps:foo +fn main() { + foo::my_m$0acro!(); +} + "#, + Some("/home/user/project"), + Some(expect![[r#"https://docs.rs/foo/*/foo/macro.my_macro.html"#]]), + Some(expect![[r#"file:///home/user/project/doc/foo/macro.my_macro.html"#]]), + Some("/sysroot"), + ); +} + #[test] fn doc_links_items_simple() { check_doc_links( diff --git a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs index ad84eacfb3e88..094a4a7036c40 100644 --- a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs +++ b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs @@ -1,5 +1,5 @@ use hir::db::ExpandDatabase; -use hir::{ExpandResult, InFile, InRealFile, Semantics}; +use hir::{ExpandResult, InFile, Semantics}; use ide_db::{ FileId, RootDatabase, base_db::Crate, helpers::pick_best_token, syntax_helpers::prettify_macro_expansion, @@ -87,52 +87,55 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option< return derive; } - let mut anc = sema - .descend_token_into_include_expansion(InRealFile::new(file_id, tok)) - .value - .parent_ancestors(); - let mut span_map = SpanMap::empty(); - let mut error = String::new(); - let (name, expanded, kind) = loop { - let node = anc.next()?; - - if let Some(item) = ast::Item::cast(node.clone()) - && let Some(def) = sema.resolve_attr_macro_call(&item) - { - break ( - def.name(db).display(db, file_id.edition(db)).to_string(), - expand_macro_recur(&sema, &item, &mut error, &mut span_map, TextSize::new(0))?, - SyntaxKind::MACRO_ITEMS, - ); - } - if let Some(mac) = ast::MacroCall::cast(node) { - let mut name = mac.path()?.segment()?.name_ref()?.to_string(); - name.push('!'); - let syntax_kind = - mac.syntax().parent().map(|it| it.kind()).unwrap_or(SyntaxKind::MACRO_ITEMS); - break ( - name, - expand_macro_recur( - &sema, - &ast::Item::MacroCall(mac), - &mut error, - &mut span_map, - TextSize::new(0), - )?, - syntax_kind, - ); - } - }; + let syntax_token = sema.descend_into_macros_exact(tok); + 'tokens: for syntax_token in syntax_token { + let mut anc = syntax_token.parent_ancestors(); + let mut span_map = SpanMap::empty(); + let mut error = String::new(); + let (name, expanded, kind) = loop { + let Some(node) = anc.next() else { + continue 'tokens; + }; + + if let Some(item) = ast::Item::cast(node.clone()) + && let Some(def) = sema.resolve_attr_macro_call(&item) + { + break ( + def.name(db).display(db, file_id.edition(db)).to_string(), + expand_macro_recur(&sema, &item, &mut error, &mut span_map, TextSize::new(0))?, + SyntaxKind::MACRO_ITEMS, + ); + } + if let Some(mac) = ast::MacroCall::cast(node) { + let mut name = mac.path()?.segment()?.name_ref()?.to_string(); + name.push('!'); + let syntax_kind = + mac.syntax().parent().map(|it| it.kind()).unwrap_or(SyntaxKind::MACRO_ITEMS); + break ( + name, + expand_macro_recur( + &sema, + &ast::Item::MacroCall(mac), + &mut error, + &mut span_map, + TextSize::new(0), + )?, + syntax_kind, + ); + } + }; - // FIXME: - // macro expansion may lose all white space information - // But we hope someday we can use ra_fmt for that - let mut expansion = format(db, kind, 
position.file_id, expanded, &span_map, krate); + // FIXME: + // macro expansion may lose all white space information + // But we hope someday we can use ra_fmt for that + let mut expansion = format(db, kind, position.file_id, expanded, &span_map, krate); - if !error.is_empty() { - expansion.insert_str(0, &format!("Expansion had errors:{error}\n\n")); + if !error.is_empty() { + expansion.insert_str(0, &format!("Expansion had errors:{error}\n\n")); + } + return Some(ExpandedMacro { name, expansion }); } - Some(ExpandedMacro { name, expansion }) + None } fn expand_macro_recur( @@ -752,8 +755,8 @@ fn test() { } "#, expect![[r#" - my_concat! - "<>hi""#]], + concat! + "<>""#]], ); } diff --git a/src/tools/rust-analyzer/crates/ide/src/file_structure.rs b/src/tools/rust-analyzer/crates/ide/src/file_structure.rs index 6820f99facf2c..21254fc4d6a22 100644 --- a/src/tools/rust-analyzer/crates/ide/src/file_structure.rs +++ b/src/tools/rust-analyzer/crates/ide/src/file_structure.rs @@ -23,6 +23,11 @@ pub enum StructureNodeKind { Region, } +#[derive(Debug, Clone)] +pub struct FileStructureConfig { + pub exclude_locals: bool, +} + // Feature: File Structure // // Provides a tree of the symbols defined in the file. Can be used to @@ -36,21 +41,24 @@ pub enum StructureNodeKind { // | VS Code | Ctrl+Shift+O | // // ![File Structure](https://user-images.githubusercontent.com/48062697/113020654-b42fc800-917a-11eb-8388-e7dc4d92b02e.gif) -pub(crate) fn file_structure(file: &SourceFile) -> Vec { +pub(crate) fn file_structure( + file: &SourceFile, + config: &FileStructureConfig, +) -> Vec { let mut res = Vec::new(); let mut stack = Vec::new(); for event in file.syntax().preorder_with_tokens() { match event { WalkEvent::Enter(NodeOrToken::Node(node)) => { - if let Some(mut symbol) = structure_node(&node) { + if let Some(mut symbol) = structure_node(&node, config) { symbol.parent = stack.last().copied(); stack.push(res.len()); res.push(symbol); } } WalkEvent::Leave(NodeOrToken::Node(node)) => { - if structure_node(&node).is_some() { + if structure_node(&node, config).is_some() { stack.pop().unwrap(); } } @@ -71,7 +79,7 @@ pub(crate) fn file_structure(file: &SourceFile) -> Vec { res } -fn structure_node(node: &SyntaxNode) -> Option { +fn structure_node(node: &SyntaxNode, config: &FileStructureConfig) -> Option { fn decl(node: N, kind: StructureNodeKind) -> Option { decl_with_detail(&node, None, kind) } @@ -154,7 +162,6 @@ fn structure_node(node: &SyntaxNode) -> Option { ast::Enum(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Enum)), ast::Variant(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Variant)), ast::Trait(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Trait)), - ast::TraitAlias(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::TraitAlias)), ast::Module(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Module)), ast::Macro(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Macro)), ast::TypeAlias(it) => decl_with_type_ref(&it, it.ty(), StructureNodeKind::SymbolKind(SymbolKind::TypeAlias)), @@ -187,6 +194,10 @@ fn structure_node(node: &SyntaxNode) -> Option { Some(node) }, ast::LetStmt(it) => { + if config.exclude_locals { + return None; + } + let pat = it.pat()?; let mut label = String::new(); @@ -201,7 +212,6 @@ fn structure_node(node: &SyntaxNode) -> Option { detail: it.ty().map(|ty| ty.to_string()), deprecated: false, }; - Some(node) }, ast::ExternBlock(it) => { @@ -254,9 +264,19 @@ mod tests { use super::*; + const DEFAULT_CONFIG: FileStructureConfig 
= FileStructureConfig { exclude_locals: true }; + fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { + check_with_config(ra_fixture, &DEFAULT_CONFIG, expect); + } + + fn check_with_config( + #[rust_analyzer::rust_fixture] ra_fixture: &str, + config: &FileStructureConfig, + expect: Expect, + ) { let file = SourceFile::parse(ra_fixture, span::Edition::CURRENT).ok().unwrap(); - let structure = file_structure(&file); + let structure = file_structure(&file, config); expect.assert_debug_eq(&structure) } @@ -532,7 +552,7 @@ fn let_statements() { navigation_range: 251..256, node_range: 245..262, kind: SymbolKind( - TraitAlias, + Trait, ), detail: None, deprecated: false, @@ -701,13 +721,264 @@ fn let_statements() { ), deprecated: false, }, + ] + "#]], + ); + } + + #[test] + fn test_file_structure_include_locals() { + check_with_config( + r#" +struct Foo { + x: i32 +} + +mod m { + fn bar1() {} + fn bar2(t: T) -> T {} + fn bar3(a: A, + b: B) -> Vec< + u32 + > {} +} + +enum E { X, Y(i32) } +type T = (); +static S: i32 = 42; +const C: i32 = 42; +trait Tr {} +trait Alias = Tr; + +macro_rules! mc { + () => {} +} + +fn let_statements() { + let x = 42; + let mut y = x; + let Foo { + .. + } = Foo { x }; + _ = (); + let _ = g(); +} +"#, + &FileStructureConfig { exclude_locals: false }, + expect![[r#" + [ + StructureNode { + parent: None, + label: "Foo", + navigation_range: 8..11, + node_range: 1..26, + kind: SymbolKind( + Struct, + ), + detail: None, + deprecated: false, + }, + StructureNode { + parent: Some( + 0, + ), + label: "x", + navigation_range: 18..19, + node_range: 18..24, + kind: SymbolKind( + Field, + ), + detail: Some( + "i32", + ), + deprecated: false, + }, + StructureNode { + parent: None, + label: "m", + navigation_range: 32..33, + node_range: 28..158, + kind: SymbolKind( + Module, + ), + detail: None, + deprecated: false, + }, + StructureNode { + parent: Some( + 2, + ), + label: "bar1", + navigation_range: 43..47, + node_range: 40..52, + kind: SymbolKind( + Function, + ), + detail: Some( + "fn()", + ), + deprecated: false, + }, + StructureNode { + parent: Some( + 2, + ), + label: "bar2", + navigation_range: 60..64, + node_range: 57..81, + kind: SymbolKind( + Function, + ), + detail: Some( + "fn(t: T) -> T", + ), + deprecated: false, + }, + StructureNode { + parent: Some( + 2, + ), + label: "bar3", + navigation_range: 89..93, + node_range: 86..156, + kind: SymbolKind( + Function, + ), + detail: Some( + "fn(a: A, b: B) -> Vec< u32 >", + ), + deprecated: false, + }, + StructureNode { + parent: None, + label: "E", + navigation_range: 165..166, + node_range: 160..180, + kind: SymbolKind( + Enum, + ), + detail: None, + deprecated: false, + }, + StructureNode { + parent: Some( + 6, + ), + label: "X", + navigation_range: 169..170, + node_range: 169..170, + kind: SymbolKind( + Variant, + ), + detail: None, + deprecated: false, + }, + StructureNode { + parent: Some( + 6, + ), + label: "Y", + navigation_range: 172..173, + node_range: 172..178, + kind: SymbolKind( + Variant, + ), + detail: None, + deprecated: false, + }, + StructureNode { + parent: None, + label: "T", + navigation_range: 186..187, + node_range: 181..193, + kind: SymbolKind( + TypeAlias, + ), + detail: Some( + "()", + ), + deprecated: false, + }, + StructureNode { + parent: None, + label: "S", + navigation_range: 201..202, + node_range: 194..213, + kind: SymbolKind( + Static, + ), + detail: Some( + "i32", + ), + deprecated: false, + }, + StructureNode { + parent: None, + label: "C", + navigation_range: 220..221, 
+ node_range: 214..232, + kind: SymbolKind( + Const, + ), + detail: Some( + "i32", + ), + deprecated: false, + }, + StructureNode { + parent: None, + label: "Tr", + navigation_range: 239..241, + node_range: 233..244, + kind: SymbolKind( + Trait, + ), + detail: None, + deprecated: false, + }, + StructureNode { + parent: None, + label: "Alias", + navigation_range: 251..256, + node_range: 245..262, + kind: SymbolKind( + Trait, + ), + detail: None, + deprecated: false, + }, + StructureNode { + parent: None, + label: "mc", + navigation_range: 277..279, + node_range: 264..296, + kind: SymbolKind( + Macro, + ), + detail: None, + deprecated: false, + }, + StructureNode { + parent: None, + label: "let_statements", + navigation_range: 301..315, + node_range: 298..429, + kind: SymbolKind( + Function, + ), + detail: Some( + "fn()", + ), + deprecated: false, + }, StructureNode { parent: Some( - 27, + 15, ), label: "x", - navigation_range: 684..685, - node_range: 680..691, + navigation_range: 328..329, + node_range: 324..335, kind: SymbolKind( Local, ), @@ -716,11 +987,11 @@ fn let_statements() { }, StructureNode { parent: Some( - 27, + 15, ), label: "mut y", - navigation_range: 700..705, - node_range: 696..710, + navigation_range: 344..349, + node_range: 340..354, kind: SymbolKind( Local, ), @@ -729,11 +1000,11 @@ fn let_statements() { }, StructureNode { parent: Some( - 27, + 15, ), label: "Foo { .. }", - navigation_range: 719..741, - node_range: 715..754, + navigation_range: 363..385, + node_range: 359..398, kind: SymbolKind( Local, ), @@ -742,11 +1013,11 @@ fn let_statements() { }, StructureNode { parent: Some( - 27, + 15, ), label: "_", - navigation_range: 804..805, - node_range: 800..812, + navigation_range: 419..420, + node_range: 415..427, kind: SymbolKind( Local, ), diff --git a/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs old mode 100755 new mode 100644 index ac64413effebf..3969490e8dcf5 --- a/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs +++ b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs @@ -29,7 +29,6 @@ pub enum FoldKind { Consts, Statics, TypeAliases, - TraitAliases, ExternCrates, // endregion: item runs } @@ -147,11 +146,6 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec { res.push(Fold { range, kind: FoldKind::TypeAliases }) } }, - ast::TraitAlias(alias) => { - if let Some(range) = contiguous_range_for_item_group(alias, &mut visited_nodes) { - res.push(Fold { range, kind: FoldKind::TraitAliases }) - } - }, ast::ExternCrate(extern_crate) => { if let Some(range) = contiguous_range_for_item_group(extern_crate, &mut visited_nodes) { res.push(Fold { range, kind: FoldKind::ExternCrates }) @@ -351,7 +345,6 @@ mod tests { FoldKind::ReturnType => "returntype", FoldKind::MatchArm => "matcharm", FoldKind::Function => "function", - FoldKind::TraitAliases => "traitaliases", FoldKind::ExternCrates => "externcrates", }; assert_eq!(kind, &attr.unwrap()); diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs index 267e8ff7128be..686dbe2412933 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs @@ -38,14 +38,14 @@ pub(crate) fn goto_declaration( ast::NameRef(name_ref) => match NameRefClass::classify(&sema, &name_ref)? { NameRefClass::Definition(it, _) => Some(it), NameRefClass::FieldShorthand { field_ref, .. 
} => - return field_ref.try_to_nav(db), + return field_ref.try_to_nav(&sema), NameRefClass::ExternCrateShorthand { decl, .. } => - return decl.try_to_nav(db), + return decl.try_to_nav(&sema), }, ast::Name(name) => match NameClass::classify(&sema, &name)? { NameClass::Definition(it) | NameClass::ConstReference(it) => Some(it), NameClass::PatFieldShorthand { field_ref, .. } => - return field_ref.try_to_nav(db), + return field_ref.try_to_nav(&sema), }, _ => None } @@ -57,14 +57,14 @@ pub(crate) fn goto_declaration( Definition::Const(c) => c.as_assoc_item(db), Definition::TypeAlias(ta) => ta.as_assoc_item(db), Definition::Function(f) => f.as_assoc_item(db), - Definition::ExternCrateDecl(it) => return it.try_to_nav(db), + Definition::ExternCrateDecl(it) => return it.try_to_nav(&sema), _ => None, }?; let trait_ = assoc.implemented_trait(db)?; let name = Some(assoc.name(db)?); let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?; - item.try_to_nav(db) + item.try_to_nav(&sema) }) .flatten() .collect(); diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs index 84e41277390ff..2dcb13d9e7aa1 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs @@ -62,7 +62,7 @@ pub(crate) fn goto_definition( })?; if let Some(doc_comment) = token_as_doc_comment(&original_token) { return doc_comment.get_definition_with_descend_at(sema, offset, |def, _, link_range| { - let nav = def.try_to_nav(db)?; + let nav = def.try_to_nav(sema)?; Some(RangeInfo::new(link_range, nav.collect())) }); } @@ -73,7 +73,7 @@ pub(crate) fn goto_definition( return Some(RangeInfo::new( range, match resolution { - Some(res) => def_to_nav(db, Definition::from(res)), + Some(res) => def_to_nav(sema, Definition::from(res)), None => vec![], }, )); @@ -83,14 +83,14 @@ pub(crate) fn goto_definition( return Some(RangeInfo::new(original_token.text_range(), navs)); } - if let Some(navs) = find_definition_for_known_blanket_dual_impls(sema, &original_token) { - return Some(RangeInfo::new(original_token.text_range(), navs)); - } - let navs = sema .descend_into_macros_no_opaque(original_token.clone(), false) .into_iter() .filter_map(|token| { + if let Some(navs) = find_definition_for_known_blanket_dual_impls(sema, &token.value) { + return Some(navs); + } + let parent = token.value.parent()?; let token_file_id = token.file_id; @@ -121,7 +121,7 @@ pub(crate) fn goto_definition( .collect(); } try_filter_trait_item_definition(sema, &def) - .unwrap_or_else(|| def_to_nav(sema.db, def)) + .unwrap_or_else(|| def_to_nav(sema, def)) }) .collect(), ) @@ -160,7 +160,7 @@ fn find_definition_for_known_blanket_dual_impls( t_f, [return_type.type_arguments().next()?], ) - .map(|f| def_to_nav(sema.db, f.into())); + .map(|f| def_to_nav(sema, f.into())); } hir::AssocItemContainer::Impl(_) => return None, }; @@ -201,7 +201,7 @@ fn find_definition_for_known_blanket_dual_impls( // succeed let _t = f.as_assoc_item(sema.db)?.implemented_trait(sema.db)?; let def = Definition::from(f); - Some(def_to_nav(sema.db, def)) + Some(def_to_nav(sema, def)) } fn try_lookup_include_path( @@ -246,7 +246,7 @@ fn try_lookup_macro_def_in_macro_use( for mod_def in krate.root_module().declarations(sema.db) { if let ModuleDef::Macro(mac) = mod_def && mac.name(sema.db).as_str() == token.text() - && let Some(nav) = mac.try_to_nav(sema.db) + && let Some(nav) = mac.try_to_nav(sema) { return Some(nav.call_site); } @@ -278,7 +278,7 
@@ fn try_filter_trait_item_definition( .items(db) .iter() .filter(|itm| discriminant(*itm) == discriminant_value) - .find_map(|itm| (itm.name(db)? == name).then(|| itm.try_to_nav(db)).flatten()) + .find_map(|itm| (itm.name(db)? == name).then(|| itm.try_to_nav(sema)).flatten()) .map(|it| it.collect()) } } @@ -347,7 +347,7 @@ fn nav_for_exit_points( match_ast! { match node { ast::Fn(fn_) => { - let mut nav = sema.to_def(&fn_)?.try_to_nav(db)?; + let mut nav = sema.to_def(&fn_)?.try_to_nav(sema)?; // For async token, we navigate to itself, which triggers // VSCode to find the references let focus_token = if matches!(token_kind, T![async]) { @@ -564,8 +564,8 @@ fn nav_for_break_points( Some(navs) } -fn def_to_nav(db: &RootDatabase, def: Definition) -> Vec { - def.try_to_nav(db).map(|it| it.collect()).unwrap_or_default() +fn def_to_nav(sema: &Semantics<'_, RootDatabase>, def: Definition) -> Vec { + def.try_to_nav(sema).map(|it| it.collect()).unwrap_or_default() } fn expr_to_nav( @@ -3275,6 +3275,32 @@ impl From for B { } } +fn f() { + let a = A; + let b: B = a.into$0(); +} + "#, + ); + } + + #[test] + fn into_call_to_from_definition_within_macro() { + check( + r#" +//- proc_macros: identity +//- minicore: from +struct A; + +struct B; + +impl From for B { + fn from(value: A) -> Self { + //^^^^ + B + } +} + +#[proc_macros::identity] fn f() { let a = A; let b: B = a.into$0(); @@ -3918,6 +3944,20 @@ fn main() { _ => {} } } +"#, + ); + } + + #[test] + fn goto_builtin_type() { + check( + r#" +//- /main.rs crate:main deps:std +const _: &str$0 = ""; } + +//- /libstd.rs crate:std +mod prim_str {} +// ^^^^^^^^ "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs index 02d96a6473281..875403c4e32a4 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs @@ -86,7 +86,7 @@ pub(crate) fn goto_implementation( fn impls_for_ty(sema: &Semantics<'_, RootDatabase>, ty: hir::Type<'_>) -> Vec { Impl::all_for_type(sema.db, ty) .into_iter() - .filter_map(|imp| imp.try_to_nav(sema.db)) + .filter_map(|imp| imp.try_to_nav(sema)) .flatten() .collect() } @@ -97,7 +97,7 @@ fn impls_for_trait( ) -> Vec { Impl::all_for_trait(sema.db, trait_) .into_iter() - .filter_map(|imp| imp.try_to_nav(sema.db)) + .filter_map(|imp| imp.try_to_nav(sema)) .flatten() .collect() } @@ -114,7 +114,7 @@ fn impls_for_trait_item( let itm_name = itm.name(sema.db)?; (itm_name == fun_name).then_some(*itm) })?; - item.try_to_nav(sema.db) + item.try_to_nav(sema) }) .flatten() .collect() @@ -234,6 +234,7 @@ impl crate::T for crate::Foo {} ); } + // FIXME(next-solver): it would be nice to be able to also point to `&Foo` #[test] fn goto_implementation_all_impls() { check( @@ -246,7 +247,6 @@ impl Foo {} impl T for Foo {} //^^^ impl T for &Foo {} - //^^^^ "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs index b80e81d39c6df..ffd144a827e34 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs @@ -30,7 +30,7 @@ pub(crate) fn goto_type_definition( let mut res = Vec::new(); let mut push = |def: Definition| { - if let Some(navs) = def.try_to_nav(db) { + if let Some(navs) = def.try_to_nav(&sema) { for nav in navs { if !res.contains(&nav) { res.push(nav); diff --git 
a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs index 9960e79a5380f..04ce5a7567f3c 100644 --- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs +++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs @@ -277,7 +277,7 @@ fn highlight_references( Definition::Module(module) => { NavigationTarget::from_module_to_decl(sema.db, module) } - def => match def.try_to_nav(sema.db) { + def => match def.try_to_nav(sema) { Some(it) => it, None => continue, }, @@ -805,10 +805,8 @@ pub(crate) fn highlight_unsafe_points( push_to_highlights(unsafe_token_file_id, Some(unsafe_token.text_range())); // highlight unsafe operations - if let Some(block) = block_expr - && let Some(body) = sema.body_for(InFile::new(unsafe_token_file_id, block.syntax())) - { - let unsafe_ops = sema.get_unsafe_ops(body); + if let Some(block) = block_expr { + let unsafe_ops = sema.get_unsafe_ops_for_unsafe_block(block); for unsafe_op in unsafe_ops { push_to_highlights(unsafe_op.file_id, Some(unsafe_op.value.text_range())); } @@ -2535,4 +2533,21 @@ fn foo() { "#, ); } + + #[test] + fn different_unsafe_block() { + check( + r#" +fn main() { + unsafe$0 { + // ^^^^^^ + *(0 as *const u8) + // ^^^^^^^^^^^^^^^^^ + }; + unsafe { *(1 as *const u8) }; + unsafe { *(2 as *const u8) }; +} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs index 44c98a43f6944..03b9b3677511c 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs @@ -85,10 +85,11 @@ pub enum HoverAction { impl HoverAction { fn goto_type_from_targets( - db: &RootDatabase, + sema: &Semantics<'_, RootDatabase>, targets: Vec, edition: Edition, ) -> Option { + let db = sema.db; let targets = targets .into_iter() .filter_map(|it| { @@ -99,7 +100,7 @@ impl HoverAction { it.name(db).map(|name| name.display(db, edition).to_string()), edition, ), - nav: it.try_to_nav(db)?.call_site(), + nav: it.try_to_nav(sema)?.call_site(), }) }) .collect::>(); @@ -467,10 +468,10 @@ pub(crate) fn hover_for_definition( HoverResult { markup: render::process_markup(sema.db, def, &markup, range_map, config), actions: [ - show_fn_references_action(sema.db, def), - show_implementations_action(sema.db, def), + show_fn_references_action(sema, def), + show_implementations_action(sema, def), runnable_action(sema, def, file_id), - goto_type_action_for_def(sema.db, def, ¬able_traits, subst_types, edition), + goto_type_action_for_def(sema, def, ¬able_traits, subst_types, edition), ] .into_iter() .flatten() @@ -482,6 +483,12 @@ fn notable_traits<'db>( db: &'db RootDatabase, ty: &hir::Type<'db>, ) -> Vec<(hir::Trait, Vec<(Option>, hir::Name)>)> { + if ty.is_unknown() { + // The trait solver returns "yes" to the question whether the error type + // impls any trait, and we don't want to show it as having any notable trait. 
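The hover change above (completed by the `return Vec::new()` just below) skips the notable-traits lookup entirely when the type is `{unknown}`, because the solver answers "yes" when asked whether the error type implements any trait, and listing every notable trait for an error type is just noise. A tiny self-contained model of that guard, with an invented `Ty` enum standing in for rust-analyzer's types:

```rust
#[derive(Clone, Copy, PartialEq)]
enum Ty {
    Unknown,
    Named(&'static str),
}

fn notable_traits(ty: Ty) -> Vec<&'static str> {
    if ty == Ty::Unknown {
        // An error type "implements" everything, so reporting traits here would mislead.
        return Vec::new();
    }
    // Hypothetical lookup standing in for the real notable-traits query.
    match ty {
        Ty::Named("S") => vec!["Future", "Iterator", "Notable"],
        _ => vec![],
    }
}

fn main() {
    assert!(notable_traits(Ty::Unknown).is_empty());
    assert_eq!(notable_traits(Ty::Named("S")), ["Future", "Iterator", "Notable"]);
}
```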
+ return Vec::new(); + } + db.notable_traits_in_deps(ty.krate(db).into()) .iter() .flat_map(|it| &**it) @@ -505,7 +512,10 @@ fn notable_traits<'db>( .collect::>() } -fn show_implementations_action(db: &RootDatabase, def: Definition) -> Option { +fn show_implementations_action( + sema: &Semantics<'_, RootDatabase>, + def: Definition, +) -> Option { fn to_action(nav_target: NavigationTarget) -> HoverAction { HoverAction::Implementation(FilePosition { file_id: nav_target.file_id, @@ -515,19 +525,22 @@ fn show_implementations_action(db: &RootDatabase, def: Definition) -> Option { - return it.try_to_nav(db).map(UpmappingResult::call_site).map(to_action); + return it.try_to_nav(sema).map(UpmappingResult::call_site).map(to_action); } Definition::Adt(it) => Some(it), - Definition::SelfType(it) => it.self_ty(db).as_adt(), + Definition::SelfType(it) => it.self_ty(sema.db).as_adt(), _ => None, }?; - adt.try_to_nav(db).map(UpmappingResult::call_site).map(to_action) + adt.try_to_nav(sema).map(UpmappingResult::call_site).map(to_action) } -fn show_fn_references_action(db: &RootDatabase, def: Definition) -> Option { +fn show_fn_references_action( + sema: &Semantics<'_, RootDatabase>, + def: Definition, +) -> Option { match def { Definition::Function(it) => { - it.try_to_nav(db).map(UpmappingResult::call_site).map(|nav_target| { + it.try_to_nav(sema).map(UpmappingResult::call_site).map(|nav_target| { HoverAction::Reference(FilePosition { file_id: nav_target.file_id, offset: nav_target.focus_or_full_range().start(), @@ -560,12 +573,13 @@ fn runnable_action( } fn goto_type_action_for_def( - db: &RootDatabase, + sema: &Semantics<'_, RootDatabase>, def: Definition, notable_traits: &[(hir::Trait, Vec<(Option>, hir::Name)>)], subst_types: Option)>>, edition: Edition, ) -> Option { + let db = sema.db; let mut targets: Vec = Vec::new(); let mut push_new_def = |item: hir::ModuleDef| { if !targets.contains(&item) { @@ -612,7 +626,7 @@ fn goto_type_action_for_def( } } - HoverAction::goto_type_from_targets(db, targets, edition) + HoverAction::goto_type_from_targets(sema, targets, edition) } fn walk_and_push_ty( diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs index 51b5900e8155a..65375ed8f78c0 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs @@ -10,7 +10,7 @@ use hir::{ }; use ide_db::{ RootDatabase, - defs::Definition, + defs::{Definition, find_std_module}, documentation::{DocsRangeMap, HasDocs}, famous_defs::FamousDefs, generated::lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES}, @@ -128,7 +128,7 @@ pub(super) fn try_expr( }; walk_and_push_ty(sema.db, &inner_ty, &mut push_new_def); walk_and_push_ty(sema.db, &body_ty, &mut push_new_def); - if let Some(actions) = HoverAction::goto_type_from_targets(sema.db, targets, edition) { + if let Some(actions) = HoverAction::goto_type_from_targets(sema, targets, edition) { res.actions.push(actions); } @@ -210,7 +210,7 @@ pub(super) fn deref_expr( ) .into() }; - if let Some(actions) = HoverAction::goto_type_from_targets(sema.db, targets, edition) { + if let Some(actions) = HoverAction::goto_type_from_targets(sema, targets, edition) { res.actions.push(actions); } @@ -323,7 +323,7 @@ pub(super) fn struct_rest_pat( Markup::fenced_block(&s) }; - if let Some(actions) = HoverAction::goto_type_from_targets(sema.db, targets, edition) { + if let Some(actions) = HoverAction::goto_type_from_targets(sema, targets, edition) { 
res.actions.push(actions); } res @@ -361,7 +361,7 @@ pub(super) fn try_for_lint(attr: &ast::Attr, token: &SyntaxToken) -> Option match generic_param.parent() { hir::GenericDef::Adt(it) => Some(it.name(db)), hir::GenericDef::Trait(it) => Some(it.name(db)), - hir::GenericDef::TraitAlias(it) => Some(it.name(db)), hir::GenericDef::TypeAlias(it) => Some(it.name(db)), hir::GenericDef::Impl(i) => i.self_ty(db).as_adt().map(|adt| adt.name(db)), @@ -912,7 +911,7 @@ pub(super) fn literal( }; let ty = ty.display(sema.db, display_target); - let mut s = format!("```rust\n{ty}\n```\n___\n\n"); + let mut s = format!("```rust\n{ty}\n```\n---\n\n"); match value { Ok(value) => { let backtick_len = value.chars().filter(|c| *c == '`').count(); @@ -1026,12 +1025,12 @@ fn type_info( if let Some(extra) = render_notable_trait(db, ¬able_traits(db, &original), edition, display_target) { - desc.push_str("\n___\n"); + desc.push_str("\n---\n"); desc.push_str(&extra); }; desc.into() }; - if let Some(actions) = HoverAction::goto_type_from_targets(db, targets, edition) { + if let Some(actions) = HoverAction::goto_type_from_targets(sema, targets, edition) { res.actions.push(actions); } Some(res) @@ -1094,12 +1093,12 @@ fn closure_ty( |_| None, |_| None, ) { - format_to!(markup, "\n___\n{layout}"); + format_to!(markup, "\n---\n{layout}"); } format_to!(markup, "{adjusted}\n\n## Captures\n{}", captures_rendered,); let mut res = HoverResult::default(); - if let Some(actions) = HoverAction::goto_type_from_targets(sema.db, targets, edition) { + if let Some(actions) = HoverAction::goto_type_from_targets(sema, targets, edition) { res.actions.push(actions); } res.markup = markup.into(); @@ -1161,19 +1160,6 @@ fn markup( } } -fn find_std_module( - famous_defs: &FamousDefs<'_, '_>, - name: &str, - edition: Edition, -) -> Option { - let db = famous_defs.0.db; - let std_crate = famous_defs.std()?; - let std_root_module = std_crate.root_module(); - std_root_module.children(db).find(|module| { - module.name(db).is_some_and(|module| module.display(db, edition).to_string() == name) - }) -} - fn render_memory_layout( config: Option, layout: impl FnOnce() -> Result, @@ -1316,7 +1302,7 @@ fn keyword_hints( KeywordHint { description, keyword_mod, - actions: HoverAction::goto_type_from_targets(sema.db, targets, edition) + actions: HoverAction::goto_type_from_targets(sema, targets, edition) .into_iter() .collect(), } diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs index c5480217a91e2..1ea11a215f83d 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs @@ -6,6 +6,8 @@ use crate::{ HoverConfig, HoverDocFormat, MemoryLayoutHoverConfig, MemoryLayoutHoverRenderKind, fixture, }; +use hir::setup_tracing; + const HOVER_BASE_CONFIG: HoverConfig = HoverConfig { links_in_hover: false, memory_layout: Some(MemoryLayoutHoverConfig { @@ -38,6 +40,7 @@ fn check_hover_no_result(#[rust_analyzer::rust_fixture] ra_fixture: &str) { #[track_caller] fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { + let _tracing = setup_tracing(); let (analysis, position) = fixture::position(ra_fixture); let hover = analysis .hover( @@ -357,7 +360,7 @@ fn main() { ```rust impl Fn(i32) -> i32 ``` - ___ + --- size = 8, align = 8, niches = 1 ## Captures @@ -380,7 +383,7 @@ fn main() { ```rust impl Fn(i32) -> i32 ``` - ___ + --- size = 0, align = 1 ## Captures @@ -414,7 +417,7 @@ fn main() { ```rust impl FnOnce() 
``` - ___ + --- size = 16 (0x10), align = 8, niches = 1 ## Captures @@ -443,7 +446,7 @@ fn main() { ```rust impl FnMut() ``` - ___ + --- size = 8, align = 8, niches = 1 ## Captures @@ -468,7 +471,7 @@ fn main() { ```rust impl FnOnce() -> S2 ``` - ___ + --- size = 8, align = 8, niches = 1 Coerced to: &impl FnOnce() -> S2 @@ -4735,7 +4738,7 @@ fn main() { *value* ```rust - let value: Const<_> + let value: Const<-1> ``` --- @@ -6829,7 +6832,7 @@ fn hover_lint() { ``` arithmetic_overflow ``` - ___ + --- arithmetic operation overflows "#]], @@ -6841,7 +6844,7 @@ fn hover_lint() { ``` arithmetic_overflow ``` - ___ + --- arithmetic operation overflows "#]], @@ -6857,7 +6860,7 @@ fn hover_clippy_lint() { ``` clippy::almost_swapped ``` - ___ + --- Checks for `foo = bar; bar = foo` sequences. "#]], @@ -6869,7 +6872,7 @@ fn hover_clippy_lint() { ``` clippy::almost_swapped ``` - ___ + --- Checks for `foo = bar; bar = foo` sequences. "#]], @@ -7192,7 +7195,7 @@ fn foo() { "#, expect![[r#" ```rust - &'static str + &str ```"#]], ); } @@ -8456,7 +8459,7 @@ format_args!("{aaaaa$0}"); *aaaaa* ```rust - let aaaaa: &'static str + let aaaaa: &str ``` "#]], ); @@ -8476,7 +8479,7 @@ format_args!("{$0aaaaa}"); *aaaaa* ```rust - let aaaaa: &'static str + let aaaaa: &str ``` "#]], ); @@ -8496,7 +8499,7 @@ format_args!(r"{$0aaaaa}"); *aaaaa* ```rust - let aaaaa: &'static str + let aaaaa: &str ``` "#]], ); @@ -8521,7 +8524,7 @@ foo!(r"{$0aaaaa}"); *aaaaa* ```rust - let aaaaa: &'static str + let aaaaa: &str ``` "#]], ); @@ -8567,7 +8570,7 @@ fn main() { ```rust &'static str ``` - ___ + --- value of literal: ` 🦀🦀\A ` "#]], @@ -8583,7 +8586,7 @@ fn main() { ```rust &'static str ``` - ___ + --- value of literal: ` 🦀\u{1f980}\\\x41 ` "#]], @@ -8605,7 +8608,7 @@ fsdghs"; ```rust &'static str ``` - ___ + --- value of literal (truncated up to newline): ` 🦀\u{1f980}\\\x41 ` "#]], @@ -8625,7 +8628,7 @@ fn main() { ```rust &'static {unknown} ``` - ___ + --- value of literal: ` 🦀🦀\A ` "#]], @@ -8644,7 +8647,7 @@ fn main() { ```rust &'static str ``` - ___ + --- value of literal: ```` `[^`]*` ```` "#]], @@ -8659,7 +8662,7 @@ fn main() { ```rust &'static str ``` - ___ + --- value of literal: `` ` `` "#]], @@ -8674,7 +8677,7 @@ fn main() { ```rust &'static str ``` - ___ + --- value of literal: ` ` "#]], @@ -8690,7 +8693,7 @@ fn main() { ```rust &'static str ``` - ___ + --- value of literal: ` Hello World ` "#]], @@ -8710,7 +8713,7 @@ fn main() { ```rust &'static [u8; 5] ``` - ___ + --- value of literal: ` [240, 159, 166, 128, 92] ` "#]], @@ -8726,7 +8729,7 @@ fn main() { ```rust &'static [u8; 18] ``` - ___ + --- value of literal: ` [92, 120, 70, 48, 92, 120, 57, 70, 92, 120, 65, 54, 92, 120, 56, 48, 92, 92] ` "#]], @@ -8746,7 +8749,7 @@ fn main() { ```rust u8 ``` - ___ + --- value of literal: ` 0xF0 ` "#]], @@ -8762,7 +8765,7 @@ fn main() { ```rust u8 ``` - ___ + --- value of literal: ` 0x5C ` "#]], @@ -8782,7 +8785,7 @@ fn main() { ```rust char ``` - ___ + --- value of literal: ` A ` "#]], @@ -8798,7 +8801,7 @@ fn main() { ```rust char ``` - ___ + --- value of literal: ` \ ` "#]], @@ -8814,7 +8817,7 @@ fn main() { ```rust char ``` - ___ + --- value of literal: ` 🦀 ` "#]], @@ -8834,7 +8837,7 @@ fn main() { ```rust f64 ``` - ___ + --- value of literal: ` 1 (bits: 0x3FF0000000000000) ` "#]], @@ -8850,7 +8853,7 @@ fn main() { ```rust f16 ``` - ___ + --- value of literal: ` 1 (bits: 0x3C00) ` "#]], @@ -8866,7 +8869,7 @@ fn main() { ```rust f32 ``` - ___ + --- value of literal: ` 1 (bits: 0x3F800000) ` "#]], @@ -8882,7 +8885,7 @@ fn 
main() { ```rust f128 ``` - ___ + --- value of literal: ` 1 (bits: 0x3FFF0000000000000000000000000000) ` "#]], @@ -8898,7 +8901,7 @@ fn main() { ```rust f64 ``` - ___ + --- value of literal: ` 134000000000000 (bits: 0x42DE77D399980000) ` "#]], @@ -8914,7 +8917,7 @@ fn main() { ```rust f64 ``` - ___ + --- value of literal: ` 1523527134274733600000000 (bits: 0x44F429E9249F629B) ` "#]], @@ -8930,7 +8933,7 @@ fn main() { ```rust f64 ``` - ___ + --- invalid literal: invalid float literal "#]], @@ -8950,7 +8953,7 @@ fn main() { ```rust i32 ``` - ___ + --- value of literal: ` 34325236457856836345234 (0x744C659178614489D92|0b111010001001100011001011001000101111000011000010100010010001001110110010010) ` "#]], @@ -8966,7 +8969,7 @@ fn main() { ```rust i32 ``` - ___ + --- value of literal: ` 13412342421 (0x31F701A95|0b1100011111011100000001101010010101) ` "#]], @@ -8982,7 +8985,7 @@ fn main() { ```rust i32 ``` - ___ + --- value of literal: ` 306328611 (0x12423423|0b10010010000100011010000100011) ` "#]], @@ -8998,7 +9001,7 @@ fn main() { ```rust i32 ``` - ___ + --- value of literal: ` 255 (0xFF|0b11111111) ` "#]], @@ -9014,7 +9017,7 @@ fn main() { ```rust i32 ``` - ___ + --- value of literal: ` 5349 (0x14E5|0b1010011100101) ` "#]], @@ -9030,7 +9033,7 @@ fn main() { ```rust i32 ``` - ___ + --- invalid literal: number too large to fit in target type "#]], @@ -9186,7 +9189,7 @@ fn main() { ```rust S ``` - ___ + --- Implements notable traits: `Future`, `Iterator`, `Notable`"#]], ); } @@ -10165,7 +10168,7 @@ fn baz() { --- - `U` = `i32`, `T` = `&'static str` + `U` = `i32`, `T` = `&str` "#]], ); } @@ -10258,7 +10261,7 @@ fn bar() { --- - `T` = `i8`, `U` = `&'static str` + `T` = `i8`, `U` = `&str` "#]], ); } @@ -10566,6 +10569,77 @@ macro_rules! str { ); } +#[test] +fn test_runnables_with_snapshot_tests_indirect_dep() { + check_actions( + r#" +//- /lib.rs crate:foo deps:utils +use utils::expect_test::expect; + +#[test] +fn test$0() { + let actual = "new25"; + expect!["new25"].assert_eq(&actual); +} + +//- /expect-test/lib.rs crate:expect_test +struct Expect; + +impl Expect { + fn assert_eq(&self, actual: &str) {} +} + +#[macro_export] +macro_rules! 
expect { + ($e:expr) => Expect; // dummy +} + +//- /utils/lib.rs crate:utils deps:expect_test +pub use expect_test; + "#, + expect![[r#" + [ + Reference( + FilePositionWrapper { + file_id: FileId( + 0, + ), + offset: 44, + }, + ), + Runnable( + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 33..121, + focus_range: 44..48, + name: "test", + kind: Function, + }, + kind: Test { + test_id: Path( + "test", + ), + attr: TestAttr { + ignore: false, + }, + }, + cfg: None, + update_test: UpdateTest { + expect_test: true, + insta: false, + snapbox: false, + }, + }, + ), + ] + "#]], + ); +} + #[test] fn drop_glue() { check( @@ -11023,3 +11097,26 @@ impl Enum<'_, Borrowed> { "#]], ); } + +#[test] +fn unknown_should_not_implement_notable_traits() { + check( + r#" +//- minicore: future, iterator +fn foo() { + let x$0; +} + "#, + expect![[r#" + *x* + + ```rust + let x: {unknown} + ``` + + --- + + no Drop + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs index 7a8514c47af95..507af41d84461 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs @@ -8,7 +8,9 @@ use hir::{ ClosureStyle, DisplayTarget, EditionedFileId, HasVisibility, HirDisplay, HirDisplayError, HirWrite, InRealFile, ModuleDef, ModuleDefId, Semantics, sym, }; -use ide_db::{FileRange, RootDatabase, famous_defs::FamousDefs, text_edit::TextEditBuilder}; +use ide_db::{ + FileRange, RootDatabase, base_db::salsa, famous_defs::FamousDefs, text_edit::TextEditBuilder, +}; use ide_db::{FxHashSet, text_edit::TextEdit}; use itertools::Itertools; use smallvec::{SmallVec, smallvec}; @@ -105,14 +107,16 @@ pub(crate) fn inlay_hints( } }; let mut preorder = file.preorder(); - while let Some(event) = preorder.next() { - if matches!((&event, range_limit), (WalkEvent::Enter(node), Some(range)) if range.intersect(node.text_range()).is_none()) - { - preorder.skip_subtree(); - continue; + salsa::attach(sema.db, || { + while let Some(event) = preorder.next() { + if matches!((&event, range_limit), (WalkEvent::Enter(node), Some(range)) if range.intersect(node.text_range()).is_none()) + { + preorder.skip_subtree(); + continue; + } + hints(event); } - hints(event); - } + }); if let Some(range_limit) = range_limit { acc.retain(|hint| range_limit.contains_range(hint.range)); } @@ -228,9 +232,9 @@ fn hints( chaining::hints(hints, famous_defs, config, display_target, &expr); adjustment::hints(hints, famous_defs, config, display_target, &expr); match expr { - ast::Expr::CallExpr(it) => param_name::hints(hints, famous_defs, config, ast::Expr::from(it)), + ast::Expr::CallExpr(it) => param_name::hints(hints, famous_defs, config, file_id, ast::Expr::from(it)), ast::Expr::MethodCallExpr(it) => { - param_name::hints(hints, famous_defs, config, ast::Expr::from(it)) + param_name::hints(hints, famous_defs, config, file_id, ast::Expr::from(it)) } ast::Expr::ClosureExpr(it) => { closure_captures::hints(hints, famous_defs, config, it.clone()); @@ -302,6 +306,7 @@ pub struct InlayHintsConfig { pub generic_parameter_hints: GenericParameterHints, pub chaining_hints: bool, pub adjustment_hints: AdjustmentHints, + pub adjustment_hints_disable_reborrows: bool, pub adjustment_hints_mode: AdjustmentHintsMode, pub adjustment_hints_hide_outside_unsafe: bool, pub closure_return_type_hints: ClosureReturnTypeHints, @@ -426,7 +431,7 @@ pub enum LifetimeElisionHints { #[derive(Clone, 
Debug, PartialEq, Eq)] pub enum AdjustmentHints { Always, - ReborrowOnly, + BorrowsOnly, Never, } @@ -667,7 +672,7 @@ impl fmt::Debug for InlayHintLabelPart { #[derive(Debug)] struct InlayHintLabelBuilder<'a> { - db: &'a RootDatabase, + sema: &'a Semantics<'a, RootDatabase>, result: InlayHintLabel, last_part: String, resolve: bool, @@ -689,7 +694,7 @@ impl HirWrite for InlayHintLabelBuilder<'_> { LazyProperty::Lazy } else { LazyProperty::Computed({ - let Some(location) = ModuleDef::from(def).try_to_nav(self.db) else { return }; + let Some(location) = ModuleDef::from(def).try_to_nav(self.sema) else { return }; let location = location.call_site(); FileRange { file_id: location.file_id, range: location.focus_or_full_range() } }) @@ -734,48 +739,50 @@ fn label_of_ty( config: &InlayHintsConfig, display_target: DisplayTarget, ) -> Result<(), HirDisplayError> { - let iter_item_type = hint_iterator(sema, famous_defs, ty); - match iter_item_type { - Some((iter_trait, item, ty)) => { - const LABEL_START: &str = "impl "; - const LABEL_ITERATOR: &str = "Iterator"; - const LABEL_MIDDLE: &str = "<"; - const LABEL_ITEM: &str = "Item"; - const LABEL_MIDDLE2: &str = " = "; - const LABEL_END: &str = ">"; - - max_length = max_length.map(|len| { - len.saturating_sub( - LABEL_START.len() - + LABEL_ITERATOR.len() - + LABEL_MIDDLE.len() - + LABEL_MIDDLE2.len() - + LABEL_END.len(), - ) - }); - - label_builder.write_str(LABEL_START)?; - label_builder.start_location_link(ModuleDef::from(iter_trait).into()); - label_builder.write_str(LABEL_ITERATOR)?; - label_builder.end_location_link(); - label_builder.write_str(LABEL_MIDDLE)?; - label_builder.start_location_link(ModuleDef::from(item).into()); - label_builder.write_str(LABEL_ITEM)?; - label_builder.end_location_link(); - label_builder.write_str(LABEL_MIDDLE2)?; - rec(sema, famous_defs, max_length, &ty, label_builder, config, display_target)?; - label_builder.write_str(LABEL_END)?; - Ok(()) + salsa::attach(sema.db, || { + let iter_item_type = hint_iterator(sema, famous_defs, ty); + match iter_item_type { + Some((iter_trait, item, ty)) => { + const LABEL_START: &str = "impl "; + const LABEL_ITERATOR: &str = "Iterator"; + const LABEL_MIDDLE: &str = "<"; + const LABEL_ITEM: &str = "Item"; + const LABEL_MIDDLE2: &str = " = "; + const LABEL_END: &str = ">"; + + max_length = max_length.map(|len| { + len.saturating_sub( + LABEL_START.len() + + LABEL_ITERATOR.len() + + LABEL_MIDDLE.len() + + LABEL_MIDDLE2.len() + + LABEL_END.len(), + ) + }); + + label_builder.write_str(LABEL_START)?; + label_builder.start_location_link(ModuleDef::from(iter_trait).into()); + label_builder.write_str(LABEL_ITERATOR)?; + label_builder.end_location_link(); + label_builder.write_str(LABEL_MIDDLE)?; + label_builder.start_location_link(ModuleDef::from(item).into()); + label_builder.write_str(LABEL_ITEM)?; + label_builder.end_location_link(); + label_builder.write_str(LABEL_MIDDLE2)?; + rec(sema, famous_defs, max_length, &ty, label_builder, config, display_target)?; + label_builder.write_str(LABEL_END)?; + Ok(()) + } + None => ty + .display_truncated(sema.db, max_length, display_target) + .with_closure_style(config.closure_style) + .write_to(label_builder), } - None => ty - .display_truncated(sema.db, max_length, display_target) - .with_closure_style(config.closure_style) - .write_to(label_builder), - } + }) } let mut label_builder = InlayHintLabelBuilder { - db: sema.db, + sema, last_part: String::new(), location: None, result: InlayHintLabel::default(), @@ -880,6 +887,7 @@ mod tests { 
closure_return_type_hints: ClosureReturnTypeHints::Never, closure_capture_hints: false, adjustment_hints: AdjustmentHints::Never, + adjustment_hints_disable_reborrows: false, adjustment_hints_mode: AdjustmentHintsMode::Prefix, adjustment_hints_hide_outside_unsafe: false, binding_mode_hints: false, diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs index 4d020bac3aad4..0fd587a728408 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs @@ -10,7 +10,7 @@ use hir::{ Adjust, Adjustment, AutoBorrow, DisplayTarget, HirDisplay, Mutability, OverloadedDeref, PointerCast, Safety, }; -use ide_db::famous_defs::FamousDefs; +use ide_db::{base_db::salsa, famous_defs::FamousDefs}; use ide_db::text_edit::TextEditBuilder; use syntax::ast::{self, AstNode, prec::ExprPrecedence}; @@ -47,7 +47,22 @@ pub(super) fn hints( let descended = sema.descend_node_into_attributes(expr.clone()).pop(); let desc_expr = descended.as_ref().unwrap_or(expr); - let adjustments = sema.expr_adjustments(desc_expr).filter(|it| !it.is_empty())?; + let mut adjustments = sema.expr_adjustments(desc_expr).filter(|it| !it.is_empty())?; + + if config.adjustment_hints_disable_reborrows { + // Remove consecutive deref-ref, i.e. reborrows. + let mut i = 0; + while i < adjustments.len().saturating_sub(1) { + let [current, next, ..] = &adjustments[i..] else { unreachable!() }; + if matches!(current.kind, Adjust::Deref(None)) + && matches!(next.kind, Adjust::Borrow(AutoBorrow::Ref(_))) + { + adjustments.splice(i..i + 2, []); + } else { + i += 1; + } + } + } if let ast::Expr::BlockExpr(_) | ast::Expr::IfExpr(_) | ast::Expr::MatchExpr(_) = desc_expr { // Don't show unnecessary reborrows for these, they will just repeat the inner ones again @@ -201,13 +216,15 @@ pub(super) fn hints( text: if postfix { format!(".{}", text.trim_end()) } else { text.to_owned() }, linked_location: None, tooltip: Some(config.lazy_tooltip(|| { - InlayTooltip::Markdown(format!( - "`{}` → `{}`\n\n**{}**\n\n{}", - source.display(sema.db, display_target), - target.display(sema.db, display_target), - coercion, - detailed_tooltip - )) + salsa::attach(sema.db, || { + InlayTooltip::Markdown(format!( + "`{}` → `{}`\n\n**{}**\n\n{}", + source.display(sema.db, display_target), + target.display(sema.db, display_target), + coercion, + detailed_tooltip + )) + }) })), }; if postfix { &mut post } else { &mut pre }.label.append_part(label); @@ -411,10 +428,9 @@ fn main() { (()) == {()}; // ^^& // ^^^^& - let closure: dyn Fn = || (); + let closure: &dyn Fn = &|| (); + //^^^^^^&* closure(); - //^^^^^^^(& - //^^^^^^^) Struct[0]; //^^^^^^(& //^^^^^^) @@ -507,9 +523,10 @@ fn main() { (()) == {()}; // ^^.& // ^^^^.& - let closure: dyn Fn = || (); + let closure: &dyn Fn = &|| (); + //^^^^^^( + //^^^^^^).*.&. 
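Stepping back from the test expectations for a moment: the `adjustment.rs` hunk above adds the `adjustment_hints_disable_reborrows` option, whose loop drops each consecutive `Deref(None)` followed by `Borrow(Ref)` pair, i.e. a plain reborrow, before any adjustment hints are rendered. Below is a self-contained model of that splice loop over a simplified adjustment enum; the real `Adjust` type in `hir` carries more information than this.

```rust
// Simplified stand-ins for the adjustment kinds involved in a reborrow.
#[derive(Debug, PartialEq, Clone, Copy)]
enum Adjust {
    Deref,     // models `Adjust::Deref(None)`
    BorrowRef, // models `Adjust::Borrow(AutoBorrow::Ref(_))`
    Other,
}

// Remove consecutive deref-then-ref pairs (reborrows), mirroring the loop in the hunk above.
fn strip_reborrows(adjustments: &mut Vec<Adjust>) {
    let mut i = 0;
    while i < adjustments.len().saturating_sub(1) {
        if adjustments[i] == Adjust::Deref && adjustments[i + 1] == Adjust::BorrowRef {
            adjustments.splice(i..i + 2, []);
        } else {
            i += 1;
        }
    }
}

fn main() {
    // A value that is dereferenced and immediately re-borrowed keeps no visible adjustment.
    let mut adjustments = vec![Adjust::Deref, Adjust::BorrowRef, Adjust::Other];
    strip_reborrows(&mut adjustments);
    assert_eq!(adjustments, [Adjust::Other]);
}
```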
closure(); - //^^^^^^^.& Struct[0]; //^^^^^^.& &mut Struct[0]; @@ -714,6 +731,38 @@ fn hello(it: &&[impl T]) { //^^(&** //^^) } +"#, + ); + } + + #[test] + fn disable_reborrows() { + check_with_config( + InlayHintsConfig { + adjustment_hints: AdjustmentHints::Always, + adjustment_hints_disable_reborrows: true, + ..DISABLED_CONFIG + }, + r#" +#![rustc_coherence_is_core] + +trait ToOwned { + type Owned; + fn to_owned(&self) -> Self::Owned; +} + +struct String; +impl ToOwned for str { + type Owned = String; + fn to_owned(&self) -> Self::Owned { String } +} + +fn a(s: &String) {} + +fn main() { + let s = "".to_owned(); + a(&s) +} "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs index 922e9598aa017..104740cbbf74a 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs @@ -378,9 +378,9 @@ fn main() { let foo = foo3(); // ^^^ impl Fn(f64, f64) -> u32 let foo = foo4(); - // ^^^ &'static dyn Fn(f64, f64) -> u32 + // ^^^ &dyn Fn(f64, f64) -> u32 let foo = foo5(); - // ^^^ &'static dyn Fn(&dyn Fn(f64, f64) -> u32, f64) -> u32 + // ^^^ &dyn Fn(&dyn Fn(f64, f64) -> u32, f64) -> u32 let foo = foo6(); // ^^^ impl Fn(f64, f64) -> u32 let foo = foo7(); @@ -411,7 +411,7 @@ fn main() { let foo = foo3(); // ^^^ impl Fn(f64, f64) -> u32 let foo = foo4(); - // ^^^ &'static dyn Fn(f64, f64) -> u32 + // ^^^ &dyn Fn(f64, f64) -> u32 let foo = foo5(); let foo = foo6(); let foo = foo7(); @@ -526,7 +526,7 @@ fn main() { //^^^^ i32 let _ = 22; let test = "test"; - //^^^^ &'static str + //^^^^ &str let test = InnerStruct {}; //^^^^ InnerStruct @@ -616,12 +616,12 @@ impl Iterator for IntoIter { fn main() { let mut data = Vec::new(); - //^^^^ Vec<&'static str> + //^^^^ Vec<&str> data.push("foo"); for i in data { - //^ &'static str + //^ &str let z = i; - //^ &'static str + //^ &str } } "#, @@ -909,7 +909,7 @@ fn main() { foo(plus_one); let add_mul = bar(|x: u8| { x + 1 }); - // ^^^^^^^ impl FnOnce(u8) -> u8 + ?Sized + // ^^^^^^^ impl FnOnce(u8) -> u8 let closure = if let Some(6) = add_mul(2).checked_sub(1) { // ^^^^^^^ fn(i32) -> i32 @@ -1015,7 +1015,7 @@ fn test(t: T) { "#, expect![[r#" fn test(t: T) { - let f = |a: i32, b: &'static str, c: T| {}; + let f = |a: i32, b: &str, c: T| {}; let result: () = f(42, "", t); } "#]], diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bounds.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bounds.rs index f0003dae3f36f..4abd67b91f5ec 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bounds.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bounds.rs @@ -44,7 +44,7 @@ pub(super) fn hints( text: "Sized".to_owned(), linked_location: sized_trait.and_then(|it| { config.lazy_location_opt(|| { - it.try_to_nav(sema.db).map(|it| { + it.try_to_nav(sema).map(|it| { let n = it.call_site(); FileRange { file_id: n.file_id, diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs index ec0a4c46c7fec..754707784055a 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs @@ -7,7 +7,7 @@ use std::iter::zip; use either::Either; -use hir::Semantics; +use hir::{EditionedFileId, Semantics}; use ide_db::{RootDatabase, famous_defs::FamousDefs}; use stdx::to_lower_snake_case; @@ -19,6 +19,7 @@ 
pub(super) fn hints( acc: &mut Vec, FamousDefs(sema, krate): &FamousDefs<'_, '_>, config: &InlayHintsConfig, + file_id: EditionedFileId, expr: ast::Expr, ) -> Option<()> { if !config.parameter_hints { @@ -39,6 +40,9 @@ pub(super) fn hints( .filter_map(|(p, arg)| { // Only annotate hints for expressions that exist in the original file let range = sema.original_range_opt(arg.syntax())?; + if range.file_id != file_id { + return None; + } let param_name = p.name(sema.db)?; Some((p, param_name, arg, range)) }) diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs index 98877482ed863..5febe4ee20bc6 100644 --- a/src/tools/rust-analyzer/crates/ide/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs @@ -62,12 +62,12 @@ use std::panic::{AssertUnwindSafe, UnwindSafe}; use cfg::CfgOptions; use fetch_crates::CrateInfo; -use hir::{ChangeWithProcMacros, EditionedFileId, crate_def_map, sym}; +use hir::{ChangeWithProcMacros, EditionedFileId, crate_def_map, db::HirDatabase, sym}; use ide_db::{ FxHashMap, FxIndexSet, LineIndexDatabase, base_db::{ CrateOrigin, CrateWorkspaceData, Env, FileSet, RootQueryDb, SourceDatabase, VfsPath, - salsa::Cancelled, + salsa::{self, Cancelled}, }, prime_caches, symbol_index, }; @@ -81,7 +81,7 @@ pub use crate::{ annotations::{Annotation, AnnotationConfig, AnnotationKind, AnnotationLocation}, call_hierarchy::{CallHierarchyConfig, CallItem}, expand_macro::ExpandedMacro, - file_structure::{StructureNode, StructureNodeKind}, + file_structure::{FileStructureConfig, StructureNode, StructureNodeKind}, folding_ranges::{Fold, FoldKind}, highlight_related::{HighlightRelatedConfig, HighlightedRange}, hover::{ @@ -263,7 +263,7 @@ impl Analysis { false, proc_macro_cwd, Arc::new(CrateWorkspaceData { - data_layout: Err("fixture has no layout".into()), + target: Err("fixture has no layout".into()), toolchain: None, }), ); @@ -430,12 +430,16 @@ impl Analysis { /// Returns a tree representation of symbols in the file. Useful to draw a /// file outline. - pub fn file_structure(&self, file_id: FileId) -> Cancellable> { + pub fn file_structure( + &self, + config: &FileStructureConfig, + file_id: FileId, + ) -> Cancellable> { // FIXME: Edition self.with_db(|db| { let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id); - - file_structure::file_structure(&db.parse(editioned_file_id_wrapper).tree()) + let source_file = db.parse(editioned_file_id_wrapper).tree(); + file_structure::file_structure(&source_file, config) }) } @@ -472,13 +476,18 @@ impl Analysis { /// Fuzzy searches for a symbol. pub fn symbol_search(&self, query: Query, limit: usize) -> Cancellable> { - self.with_db(|db| { - symbol_index::world_symbols(db, query) - .into_iter() // xx: should we make this a par iter? - .filter_map(|s| s.try_to_nav(db)) - .take(limit) - .map(UpmappingResult::call_site) - .collect::>() + // `world_symbols` currently clones the database to run stuff in parallel, which will make any query panic + // if we were to attach it here. 
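One more inlay-hint change worth calling out from the hunks above: `param_name::hints` now receives the current `file_id` and drops any argument whose original range resolves to a different file, so parameter-name hints are no longer attributed to ranges that only exist elsewhere (presumably arguments that originate inside another file's macro expansion). A minimal invented model of that filter, not the real rust-analyzer types:

```rust
#[derive(Debug, PartialEq, Clone, Copy)]
struct FileId(u32);

struct Arg {
    param_name: &'static str,
    // File the argument's original text resolves to after macro mapping.
    origin_file: FileId,
}

// Keep only hints whose resolved range lives in the file currently being processed,
// mirroring the `range.file_id != file_id` check above.
fn param_hints(current_file: FileId, args: &[Arg]) -> Vec<&'static str> {
    args.iter()
        .filter(|arg| arg.origin_file == current_file)
        .map(|arg| arg.param_name)
        .collect()
}

fn main() {
    let current = FileId(0);
    let args = [
        Arg { param_name: "x", origin_file: FileId(0) },
        Arg { param_name: "y", origin_file: FileId(1) }, // maps back to a different file
    ];
    assert_eq!(param_hints(current, &args), ["x"]);
}
```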
+ Cancelled::catch(|| { + let symbols = symbol_index::world_symbols(&self.db, query); + salsa::attach(&self.db, || { + symbols + .into_iter() + .filter_map(|s| s.try_to_nav(&Semantics::new(&self.db))) + .take(limit) + .map(UpmappingResult::call_site) + .collect::>() + }) }) } @@ -652,15 +661,6 @@ impl Analysis { }) } - /// Computes syntax highlighting for the given file - pub fn highlight( - &self, - highlight_config: HighlightConfig, - file_id: FileId, - ) -> Cancellable> { - self.with_db(|db| syntax_highlighting::highlight(db, highlight_config, file_id, None)) - } - /// Computes all ranges to highlight for a given item in a file. pub fn highlight_related( &self, @@ -672,20 +672,56 @@ impl Analysis { }) } + /// Computes syntax highlighting for the given file + pub fn highlight( + &self, + highlight_config: HighlightConfig, + file_id: FileId, + ) -> Cancellable> { + // highlighting may construct a new database for "speculative" execution, so we can't currently attach the database + // highlighting instead sets up the attach hook where necessary for the trait solver + Cancelled::catch(|| { + syntax_highlighting::highlight(&self.db, highlight_config, file_id, None) + }) + } + /// Computes syntax highlighting for the given file range. pub fn highlight_range( &self, highlight_config: HighlightConfig, frange: FileRange, ) -> Cancellable> { - self.with_db(|db| { - syntax_highlighting::highlight(db, highlight_config, frange.file_id, Some(frange.range)) + // highlighting may construct a new database for "speculative" execution, so we can't currently attach the database + // highlighting instead sets up the attach hook where necessary for the trait solver + Cancelled::catch(|| { + syntax_highlighting::highlight( + &self.db, + highlight_config, + frange.file_id, + Some(frange.range), + ) + }) + } + + /// Computes syntax highlighting for the given file. + pub fn highlight_as_html_with_config( + &self, + config: HighlightConfig, + file_id: FileId, + rainbow: bool, + ) -> Cancellable { + // highlighting may construct a new database for "speculative" execution, so we can't currently attach the database + // highlighting instead sets up the attach hook where necessary for the trait solver + Cancelled::catch(|| { + syntax_highlighting::highlight_as_html_with_config(&self.db, config, file_id, rainbow) }) } /// Computes syntax highlighting for the given file. pub fn highlight_as_html(&self, file_id: FileId, rainbow: bool) -> Cancellable { - self.with_db(|db| syntax_highlighting::highlight_as_html(db, file_id, rainbow)) + // highlighting may construct a new database for "speculative" execution, so we can't currently attach the database + // highlighting instead sets up the attach hook where necessary for the trait solver + Cancelled::catch(|| syntax_highlighting::highlight_as_html(&self.db, file_id, rainbow)) } /// Computes completions at the given position. @@ -863,8 +899,12 @@ impl Analysis { where F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe, { - let snap = self.db.clone(); - Cancelled::catch(|| f(&snap)) + salsa::attach(&self.db, || { + // the trait solver code may invoke `as_view` outside of queries, + // so technically we might run into a panic in salsa if the downcaster has not yet been registered.
+ HirDatabase::zalsa_register_downcaster(&self.db); + Cancelled::catch(|| f(&self.db)) + }) } } diff --git a/src/tools/rust-analyzer/crates/ide/src/moniker.rs b/src/tools/rust-analyzer/crates/ide/src/moniker.rs index 795c1f2ca3c0b..f1aa03c8f2672 100644 --- a/src/tools/rust-analyzer/crates/ide/src/moniker.rs +++ b/src/tools/rust-analyzer/crates/ide/src/moniker.rs @@ -209,7 +209,6 @@ pub(crate) fn def_to_kind(db: &RootDatabase, def: Definition) -> SymbolInformati Definition::Const(..) => Constant, Definition::Static(..) => StaticVariable, Definition::Trait(..) => Trait, - Definition::TraitAlias(..) => Trait, Definition::TypeAlias(it) => { if it.as_assoc_item(db).is_some() { AssociatedType diff --git a/src/tools/rust-analyzer/crates/ide/src/move_item.rs b/src/tools/rust-analyzer/crates/ide/src/move_item.rs index f3bb3df1cd8d7..b5d47c83a5543 100644 --- a/src/tools/rust-analyzer/crates/ide/src/move_item.rs +++ b/src/tools/rust-analyzer/crates/ide/src/move_item.rs @@ -72,7 +72,6 @@ fn find_ancestors(item: SyntaxElement, direction: Direction, range: TextRange) - SyntaxKind::MACRO_CALL, SyntaxKind::TYPE_ALIAS, SyntaxKind::TRAIT, - SyntaxKind::TRAIT_ALIAS, SyntaxKind::IMPL, SyntaxKind::MACRO_DEF, SyntaxKind::STRUCT, diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs index 7dc18141bdbc1..46ff16f972625 100644 --- a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs +++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs @@ -5,13 +5,15 @@ use std::fmt; use arrayvec::ArrayVec; use either::Either; use hir::{ - AssocItem, FieldSource, HasContainer, HasCrate, HasSource, HirDisplay, HirFileId, InFile, - LocalSource, ModuleSource, db::ExpandDatabase, symbols::FileSymbol, + AssocItem, Crate, FieldSource, HasContainer, HasCrate, HasSource, HirDisplay, HirFileId, + InFile, LocalSource, ModuleSource, Semantics, db::ExpandDatabase, symbols::FileSymbol, }; use ide_db::{ FileId, FileRange, RootDatabase, SymbolKind, - defs::Definition, + base_db::{CrateOrigin, LangCrateOrigin, RootQueryDb, salsa}, + defs::{Definition, find_std_module}, documentation::{Documentation, HasDocs}, + famous_defs::FamousDefs, }; use span::Edition; use stdx::never; @@ -81,14 +83,20 @@ pub(crate) trait ToNav { } pub trait TryToNav { - fn try_to_nav(&self, db: &RootDatabase) -> Option>; + fn try_to_nav( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option>; } impl TryToNav for Either { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option> { match self { - Either::Left(it) => it.try_to_nav(db), - Either::Right(it) => it.try_to_nav(db), + Either::Left(it) => it.try_to_nav(sema), + Either::Right(it) => it.try_to_nav(sema), } } } @@ -183,7 +191,11 @@ impl NavigationTarget { } impl TryToNav for FileSymbol { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option> { + let db = sema.db; let edition = self.def.module(db).map(|it| it.krate().edition(db)).unwrap_or(Edition::CURRENT); let display_target = self.def.krate(db).to_display_target(db); @@ -226,9 +238,6 @@ impl TryToNav for FileSymbol { hir::ModuleDef::Trait(it) => { Some(it.display(db, display_target).to_string()) } - hir::ModuleDef::TraitAlias(it) => { - Some(it.display(db, display_target).to_string()) - } hir::ModuleDef::TypeAlias(it) => { Some(it.display(db, display_target).to_string()) } @@ -245,51 +254,55 @@ impl TryToNav 
for FileSymbol { } impl TryToNav for Definition { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option> { match self { - Definition::Local(it) => Some(it.to_nav(db)), - Definition::Label(it) => it.try_to_nav(db), - Definition::Module(it) => Some(it.to_nav(db)), - Definition::Crate(it) => Some(it.to_nav(db)), - Definition::Macro(it) => it.try_to_nav(db), - Definition::Field(it) => it.try_to_nav(db), - Definition::SelfType(it) => it.try_to_nav(db), - Definition::GenericParam(it) => it.try_to_nav(db), - Definition::Function(it) => it.try_to_nav(db), - Definition::Adt(it) => it.try_to_nav(db), - Definition::Variant(it) => it.try_to_nav(db), - Definition::Const(it) => it.try_to_nav(db), - Definition::Static(it) => it.try_to_nav(db), - Definition::Trait(it) => it.try_to_nav(db), - Definition::TraitAlias(it) => it.try_to_nav(db), - Definition::TypeAlias(it) => it.try_to_nav(db), - Definition::ExternCrateDecl(it) => it.try_to_nav(db), - Definition::InlineAsmOperand(it) => it.try_to_nav(db), + Definition::Local(it) => Some(it.to_nav(sema.db)), + Definition::Label(it) => it.try_to_nav(sema), + Definition::Module(it) => Some(it.to_nav(sema.db)), + Definition::Crate(it) => Some(it.to_nav(sema.db)), + Definition::Macro(it) => it.try_to_nav(sema), + Definition::Field(it) => it.try_to_nav(sema), + Definition::SelfType(it) => it.try_to_nav(sema), + Definition::GenericParam(it) => it.try_to_nav(sema), + Definition::Function(it) => it.try_to_nav(sema), + Definition::Adt(it) => it.try_to_nav(sema), + Definition::Variant(it) => it.try_to_nav(sema), + Definition::Const(it) => it.try_to_nav(sema), + Definition::Static(it) => it.try_to_nav(sema), + Definition::Trait(it) => it.try_to_nav(sema), + Definition::TypeAlias(it) => it.try_to_nav(sema), + Definition::ExternCrateDecl(it) => it.try_to_nav(sema), + Definition::InlineAsmOperand(it) => it.try_to_nav(sema), + Definition::BuiltinType(it) => it.try_to_nav(sema), Definition::BuiltinLifetime(_) - | Definition::BuiltinType(_) | Definition::TupleField(_) | Definition::ToolModule(_) | Definition::InlineAsmRegOrRegClass(_) | Definition::BuiltinAttr(_) => None, // FIXME: The focus range should be set to the helper declaration - Definition::DeriveHelper(it) => it.derive().try_to_nav(db), + Definition::DeriveHelper(it) => it.derive().try_to_nav(sema), } } } impl TryToNav for hir::ModuleDef { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option> { match self { - hir::ModuleDef::Module(it) => Some(it.to_nav(db)), - hir::ModuleDef::Function(it) => it.try_to_nav(db), - hir::ModuleDef::Adt(it) => it.try_to_nav(db), - hir::ModuleDef::Variant(it) => it.try_to_nav(db), - hir::ModuleDef::Const(it) => it.try_to_nav(db), - hir::ModuleDef::Static(it) => it.try_to_nav(db), - hir::ModuleDef::Trait(it) => it.try_to_nav(db), - hir::ModuleDef::TraitAlias(it) => it.try_to_nav(db), - hir::ModuleDef::TypeAlias(it) => it.try_to_nav(db), - hir::ModuleDef::Macro(it) => it.try_to_nav(db), + hir::ModuleDef::Module(it) => Some(it.to_nav(sema.db)), + hir::ModuleDef::Function(it) => it.try_to_nav(sema), + hir::ModuleDef::Adt(it) => it.try_to_nav(sema), + hir::ModuleDef::Variant(it) => it.try_to_nav(sema), + hir::ModuleDef::Const(it) => it.try_to_nav(sema), + hir::ModuleDef::Static(it) => it.try_to_nav(sema), + hir::ModuleDef::Trait(it) => it.try_to_nav(sema), + hir::ModuleDef::TypeAlias(it) => it.try_to_nav(sema), + hir::ModuleDef::Macro(it) 
=> it.try_to_nav(sema), hir::ModuleDef::BuiltinType(_) => None, } } @@ -366,19 +379,17 @@ impl ToNavFromAst for hir::Trait { container_name(db, self, self.krate(db).edition(db)) } } -impl ToNavFromAst for hir::TraitAlias { - const KIND: SymbolKind = SymbolKind::TraitAlias; - fn container_name(self, db: &RootDatabase) -> Option { - container_name(db, self, self.krate(db).edition(db)) - } -} impl TryToNav for D where D: HasSource + ToNavFromAst + Copy + HasDocs + HirDisplay + HasCrate, D::Ast: ast::HasName, { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option> { + let db = sema.db; let src = self.source(db)?; Some( NavigationTarget::from_named( @@ -388,8 +399,9 @@ where ) .map(|mut res| { res.docs = self.docs(db); - res.description = - Some(self.display(db, self.krate(db).to_display_target(db)).to_string()); + res.description = salsa::attach(db, || { + Some(self.display(db, self.krate(db).to_display_target(db)).to_string()) + }); res.container_name = self.container_name(db); res }), @@ -431,7 +443,11 @@ impl ToNav for hir::Crate { } impl TryToNav for hir::Impl { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option> { + let db = sema.db; let InFile { file_id, value } = self.source(db)?; let derive_path = self.as_builtin_derive_path(db); @@ -455,7 +471,11 @@ impl TryToNav for hir::Impl { } impl TryToNav for hir::ExternCrateDecl { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option> { + let db = sema.db; let src = self.source(db)?; let InFile { file_id, value } = src; let focus = value @@ -487,7 +507,11 @@ impl TryToNav for hir::ExternCrateDecl { } impl TryToNav for hir::Field { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option> { + let db = sema.db; let src = self.source(db)?; let krate = self.parent_def(db).module(db).krate(); @@ -496,8 +520,9 @@ impl TryToNav for hir::Field { NavigationTarget::from_named(db, src.with_value(it), SymbolKind::Field).map( |mut res| { res.docs = self.docs(db); - res.description = - Some(self.display(db, krate.to_display_target(db)).to_string()); + res.description = salsa::attach(db, || { + Some(self.display(db, krate.to_display_target(db)).to_string()) + }); res }, ) @@ -519,7 +544,11 @@ impl TryToNav for hir::Field { } impl TryToNav for hir::Macro { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option> { + let db = sema.db; let src = self.source(db)?; let name_owner: &dyn ast::HasName = match &src.value { Either::Left(it) => it, @@ -540,31 +569,40 @@ impl TryToNav for hir::Macro { } impl TryToNav for hir::Adt { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option> { match self { - hir::Adt::Struct(it) => it.try_to_nav(db), - hir::Adt::Union(it) => it.try_to_nav(db), - hir::Adt::Enum(it) => it.try_to_nav(db), + hir::Adt::Struct(it) => it.try_to_nav(sema), + hir::Adt::Union(it) => it.try_to_nav(sema), + hir::Adt::Enum(it) => it.try_to_nav(sema), } } } impl TryToNav for hir::AssocItem { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option> { match self { - AssocItem::Function(it) => it.try_to_nav(db), - 
AssocItem::Const(it) => it.try_to_nav(db), - AssocItem::TypeAlias(it) => it.try_to_nav(db), + AssocItem::Function(it) => it.try_to_nav(sema), + AssocItem::Const(it) => it.try_to_nav(sema), + AssocItem::TypeAlias(it) => it.try_to_nav(sema), } } } impl TryToNav for hir::GenericParam { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option> { match self { - hir::GenericParam::TypeParam(it) => it.try_to_nav(db), - hir::GenericParam::ConstParam(it) => it.try_to_nav(db), - hir::GenericParam::LifetimeParam(it) => it.try_to_nav(db), + hir::GenericParam::TypeParam(it) => it.try_to_nav(sema), + hir::GenericParam::ConstParam(it) => it.try_to_nav(sema), + hir::GenericParam::LifetimeParam(it) => it.try_to_nav(sema), } } } @@ -613,7 +651,11 @@ impl ToNav for hir::Local { } impl TryToNav for hir::Label { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option> { + let db = sema.db; let InFile { file_id, value } = self.source(db)?; // Labels can't be keywords, so no escaping needed. let name = self.name(db).display_no_db(Edition::Edition2015).to_smolstr(); @@ -635,7 +677,11 @@ impl TryToNav for hir::Label { } impl TryToNav for hir::TypeParam { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option> { + let db = sema.db; let InFile { file_id, value } = self.merge().source(db)?; let edition = self.module(db).krate().edition(db); let name = self.name(db).display_no_db(edition).to_smolstr(); @@ -672,13 +718,20 @@ impl TryToNav for hir::TypeParam { } impl TryToNav for hir::TypeOrConstParam { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { - self.split(db).try_to_nav(db) + fn try_to_nav( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option> { + self.split(sema.db).try_to_nav(sema) } } impl TryToNav for hir::LifetimeParam { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option> { + let db = sema.db; let InFile { file_id, value } = self.source(db)?; // Lifetimes cannot be keywords, so not escaping needed. 
let name = self.name(db).display_no_db(Edition::Edition2015).to_smolstr(); @@ -700,7 +753,11 @@ impl TryToNav for hir::LifetimeParam { } impl TryToNav for hir::ConstParam { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option> { + let db = sema.db; let InFile { file_id, value } = self.merge().source(db)?; let edition = self.module(db).krate().edition(db); let name = self.name(db).display_no_db(edition).to_smolstr(); @@ -730,7 +787,11 @@ impl TryToNav for hir::ConstParam { } impl TryToNav for hir::InlineAsmOperand { - fn try_to_nav(&self, db: &RootDatabase) -> Option> { + fn try_to_nav( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option> { + let db = sema.db; let InFile { file_id, value } = &self.source(db)?; let file_id = *file_id; Some(orig_range_with_focus(db, file_id, value.syntax(), value.name()).map( @@ -754,6 +815,28 @@ impl TryToNav for hir::InlineAsmOperand { } } +impl TryToNav for hir::BuiltinType { + fn try_to_nav( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option> { + let db = sema.db; + let krate = db + .all_crates() + .iter() + .copied() + .find(|&krate| matches!(krate.data(db).origin, CrateOrigin::Lang(LangCrateOrigin::Std))) + .map(Crate::from)?; + let edition = krate.edition(db); + + let fd = FamousDefs(sema, krate); + let primitive_mod = format!("prim_{}", self.name().display(fd.0.db, edition)); + let doc_owner = find_std_module(&fd, &primitive_mod, edition)?; + + Some(doc_owner.to_nav(db)) + } +} + #[derive(Debug)] pub struct UpmappingResult { /// The macro call site. diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs index 86b88a17c75fc..0189939eac310 100644 --- a/src/tools/rust-analyzer/crates/ide/src/references.rs +++ b/src/tools/rust-analyzer/crates/ide/src/references.rs @@ -138,7 +138,7 @@ pub(crate) fn find_all_refs( Definition::Module(module) => { Some(NavigationTarget::from_module_to_decl(sema.db, module)) } - def => def.try_to_nav(sema.db), + def => def.try_to_nav(sema), } .map(|nav| { let (nav, extra_ref) = match nav.def_site { @@ -1783,7 +1783,7 @@ trait Bar$0 = Foo where Self: ; fn foo(_: impl Bar, _: &dyn Bar) {} "#, expect![[r#" - Bar TraitAlias FileId(0) 13..42 19..22 + Bar Trait FileId(0) 13..42 19..22 FileId(0) 53..56 FileId(0) 66..69 diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs index aea4ae0fd9702..8922a8eb48580 100644 --- a/src/tools/rust-analyzer/crates/ide/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs @@ -27,6 +27,27 @@ pub use ide_db::rename::RenameError; type RenameResult = Result; +/// This is similar to `collect::, _>>`, but unlike it, it succeeds if there is *any* `Ok` item. +fn ok_if_any(iter: impl Iterator>) -> Result, E> { + let mut err = None; + let oks = iter + .filter_map(|item| match item { + Ok(it) => Some(it), + Err(it) => { + err = Some(it); + None + } + }) + .collect::>(); + if !oks.is_empty() { + Ok(oks) + } else if let Some(err) = err { + Err(err) + } else { + Ok(Vec::new()) + } +} + /// Prepares a rename. The sole job of this function is to return the TextRange of the thing that is /// being targeted for a rename. 
pub(crate) fn prepare_rename( @@ -95,58 +116,57 @@ pub(crate) fn rename( alias_fallback(syntax, position, &new_name.display(db, edition).to_string()); let ops: RenameResult> = match alias_fallback { - Some(_) => defs - // FIXME: This can use the `ide_db::rename_reference` (or def.rename) method once we can - // properly find "direct" usages/references. - .map(|(.., def, new_name, _)| { - match kind { - IdentifierKind::Ident => (), - IdentifierKind::Lifetime => { - bail!("Cannot alias reference to a lifetime identifier") - } - IdentifierKind::Underscore => bail!("Cannot alias reference to `_`"), - IdentifierKind::LowercaseSelf => { - bail!("Cannot rename alias reference to `self`") - } - }; - let mut usages = def.usages(&sema).all(); - - // FIXME: hack - removes the usage that triggered this rename operation. - match usages.references.get_mut(&file_id).and_then(|refs| { - refs.iter() - .position(|ref_| ref_.range.contains_inclusive(position.offset)) - .map(|idx| refs.remove(idx)) - }) { - Some(_) => (), - None => never!(), - }; - - let mut source_change = SourceChange::default(); - source_change.extend(usages.references.get_mut(&file_id).iter().map(|refs| { - ( - position.file_id, - source_edit_from_references(db, refs, def, &new_name, edition), - ) - })); - - Ok(source_change) - }) - .collect(), - None => defs - .map(|(.., def, new_name, rename_def)| { - if let Definition::Local(local) = def { - if let Some(self_param) = local.as_self_param(sema.db) { - cov_mark::hit!(rename_self_to_param); - return rename_self_to_param(&sema, local, self_param, &new_name, kind); - } - if kind == IdentifierKind::LowercaseSelf { - cov_mark::hit!(rename_to_self); - return rename_to_self(&sema, local); - } + Some(_) => ok_if_any( + defs + // FIXME: This can use the `ide_db::rename_reference` (or def.rename) method once we can + // properly find "direct" usages/references. + .map(|(.., def, new_name, _)| { + match kind { + IdentifierKind::Ident => (), + IdentifierKind::Lifetime => { + bail!("Cannot alias reference to a lifetime identifier") + } + IdentifierKind::Underscore => bail!("Cannot alias reference to `_`"), + IdentifierKind::LowercaseSelf => { + bail!("Cannot rename alias reference to `self`") + } + }; + let mut usages = def.usages(&sema).all(); + + // FIXME: hack - removes the usage that triggered this rename operation. 
+ match usages.references.get_mut(&file_id).and_then(|refs| { + refs.iter() + .position(|ref_| ref_.range.contains_inclusive(position.offset)) + .map(|idx| refs.remove(idx)) + }) { + Some(_) => (), + None => never!(), + }; + + let mut source_change = SourceChange::default(); + source_change.extend(usages.references.get_mut(&file_id).iter().map(|refs| { + ( + position.file_id, + source_edit_from_references(db, refs, def, &new_name, edition), + ) + })); + + Ok(source_change) + }), + ), + None => ok_if_any(defs.map(|(.., def, new_name, rename_def)| { + if let Definition::Local(local) = def { + if let Some(self_param) = local.as_self_param(sema.db) { + cov_mark::hit!(rename_self_to_param); + return rename_self_to_param(&sema, local, self_param, &new_name, kind); } - def.rename(&sema, new_name.as_str(), rename_def) - }) - .collect(), + if kind == IdentifierKind::LowercaseSelf { + cov_mark::hit!(rename_to_self); + return rename_to_self(&sema, local); + } + } + def.rename(&sema, new_name.as_str(), rename_def) + })), }; ops?.into_iter() @@ -320,7 +340,7 @@ fn find_definitions( }) }); - let res: RenameResult> = symbols.filter_map(Result::transpose).collect(); + let res: RenameResult> = ok_if_any(symbols.filter_map(Result::transpose)); match res { Ok(v) => { // remove duplicates, comparing `Definition`s diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs index 83e5c5ab1dfeb..ec13ba0fde340 100644 --- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs +++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs @@ -4,13 +4,13 @@ use arrayvec::ArrayVec; use ast::HasName; use cfg::{CfgAtom, CfgExpr}; use hir::{ - AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, HasSource, ModPath, Name, PathKind, Semantics, - Symbol, db::HirDatabase, sym, + AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, HasSource, Semantics, Symbol, db::HirDatabase, + sym, }; use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn}; use ide_db::{ FilePosition, FxHashMap, FxIndexMap, FxIndexSet, RootDatabase, SymbolKind, - base_db::RootQueryDb, + base_db::{RootQueryDb, salsa}, defs::Definition, documentation::docs_from_attrs, helpers::visit_file_defs, @@ -158,15 +158,15 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec { Definition::SelfType(impl_) => runnable_impl(&sema, &impl_), _ => None, }; - add_opt(runnable.or_else(|| module_def_doctest(sema.db, def)), Some(def)); + add_opt(runnable.or_else(|| module_def_doctest(&sema, def)), Some(def)); if let Definition::SelfType(impl_) = def { impl_.items(db).into_iter().for_each(|assoc| { let runnable = match assoc { hir::AssocItem::Function(it) => { - runnable_fn(&sema, it).or_else(|| module_def_doctest(sema.db, it.into())) + runnable_fn(&sema, it).or_else(|| module_def_doctest(&sema, it.into())) } - hir::AssocItem::Const(it) => module_def_doctest(sema.db, it.into()), - hir::AssocItem::TypeAlias(it) => module_def_doctest(sema.db, it.into()), + hir::AssocItem::Const(it) => module_def_doctest(&sema, it.into()), + hir::AssocItem::TypeAlias(it) => module_def_doctest(&sema, it.into()), }; add_opt(runnable, Some(assoc.into())) }); @@ -352,8 +352,7 @@ pub(crate) fn runnable_fn( .call_site(); let file_range = fn_source.syntax().original_file_range_with_macro_call_input(sema.db); - let update_test = - UpdateTest::find_snapshot_macro(sema, &fn_source.file_syntax(sema.db), file_range); + let update_test = UpdateTest::find_snapshot_macro(sema, file_range); let cfg = def.attrs(sema.db).cfg(); 
Some(Runnable { use_name_in_title: false, nav, kind, cfg, update_test }) @@ -388,7 +387,7 @@ pub(crate) fn runnable_mod( file_id: module_source.file_id.original_file(sema.db), range: module_syntax.text_range(), }; - let update_test = UpdateTest::find_snapshot_macro(sema, &module_syntax, file_range); + let update_test = UpdateTest::find_snapshot_macro(sema, file_range); Some(Runnable { use_name_in_title: false, @@ -410,15 +409,17 @@ pub(crate) fn runnable_impl( return None; } let cfg = attrs.cfg(); - let nav = def.try_to_nav(sema.db)?.call_site(); + let nav = def.try_to_nav(sema)?.call_site(); let ty = def.self_ty(sema.db); let adt_name = ty.as_adt()?.name(sema.db); let mut ty_args = ty.generic_parameters(sema.db, display_target).peekable(); - let params = if ty_args.peek().is_some() { - format!("<{}>", ty_args.format_with(",", |ty, cb| cb(&ty))) - } else { - String::new() - }; + let params = salsa::attach(sema.db, || { + if ty_args.peek().is_some() { + format!("<{}>", ty_args.format_with(",", |ty, cb| cb(&ty))) + } else { + String::new() + } + }); let mut test_id = format!("{}{params}", adt_name.display(sema.db, edition)); test_id.retain(|c| c != ' '); let test_id = TestId::Path(test_id); @@ -426,8 +427,7 @@ pub(crate) fn runnable_impl( let impl_source = sema.source(*def)?; let impl_syntax = impl_source.syntax(); let file_range = impl_syntax.original_file_range_with_macro_call_input(sema.db); - let update_test = - UpdateTest::find_snapshot_macro(sema, &impl_syntax.file_syntax(sema.db), file_range); + let update_test = UpdateTest::find_snapshot_macro(sema, file_range); Some(Runnable { use_name_in_title: false, @@ -473,7 +473,7 @@ fn runnable_mod_outline_definition( file_id: mod_source.file_id.original_file(sema.db), range: mod_syntax.text_range(), }; - let update_test = UpdateTest::find_snapshot_macro(sema, &mod_syntax, file_range); + let update_test = UpdateTest::find_snapshot_macro(sema, file_range); Some(Runnable { use_name_in_title: false, @@ -484,7 +484,8 @@ fn runnable_mod_outline_definition( }) } -fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option { +fn module_def_doctest(sema: &Semantics<'_, RootDatabase>, def: Definition) -> Option { + let db = sema.db; let attrs = match def { Definition::Module(it) => it.attrs(db), Definition::Function(it) => it.attrs(db), @@ -493,7 +494,6 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option { Definition::Const(it) => it.attrs(db), Definition::Static(it) => it.attrs(db), Definition::Trait(it) => it.attrs(db), - Definition::TraitAlias(it) => it.attrs(db), Definition::TypeAlias(it) => it.attrs(db), Definition::Macro(it) => it.attrs(db), Definition::SelfType(it) => it.attrs(db), @@ -522,7 +522,9 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option { let mut ty_args = ty.generic_parameters(db, display_target).peekable(); format_to!(path, "{}", name.display(db, edition)); if ty_args.peek().is_some() { - format_to!(path, "<{}>", ty_args.format_with(",", |ty, cb| cb(&ty))); + salsa::attach(db, || { + format_to!(path, "<{}>", ty_args.format_with(",", |ty, cb| cb(&ty))); + }); } format_to!(path, "::{}", def_name.display(db, edition)); path.retain(|c| c != ' '); @@ -537,7 +539,7 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option { let mut nav = match def { Definition::Module(def) => NavigationTarget::from_module_to_decl(db, def), - def => def.try_to_nav(db)?, + def => def.try_to_nav(sema)?, } .call_site(); nav.focus_range = None; @@ -637,7 +639,7 @@ pub struct UpdateTest { pub snapbox: 
bool, } -static SNAPSHOT_TEST_MACROS: OnceLock>> = OnceLock::new(); +static SNAPSHOT_TEST_MACROS: OnceLock>> = OnceLock::new(); impl UpdateTest { const EXPECT_CRATE: &str = "expect_test"; @@ -661,22 +663,17 @@ impl UpdateTest { const SNAPBOX_CRATE: &str = "snapbox"; const SNAPBOX_MACROS: &[&str] = &["assert_data_eq", "file", "str"]; - fn find_snapshot_macro( - sema: &Semantics<'_, RootDatabase>, - scope: &SyntaxNode, - file_range: hir::FileRange, - ) -> Self { + fn find_snapshot_macro(sema: &Semantics<'_, RootDatabase>, file_range: hir::FileRange) -> Self { fn init<'a>( krate_name: &'a str, paths: &[&str], - map: &mut FxHashMap<&'a str, Vec>, + map: &mut FxHashMap<&'a str, Vec<[Symbol; 2]>>, ) { let mut res = Vec::with_capacity(paths.len()); - let krate = Name::new_symbol_root(Symbol::intern(krate_name)); + let krate = Symbol::intern(krate_name); for path in paths { - let segments = [krate.clone(), Name::new_symbol_root(Symbol::intern(path))]; - let mod_path = ModPath::from_segments(PathKind::Abs, segments); - res.push(mod_path); + let segments = [krate.clone(), Symbol::intern(path)]; + res.push(segments); } map.insert(krate_name, res); } @@ -690,11 +687,9 @@ impl UpdateTest { }); let search_scope = SearchScope::file_range(file_range); - let find_macro = |paths: &[ModPath]| { + let find_macro = |paths: &[[Symbol; 2]]| { for path in paths { - let Some(items) = sema.resolve_mod_path(scope, path) else { - continue; - }; + let items = hir::resolve_absolute_path(sema.db, path.iter().cloned()); for item in items { if let hir::ItemInNs::Macros(makro) = item && Definition::Macro(makro) diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs index 382573b680113..f45d096ac1904 100644 --- a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs +++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs @@ -11,6 +11,7 @@ use hir::{ use ide_db::{ FilePosition, FxIndexMap, active_parameter::{callable_for_arg_list, generic_def_for_node}, + base_db::salsa, documentation::{Documentation, HasDocs}, }; use itertools::Itertools; @@ -266,12 +267,12 @@ fn signature_help_for_call( // In that case, fall back to render definitions of the respective parameters. // This is overly conservative: we do not substitute known type vars // (see FIXME in tests::impl_trait) and falling back on any unknowns. 
- match (p.ty().contains_unknown(), fn_params.as_deref()) { + salsa::attach(db, || match (p.ty().contains_unknown(), fn_params.as_deref()) { (true, Some(fn_params)) => { format_to!(buf, "{}", fn_params[idx].ty().display(db, display_target)) } _ => format_to!(buf, "{}", p.ty().display(db, display_target)), - } + }); res.push_call_param(&buf); } } @@ -339,10 +340,6 @@ fn signature_help_for_generics( res.doc = it.docs(db); format_to!(res.signature, "trait {}", it.name(db).display(db, edition)); } - hir::GenericDef::TraitAlias(it) => { - res.doc = it.docs(db); - format_to!(res.signature, "trait {}", it.name(db).display(db, edition)); - } hir::GenericDef::TypeAlias(it) => { res.doc = it.docs(db); format_to!(res.signature, "type {}", it.name(db).display(db, edition)); @@ -733,7 +730,7 @@ fn signature_help_for_tuple_pat_ish<'db>( mod tests { use expect_test::{Expect, expect}; - use ide_db::FilePosition; + use ide_db::{FilePosition, base_db::salsa}; use stdx::format_to; use test_fixture::ChangeFixture; @@ -762,7 +759,7 @@ mod tests { "# ); let (db, position) = position(&fixture); - let sig_help = crate::signature_help::signature_help(&db, position); + let sig_help = salsa::attach(&db, || crate::signature_help::signature_help(&db, position)); let actual = match sig_help { Some(sig_help) => { let mut rendered = String::new(); diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs index 694ac22e1993b..8214b4d1de22f 100644 --- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs +++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs @@ -5,7 +5,7 @@ use arrayvec::ArrayVec; use hir::{Crate, Module, Semantics, db::HirDatabase}; use ide_db::{ FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, - base_db::{RootQueryDb, SourceDatabase, VfsPath}, + base_db::{RootQueryDb, SourceDatabase, VfsPath, salsa}, defs::{Definition, IdentClass}, documentation::Documentation, famous_defs::FamousDefs, @@ -169,6 +169,7 @@ impl StaticIndex<'_> { closure_return_type_hints: crate::ClosureReturnTypeHints::WithBlock, lifetime_elision_hints: crate::LifetimeElisionHints::Never, adjustment_hints: crate::AdjustmentHints::Never, + adjustment_hints_disable_reborrows: true, adjustment_hints_mode: AdjustmentHintsMode::Prefix, adjustment_hints_hide_outside_unsafe: false, implicit_drop_hints: false, @@ -227,30 +228,32 @@ impl StaticIndex<'_> { let id = if let Some(it) = self.def_map.get(&def) { *it } else { - let it = self.tokens.insert(TokenStaticData { - documentation: documentation_for_definition(&sema, def, scope_node), - hover: Some(hover_for_definition( - &sema, - file_id, - def, - None, - scope_node, - None, - false, - &hover_config, - edition, - display_target, - )), - definition: def.try_to_nav(self.db).map(UpmappingResult::call_site).map(|it| { - FileRange { file_id: it.file_id, range: it.focus_or_full_range() } - }), - references: vec![], - moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)), - display_name: def - .name(self.db) - .map(|name| name.display(self.db, edition).to_string()), - signature: Some(def.label(self.db, display_target)), - kind: def_to_kind(self.db, def), + let it = salsa::attach(sema.db, || { + self.tokens.insert(TokenStaticData { + documentation: documentation_for_definition(&sema, def, scope_node), + hover: Some(hover_for_definition( + &sema, + file_id, + def, + None, + scope_node, + None, + false, + &hover_config, + edition, + display_target, + )), + definition: 
def.try_to_nav(&sema).map(UpmappingResult::call_site).map( + |it| FileRange { file_id: it.file_id, range: it.focus_or_full_range() }, + ), + references: vec![], + moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)), + display_name: def + .name(self.db) + .map(|name| name.display(self.db, edition).to_string()), + signature: Some(def.label(self.db, display_target)), + kind: def_to_kind(self.db, def), + }) }); self.def_map.insert(def, it); it @@ -258,7 +261,7 @@ impl StaticIndex<'_> { let token = self.tokens.get_mut(id).unwrap(); token.references.push(ReferenceData { range: FileRange { range, file_id }, - is_definition: match def.try_to_nav(self.db).map(UpmappingResult::call_site) { + is_definition: match def.try_to_nav(&sema).map(UpmappingResult::call_site) { Some(it) => it.file_id == file_id && it.focus_or_full_range() == range, None => false, }, diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs index 3ca172977cb9e..4e43387f8d9da 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs @@ -16,7 +16,7 @@ use std::ops::ControlFlow; use either::Either; use hir::{DefWithBody, EditionedFileId, InFile, InRealFile, MacroKind, Name, Semantics}; -use ide_db::{FxHashMap, FxHashSet, Ranker, RootDatabase, SymbolKind}; +use ide_db::{FxHashMap, FxHashSet, Ranker, RootDatabase, SymbolKind, base_db::salsa}; use syntax::{ AstNode, AstToken, NodeOrToken, SyntaxKind::*, @@ -35,6 +35,7 @@ use crate::{ }; pub(crate) use html::highlight_as_html; +pub(crate) use html::highlight_as_html_with_config; #[derive(Debug, Clone, Copy)] pub struct HlRange { @@ -47,6 +48,8 @@ pub struct HlRange { pub struct HighlightConfig { /// Whether to highlight strings pub strings: bool, + /// Whether to highlight comments + pub comments: bool, /// Whether to highlight punctuation pub punctuation: bool, /// Whether to specialize punctuation highlights @@ -434,15 +437,17 @@ fn traverse( |node| unsafe_ops.contains(&InFile::new(descended_element.file_id, node)); let element = match descended_element.value { NodeOrToken::Node(name_like) => { - let hl = highlight::name_like( - sema, - krate, - bindings_shadow_count, - &is_unsafe_node, - config.syntactic_name_ref_highlighting, - name_like, - edition, - ); + let hl = salsa::attach(sema.db, || { + highlight::name_like( + sema, + krate, + bindings_shadow_count, + &is_unsafe_node, + config.syntactic_name_ref_highlighting, + name_like, + edition, + ) + }); if hl.is_some() && !in_macro { // skip highlighting the contained token of our name-like node // as that would potentially overwrite our result @@ -450,10 +455,10 @@ fn traverse( } hl } - NodeOrToken::Token(token) => { + NodeOrToken::Token(token) => salsa::attach(sema.db, || { highlight::token(sema, token, edition, &is_unsafe_node, tt_level > 0) .zip(Some(None)) - } + }), }; if let Some((mut highlight, binding_hash)) = element { if is_unlinked && highlight.tag == HlTag::UnresolvedReference { @@ -586,6 +591,7 @@ fn descend_token( fn filter_by_config(highlight: &mut Highlight, config: HighlightConfig) -> bool { match &mut highlight.tag { HlTag::StringLiteral if !config.strings => return false, + HlTag::Comment if !config.comments => return false, // If punctuation is disabled, make the macro bang part of the macro call again. 
tag @ HlTag::Punctuation(HlPunct::MacroBang) => { if !config.macro_bang { diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs index 8bde8fd970063..d73575fb9549a 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs @@ -576,7 +576,6 @@ pub(super) fn highlight_def( h } Definition::Trait(_) => Highlight::new(HlTag::Symbol(SymbolKind::Trait)), - Definition::TraitAlias(_) => Highlight::new(HlTag::Symbol(SymbolKind::TraitAlias)), Definition::TypeAlias(type_) => { let mut h = Highlight::new(HlTag::Symbol(SymbolKind::TypeAlias)); @@ -780,7 +779,6 @@ fn highlight_name_by_syntax(name: ast::Name) -> Highlight { MACRO_RULES => SymbolKind::Macro, CONST_PARAM => SymbolKind::ConstParam, SELF_PARAM => SymbolKind::SelfParam, - TRAIT_ALIAS => SymbolKind::TraitAlias, ASM_OPERAND_NAMED => SymbolKind::Local, _ => return default.into(), }; diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs index 9fd807f031f1f..358ac9b4ef352 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs @@ -10,7 +10,12 @@ use crate::{ syntax_highlighting::{HighlightConfig, highlight}, }; -pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String { +pub(crate) fn highlight_as_html_with_config( + db: &RootDatabase, + config: HighlightConfig, + file_id: FileId, + rainbow: bool, +) -> String { let sema = Semantics::new(db); let file_id = sema .attach_first_edition(file_id) @@ -27,21 +32,7 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo ) } - let hl_ranges = highlight( - db, - HighlightConfig { - strings: true, - punctuation: true, - specialize_punctuation: true, - specialize_operator: true, - operator: true, - inject_doc_comment: true, - macro_bang: true, - syntactic_name_ref_highlighting: false, - }, - file_id.file_id(db), - None, - ); + let hl_ranges = highlight(db, config, file_id.file_id(db), None); let text = file.to_string(); let mut buf = String::new(); buf.push_str(STYLE); @@ -66,6 +57,25 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo buf } +pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String { + highlight_as_html_with_config( + db, + HighlightConfig { + strings: true, + comments: true, + punctuation: true, + specialize_punctuation: true, + specialize_operator: true, + operator: true, + inject_doc_comment: true, + macro_bang: true, + syntactic_name_ref_highlighting: false, + }, + file_id, + rainbow, + ) +} + //FIXME: like, real html escaping fn html_escape(text: &str) -> String { text.replace('<', "<").replace('>', ">") diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs index 7f5c2c1ec849b..4bb7308024144 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs @@ -5,7 +5,7 @@ use std::mem; use either::Either; use hir::{EditionedFileId, HirFileId, InFile, Semantics, sym}; use ide_db::{ - SymbolKind, active_parameter::ActiveParameter, defs::Definition, + SymbolKind, 
active_parameter::ActiveParameter, base_db::salsa, defs::Definition, documentation::docs_with_rangemap, rust_doc::is_rust_fence, }; use syntax::{ @@ -26,7 +26,8 @@ pub(super) fn ra_fixture( literal: &ast::String, expanded: &ast::String, ) -> Option<()> { - let active_parameter = ActiveParameter::at_token(sema, expanded.syntax().clone())?; + let active_parameter = + salsa::attach(sema.db, || ActiveParameter::at_token(sema, expanded.syntax().clone()))?; let has_rust_fixture_attr = active_parameter.attrs().is_some_and(|attrs| { attrs.filter_map(|attr| attr.as_simple_path()).any(|path| { path.segments() @@ -79,6 +80,7 @@ pub(super) fn ra_fixture( .highlight( HighlightConfig { syntactic_name_ref_highlighting: false, + comments: true, punctuation: true, operator: true, strings: true, @@ -126,32 +128,34 @@ pub(super) fn doc_comment( // Extract intra-doc links and emit highlights for them. if let Some((docs, doc_mapping)) = docs_with_rangemap(sema.db, &attributes) { - extract_definitions_from_docs(&docs) - .into_iter() - .filter_map(|(range, link, ns)| { - doc_mapping - .map(range) - .filter(|(mapping, _)| mapping.file_id == src_file_id) - .and_then(|(InFile { value: mapped_range, .. }, attr_id)| { - Some(mapped_range).zip(resolve_doc_path_for_def( - sema.db, - def, - &link, - ns, - attr_id.is_inner_attr(), - )) + salsa::attach(sema.db, || { + extract_definitions_from_docs(&docs) + .into_iter() + .filter_map(|(range, link, ns)| { + doc_mapping + .map(range) + .filter(|(mapping, _)| mapping.file_id == src_file_id) + .and_then(|(InFile { value: mapped_range, .. }, attr_id)| { + Some(mapped_range).zip(resolve_doc_path_for_def( + sema.db, + def, + &link, + ns, + attr_id.is_inner_attr(), + )) + }) + }) + .for_each(|(range, def)| { + hl.add(HlRange { + range, + highlight: module_def_to_hl_tag(def) + | HlMod::Documentation + | HlMod::Injected + | HlMod::IntraDocLink, + binding_hash: None, }) - }) - .for_each(|(range, def)| { - hl.add(HlRange { - range, - highlight: module_def_to_hl_tag(def) - | HlMod::Documentation - | HlMod::Injected - | HlMod::IntraDocLink, - binding_hash: None, }) - }); + }); } // Extract doc-test sources from the docs and calculate highlighting for them. 
@@ -247,6 +251,7 @@ pub(super) fn doc_comment( db, HighlightConfig { syntactic_name_ref_highlighting: true, + comments: true, punctuation: true, operator: true, strings: true, @@ -311,7 +316,6 @@ fn module_def_to_hl_tag(def: Definition) -> HlTag { Definition::Const(_) => SymbolKind::Const, Definition::Static(_) => SymbolKind::Static, Definition::Trait(_) => SymbolKind::Trait, - Definition::TraitAlias(_) => SymbolKind::TraitAlias, Definition::TypeAlias(_) => SymbolKind::TypeAlias, Definition::BuiltinLifetime(_) => SymbolKind::LifetimeParam, Definition::BuiltinType(_) => return HlTag::BuiltinType, diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs index 3b5d1af0ac72a..4b8762640c743 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs @@ -160,7 +160,6 @@ impl HlTag { SymbolKind::Struct => "struct", SymbolKind::ToolModule => "tool_module", SymbolKind::Trait => "trait", - SymbolKind::TraitAlias => "trait_alias", SymbolKind::TypeAlias => "type_alias", SymbolKind::TypeParam => "type_param", SymbolKind::Union => "union", diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_comments_disabled.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_comments_disabled.html new file mode 100644 index 0000000000000..4607448bebaaa --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_comments_disabled.html @@ -0,0 +1,48 @@ + + +