diff --git a/crates/base-db/src/change.rs b/crates/base-db/src/change.rs
index fe3792ef6c1e..f1973a20714a 100644
--- a/crates/base-db/src/change.rs
+++ b/crates/base-db/src/change.rs
@@ -34,7 +34,7 @@ impl fmt::Debug for Change {
 }
 
 impl Change {
-    pub fn new() -> Change {
+    pub fn new() -> Self {
         Change::default()
     }
 
diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs
index 27dfe766d335..d31340fe8f38 100644
--- a/crates/hir-def/src/body/lower.rs
+++ b/crates/hir-def/src/body/lower.rs
@@ -17,7 +17,7 @@ use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
 use syntax::{
     ast::{
-        self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasLoopBody, HasName,
+        self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasLoopBody, HasName,
         SlicePatComponents,
     },
     AstNode, AstPtr, SyntaxNodePtr,
@@ -302,16 +302,29 @@ impl ExprCollector<'_> {
                 self.alloc_expr(Expr::For { iterable, pat, body, label }, syntax_ptr)
             }
             ast::Expr::CallExpr(e) => {
-                let callee = self.collect_expr_opt(e.expr());
-                let args = if let Some(arg_list) = e.arg_list() {
-                    arg_list.args().filter_map(|e| self.maybe_collect_expr(e)).collect()
-                } else {
-                    Box::default()
+                let is_rustc_box = {
+                    let attrs = e.attrs();
+                    attrs.filter_map(|x| x.as_simple_atom()).any(|x| x == "rustc_box")
                 };
-                self.alloc_expr(
-                    Expr::Call { callee, args, is_assignee_expr: self.is_lowering_assignee_expr },
-                    syntax_ptr,
-                )
+                if is_rustc_box {
+                    let expr = self.collect_expr_opt(e.arg_list().and_then(|x| x.args().next()));
+                    self.alloc_expr(Expr::Box { expr }, syntax_ptr)
+                } else {
+                    let callee = self.collect_expr_opt(e.expr());
+                    let args = if let Some(arg_list) = e.arg_list() {
+                        arg_list.args().filter_map(|e| self.maybe_collect_expr(e)).collect()
+                    } else {
+                        Box::default()
+                    };
+                    self.alloc_expr(
+                        Expr::Call {
+                            callee,
+                            args,
+                            is_assignee_expr: self.is_lowering_assignee_expr,
+                        },
+                        syntax_ptr,
+                    )
+                }
             }
             ast::Expr::MethodCallExpr(e) => {
                 let receiver = self.collect_expr_opt(e.receiver());
diff --git a/crates/hir-ty/src/builder.rs b/crates/hir-ty/src/builder.rs
index 97924569ccc0..e43c372f03af 100644
--- a/crates/hir-ty/src/builder.rs
+++ b/crates/hir-ty/src/builder.rs
@@ -6,7 +6,7 @@ use chalk_ir::{
     cast::{Cast, CastTo, Caster},
     fold::TypeFoldable,
     interner::HasInterner,
-    AdtId, DebruijnIndex, Scalar,
+    AdtId, DebruijnIndex, GenericArgData, Scalar,
 };
 use hir_def::{
     builtin_type::BuiltinType, generics::TypeOrConstParamData, ConstParamId, DefWithBodyId,
@@ -232,6 +232,28 @@ impl TyBuilder<()> {
         TyBuilder::new((), params, parent_subst)
     }
 
+    pub fn subst_for_closure(
+        db: &dyn HirDatabase,
+        parent: DefWithBodyId,
+        sig_ty: Ty,
+    ) -> Substitution {
+        Substitution::from_iter(
+            Interner,
+            parent
+                .as_generic_def_id()
+                .map(|p| {
+                    generics(db.upcast(), p)
+                        .placeholder_subst(db)
+                        .iter(Interner)
+                        .chain(iter::once(&GenericArgData::Ty(sig_ty).intern(Interner)))
+                        .cloned()
+                        .collect::<Vec<_>>()
+                })
+                .into_iter()
+                .flatten(),
+        )
+    }
+
     pub fn build(self) -> Substitution {
         let ((), subst) = self.build_internal();
         subst
diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs
index fb7d99711d94..e5be852bc68c 100644
--- a/crates/hir-ty/src/chalk_db.rs
+++ b/crates/hir-ty/src/chalk_db.rs
@@ -24,7 +24,7 @@ use crate::{
     method_resolution::{TraitImpls, TyFingerprint, ALL_FLOAT_FPS, ALL_INT_FPS},
     to_assoc_type_id, to_chalk_trait_id,
     traits::ChalkContext,
-    utils::generics,
+    utils::{generics, ClosureSubst},
    wrap_empty_binders, AliasEq, AliasTy, BoundVar, CallableDefId, DebruijnIndex, FnDefId, Interner,
    ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef, TraitRefExt, Ty,
    TyBuilder, TyExt, TyKind, WhereClause,
@@ -337,7 +337,7 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
         _closure_id: chalk_ir::ClosureId<Interner>,
         substs: &chalk_ir::Substitution<Interner>,
     ) -> chalk_ir::Binders<rust_ir::FnDefInputsAndOutputDatum<Interner>> {
-        let sig_ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
+        let sig_ty = ClosureSubst(substs).sig_ty();
         let sig = &sig_ty.callable_sig(self.db).expect("first closure param should be fn ptr");
         let io = rust_ir::FnDefInputsAndOutputDatum {
             argument_types: sig.params().to_vec(),
diff --git a/crates/hir-ty/src/chalk_ext.rs b/crates/hir-ty/src/chalk_ext.rs
index d6a561248565..33c598fe3641 100644
--- a/crates/hir-ty/src/chalk_ext.rs
+++ b/crates/hir-ty/src/chalk_ext.rs
@@ -1,20 +1,22 @@
 //! Various extensions traits for Chalk types.
 
-use chalk_ir::{FloatTy, IntTy, Mutability, Scalar, TyVariableKind, UintTy};
+use chalk_ir::{cast::Cast, FloatTy, IntTy, Mutability, Scalar, TyVariableKind, UintTy};
 use hir_def::{
     builtin_type::{BuiltinFloat, BuiltinInt, BuiltinType, BuiltinUint},
     generics::TypeOrConstParamData,
     lang_item::LangItem,
     type_ref::Rawness,
-    FunctionId, GenericDefId, HasModule, ItemContainerId, Lookup, TraitId,
+    DefWithBodyId, FunctionId, GenericDefId, HasModule, ItemContainerId, Lookup, TraitId,
 };
 
 use crate::{
-    db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
-    from_placeholder_idx, to_chalk_trait_id, utils::generics, AdtId, AliasEq, AliasTy, Binders,
-    CallableDefId, CallableSig, ClosureId, DynTy, FnPointer, ImplTraitId, Interner, Lifetime,
-    ProjectionTy, QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, TypeFlags,
-    WhereClause,
+    db::HirDatabase,
+    from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
+    to_chalk_trait_id,
+    utils::{generics, ClosureSubst},
+    AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Canonical, CanonicalVarKinds,
+    ClosureId, DynTy, FnPointer, ImplTraitId, InEnvironment, Interner, Lifetime, ProjectionTy,
+    QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, TypeFlags, WhereClause,
 };
 
 pub trait TyExt {
@@ -46,6 +48,7 @@ pub trait TyExt {
     fn impl_trait_bounds(&self, db: &dyn HirDatabase) -> Option<Vec<QuantifiedWhereClause>>;
 
     fn associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<TraitId>;
+    fn is_copy(self, db: &dyn HirDatabase, owner: DefWithBodyId) -> bool;
 
     /// FIXME: Get rid of this, it's not a good abstraction
     fn equals_ctor(&self, other: &Ty) -> bool;
@@ -185,10 +188,7 @@ impl TyExt for Ty {
                 let sig = db.callable_item_signature(callable_def);
                 Some(sig.substitute(Interner, parameters))
             }
-            TyKind::Closure(.., substs) => {
-                let sig_param = substs.at(Interner, 0).assert_ty_ref(Interner);
-                sig_param.callable_sig(db)
-            }
+            TyKind::Closure(.., substs) => ClosureSubst(substs).sig_ty().callable_sig(db),
             _ => None,
         }
     }
@@ -327,6 +327,20 @@ impl TyExt for Ty {
         }
     }
 
+    fn is_copy(self, db: &dyn HirDatabase, owner: DefWithBodyId) -> bool {
+        let crate_id = owner.module(db.upcast()).krate();
+        let Some(copy_trait) = db.lang_item(crate_id, LangItem::Copy).and_then(|x| x.as_trait()) else {
+            return false;
+        };
+        let trait_ref = TyBuilder::trait_ref(db, copy_trait).push(self).build();
+        let env = db.trait_environment_for_body(owner);
+        let goal = Canonical {
+            value: InEnvironment::new(&env.env, trait_ref.cast(Interner)),
+            binders: CanonicalVarKinds::empty(Interner),
+        };
+        db.trait_solve(crate_id, None, goal).is_some()
+    }
+
     fn equals_ctor(&self, other: &Ty) -> bool {
         match (self.kind(Interner), other.kind(Interner)) {
             (TyKind::Adt(adt, ..), TyKind::Adt(adt2, ..)) => adt == adt2,
diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs
index 78033b4e89b5..80b72768b3c3 100644
--- a/crates/hir-ty/src/consteval.rs
+++ b/crates/hir-ty/src/consteval.rs
@@ -7,7 +7,7 @@ use hir_def::{
     path::Path,
     resolver::{Resolver, ValueNs},
     type_ref::ConstRef,
-    ConstId, EnumVariantId,
+    DefWithBodyId, EnumVariantId,
 };
 use la_arena::{Idx, RawIdx};
 use stdx::never;
@@ -57,7 +57,7 @@ pub enum ConstEvalError {
 impl From<MirLowerError> for ConstEvalError {
     fn from(value: MirLowerError) -> Self {
         match value {
-            MirLowerError::ConstEvalError(e) => *e,
+            MirLowerError::ConstEvalError(_, e) => *e,
             _ => ConstEvalError::MirLowerError(value),
         }
     }
@@ -168,7 +168,7 @@ pub fn try_const_usize(c: &Const) -> Option<u128> {
 pub(crate) fn const_eval_recover(
     _: &dyn HirDatabase,
     _: &[String],
-    _: &ConstId,
+    _: &DefWithBodyId,
     _: &Substitution,
 ) -> Result<Const, ConstEvalError> {
     Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
@@ -184,10 +184,9 @@ pub(crate) fn const_eval_discriminant_recover(
 
 pub(crate) fn const_eval_query(
     db: &dyn HirDatabase,
-    const_id: ConstId,
+    def: DefWithBodyId,
     subst: Substitution,
 ) -> Result<Const, ConstEvalError> {
-    let def = const_id.into();
     let body = db.mir_body(def)?;
     let c = interpret_mir(db, &body, subst, false)?;
     Ok(c)
diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs
index d987f41c7064..e95e17553631 100644
--- a/crates/hir-ty/src/consteval/tests.rs
+++ b/crates/hir-ty/src/consteval/tests.rs
@@ -1,4 +1,4 @@
-use base_db::fixture::WithFixture;
+use base_db::{fixture::WithFixture, FileId};
 use chalk_ir::Substitution;
 use hir_def::db::DefDatabase;
 
@@ -16,7 +16,7 @@ mod intrinsics;
 
 fn simplify(e: ConstEvalError) -> ConstEvalError {
     match e {
-        ConstEvalError::MirEvalError(MirEvalError::InFunction(_, e)) => {
+        ConstEvalError::MirEvalError(MirEvalError::InFunction(_, e, _, _)) => {
             simplify(ConstEvalError::MirEvalError(*e))
         }
         _ => e,
@@ -24,13 +24,30 @@ fn simplify(e: ConstEvalError) -> ConstEvalError {
 }
 
 #[track_caller]
-fn check_fail(ra_fixture: &str, error: ConstEvalError) {
-    assert_eq!(eval_goal(ra_fixture).map_err(simplify), Err(error));
+fn check_fail(ra_fixture: &str, error: impl FnOnce(ConstEvalError) -> bool) {
+    let (db, file_id) = TestDB::with_single_file(ra_fixture);
+    match eval_goal(&db, file_id).map_err(simplify) {
+        Ok(_) => panic!("Expected fail, but it succeeded"),
+        Err(e) => assert!(error(e)),
+    }
 }
 
 #[track_caller]
 fn check_number(ra_fixture: &str, answer: i128) {
-    let r = eval_goal(ra_fixture).unwrap();
+    let (db, file_id) = TestDB::with_single_file(ra_fixture);
+    let r = match eval_goal(&db, file_id) {
+        Ok(t) => t,
+        Err(e) => {
+            let mut err = String::new();
+            let span_formatter = |file, range| format!("{:?} {:?}", file, range);
+            match e {
+                ConstEvalError::MirLowerError(e) => e.pretty_print(&mut err, &db, span_formatter),
+                ConstEvalError::MirEvalError(e) => e.pretty_print(&mut err, &db, span_formatter),
+            }
+            .unwrap();
+            panic!("Error in evaluating goal: {}", err);
+        }
+    };
     match &r.data(Interner).value {
         chalk_ir::ConstValue::Concrete(c) => match &c.interned {
             ConstScalar::Bytes(b, _) => {
@@ -47,10 +64,9 @@ fn check_number(ra_fixture: &str, answer: i128) {
     }
 }
 
-fn eval_goal(ra_fixture: &str) -> Result<Const, ConstEvalError> {
-    let (db, file_id) = TestDB::with_single_file(ra_fixture);
+fn eval_goal(db: &TestDB, file_id: FileId) -> Result<Const, ConstEvalError> {
     let module_id = db.module_for_file(file_id);
-    let def_map = module_id.def_map(&db);
+    let def_map = module_id.def_map(db);
     let scope = &def_map[module_id.local_id].scope;
     let const_id = scope
         .declarations()
@@ -65,7 +81,7 @@ fn eval_goal(ra_fixture: &str) -> Result<Const, ConstEvalError> {
             _ => None,
         })
         .unwrap();
-    db.const_eval(const_id, Substitution::empty(Interner))
+    db.const_eval(const_id.into(), Substitution::empty(Interner))
 }
 
 #[test]
@@ -303,6 +319,81 @@ fn overloaded_index() {
     );
 }
 
+#[test]
+fn overloaded_binop() {
+    check_number(
+        r#"
+    //- minicore: add
+    enum Color {
+        Red,
+        Green,
+        Yellow,
+    }
+
+    use Color::*;
+
+    impl core::ops::Add for Color {
+        type Output = Color;
+        fn add(self, rhs: Color) -> Self::Output {
+            Yellow
+        }
+    }
+
+    impl core::ops::AddAssign for Color {
+        fn add_assign(&mut self, rhs: Color) {
+            *self = Red;
+        }
+    }
+
+    const GOAL: bool = {
+        let x = Red + Green;
+        let mut y = Green;
+        y += x;
+        x == Yellow && y == Red && Red + Green == Yellow && Red + Red == Yellow && Yellow + Green == Yellow
+    };
+    "#,
+        1,
+    );
+    check_number(
+        r#"
+    //- minicore: add
+    impl core::ops::Add for usize {
+        type Output = usize;
+        fn add(self, rhs: usize) -> Self::Output {
+            self + rhs
+        }
+    }
+
+    impl core::ops::AddAssign for usize {
+        fn add_assign(&mut self, rhs: usize) {
+            *self += rhs;
+        }
+    }
+
+    #[lang = "shl"]
+    pub trait Shl<Rhs = Self> {
+        type Output;
+
+        fn shl(self, rhs: Rhs) -> Self::Output;
+    }
+
+    impl Shl<u8> for usize {
+        type Output = usize;
+
+        fn shl(self, rhs: u8) -> Self::Output {
+            self << rhs
+        }
+    }
+
+    const GOAL: usize = {
+        let mut x = 10;
+        x += 20;
+        2 + 2 + (x << 1u8)
+    };"#,
+        64,
+    );
+}
+
 #[test]
 fn function_call() {
     check_number(
@@ -426,6 +517,16 @@ fn generic_fn() {
         "#,
         12,
     );
+    check_number(
+        r#"
+    const fn y<T>(b: T) -> (T, ) {
+        let alloc = b;
+        (alloc, )
+    }
+    const GOAL: u8 = y(2).0;
+    "#,
+        2,
+    );
     check_number(
         r#"
         //- minicore: coerce_unsized, index, slice
@@ -590,6 +691,30 @@ fn loops() {
         "#,
         8,
     );
+    check_number(
+        r#"
+    //- minicore: add
+    const GOAL: u8 = {
+        let mut x = 0;
+        'a: loop {
+            'b: loop {
+                'c: while x < 20 {
+                    'd: while x < 5 {
+                        'e: loop {
+                            x += 1;
+                            continue 'c;
+                        };
+                    };
+                    x += 1;
+                };
+                break 'a;
+            };
+        }
+        x
+    };
+    "#,
+        20,
+    );
 }
 
 #[test]
@@ -790,10 +915,12 @@ fn path_pattern_matching() {
 
     use Season::*;
 
+    const MY_SEASON: Season = Summer;
+
     const fn f(x: Season) -> i32 {
         match x {
             Spring => 1,
-            Summer => 2,
+            MY_SEASON => 2,
             Fall => 3,
             Winter => 4,
         }
@@ -944,19 +1071,10 @@ fn function_param_patterns() {
 fn match_guards() {
     check_number(
         r#"
-    //- minicore: option, eq
-    impl<T: PartialEq> PartialEq for Option<T> {
-        fn eq(&self, other: &Rhs) -> bool {
-            match (self, other) {
-                (Some(x), Some(y)) => x == y,
-                (None, None) => true,
-                _ => false,
-            }
-        }
-    }
+    //- minicore: option
     fn f(x: Option<i32>) -> i32 {
         match x {
-            y if y == Some(42) => 42000,
+            y if let Some(42) = y => 42000,
             Some(y) => y,
             None => 10
         }
@@ -967,6 +1085,59 @@
     );
 }
 
+#[test]
+fn result_layout_niche_optimization() {
+    check_number(
+        r#"
+    //- minicore: option, result
+    const GOAL: i32 = match Some(2).ok_or(Some(2)) {
+        Ok(x) => x,
+        Err(_) => 1000,
+    };
+    "#,
+        2,
+    );
+    check_number(
+        r#"
+    //- minicore: result
+    pub enum AlignmentEnum64 {
+        _Align1Shl0 = 1 << 0,
+        _Align1Shl1 = 1 << 1,
+        _Align1Shl2 = 1 << 2,
+        _Align1Shl3 = 1 << 3,
+        _Align1Shl4 = 1 << 4,
+        _Align1Shl5 = 1 << 5,
+    }
+    const GOAL: Result<AlignmentEnum64, ()> = {
+        let align = Err(());
+        align
+    };
+    "#,
+        0, // It is 0 since result is niche encoded and 1 is valid for `AlignmentEnum64`
+    );
+    check_number(
+        r#"
+    //- minicore: result
+    pub enum AlignmentEnum64 {
+        _Align1Shl0 = 1 << 0,
+        _Align1Shl1 = 1 << 1,
+        _Align1Shl2 = 1 << 2,
+        _Align1Shl3 = 1 << 3,
+        _Align1Shl4 = 1 << 4,
+        _Align1Shl5 = 1 << 5,
+    }
+    const GOAL: i32 = {
+        let align = Ok::<_, ()>(AlignmentEnum64::_Align1Shl0);
+        match align {
+            Ok(_) => 2,
+            Err(_) => 1,
+        }
+    };
+    "#,
+        2,
+    );
+}
+
 #[test]
 fn options() {
     check_number(
@@ -1147,6 +1318,16 @@ fn closures() {
     check_number(
         r#"
    //- minicore: fn, copy
+    const GOAL: i32 = {
+        let c: fn(i32) -> i32 = |x| 2 * x;
+        c(2) + c(10)
+    };
+    "#,
+        24,
+    );
+    check_number(
+        r#"
+    //- minicore: fn, copy
    struct X(i32);
    impl X {
        fn mult(&mut self, n: i32) {
@@ -1180,6 +1361,36 @@
     );
 }
 
+#[test]
+fn closure_and_impl_fn() {
+    check_number(
+        r#"
+    //- minicore: fn, copy
+    fn closure_wrapper<F: FnOnce() -> i32>(c: F) -> impl FnOnce() -> F {
+        || c
+    }
+
+    const GOAL: i32 = {
+        let y = 5;
+        let c = closure_wrapper(|| y);
+        c()()
+    };
+    "#,
+        5,
+    );
+    check_number(
+        r#"
+    //- minicore: fn, copy
+    fn f<T, F: Fn() -> T>(t: F) -> impl Fn() -> T {
+        move || t()
+    }
+
+    const GOAL: i32 = f(|| 2)();
+    "#,
+        2,
+    );
+}
+
 #[test]
 fn or_pattern() {
     check_number(
@@ -1218,6 +1429,23 @@ fn or_pattern() {
     );
 }
 
+#[test]
+fn function_pointer_in_constants() {
+    check_number(
+        r#"
+    struct Foo {
+        f: fn(u8) -> u8,
+    }
+    const FOO: Foo = Foo { f: add2 };
+    fn add2(x: u8) -> u8 {
+        x + 2
+    }
+    const GOAL: u8 = (FOO.f)(3);
+    "#,
+        5,
+    );
+}
+
 #[test]
 fn function_pointer() {
     check_number(
@@ -1432,6 +1660,51 @@ fn dyn_trait() {
     );
 }
 
+#[test]
+fn boxes() {
+    check_number(
+        r#"
+//- minicore: coerce_unsized, deref_mut, slice
+use core::ops::{Deref, DerefMut};
+use core::{marker::Unsize, ops::CoerceUnsized};
+
+#[lang = "owned_box"]
+pub struct Box<T: ?Sized> {
+    inner: *mut T,
+}
+impl<T> Box<T> {
+    fn new(t: T) -> Self {
+        #[rustc_box]
+        Box::new(t)
+    }
+}
+
+impl<T: ?Sized> Deref for Box<T> {
+    type Target = T;
+
+    fn deref(&self) -> &T {
+        &**self
+    }
+}
+
+impl<T: ?Sized> DerefMut for Box<T> {
+    fn deref_mut(&mut self) -> &mut T {
+        &mut **self
+    }
+}
+
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Box<U>> for Box<T> {}
+
+const GOAL: usize = {
+    let x = Box::new(5);
+    let y: Box<[i32]> = Box::new([1, 2, 3]);
+    *x + y.len()
+};
+"#,
+        8,
+    );
+}
+
 #[test]
 fn array_and_index() {
     check_number(
@@ -1504,6 +1777,37 @@ fn consts() {
     );
 }
 
+#[test]
+fn statics() {
+    check_number(
+        r#"
+    //- minicore: cell
+    use core::cell::Cell;
+    fn f() -> i32 {
+        static S: Cell<i32> = Cell::new(10);
+        S.set(S.get() + 1);
+        S.get()
+    }
+    const GOAL: i32 = f() + f() + f();
+    "#,
+        36,
+    );
+}
+
+#[test]
+fn extern_weak_statics() {
+    check_number(
+        r#"
+    extern "C" {
+        #[linkage = "extern_weak"]
+        static __dso_handle: *mut u8;
+    }
+    const GOAL: usize = __dso_handle as usize;
+    "#,
+        0,
+    );
+}
+
 #[test]
 fn enums() {
     check_number(
@@ -1531,13 +1835,13 @@ fn enums() {
         "#,
         0,
     );
-    let r = eval_goal(
+    let (db, file_id) = TestDB::with_single_file(
         r#"
         enum E { A = 1, B }
         const GOAL: E = E::A;
         "#,
-    )
-    .unwrap();
+    );
+    let r = eval_goal(&db, file_id).unwrap();
     assert_eq!(try_const_usize(&r), Some(1));
 }
 
@@ -1550,7 +1854,7 @@ fn const_loop() {
         const F2: i32 = 2 * F1;
         const GOAL: i32 = F3;
         "#,
-        ConstEvalError::MirLowerError(MirLowerError::Loop),
+        |e| e == ConstEvalError::MirLowerError(MirLowerError::Loop),
     );
 }
 
@@ -1609,8 +1913,7 @@ fn const_generic_subst_assoc_const_impl() {
 
 #[test]
 fn const_trait_assoc() {
-    // FIXME: this should evaluate to 0
-    check_fail(
+    check_number(
         r#"
     struct U0;
     trait ToConst {
@@ -1619,9 +1922,35 @@ fn const_trait_assoc() {
     impl ToConst for U0 {
         const VAL: usize = 0;
     }
-    const GOAL: usize = U0::VAL;
+    impl ToConst for i32 {
+        const VAL: usize = 32;
+    }
+    const GOAL: usize = U0::VAL + i32::VAL;
    "#,
-
ConstEvalError::MirLowerError(MirLowerError::IncompleteExpr), + 32, + ); + check_number( + r#" + struct S(*mut T); + + trait MySized: Sized { + const SIZE: S = S(1 as *mut Self); + } + + impl MySized for i32 { + const SIZE: S = S(10 as *mut i32); + } + + impl MySized for i64 { + } + + const fn f() -> usize { + T::SIZE.0 as usize + } + + const GOAL: usize = f::() + f::() * 2; + "#, + 12, ); } @@ -1631,7 +1960,7 @@ fn exec_limits() { r#" const GOAL: usize = loop {}; "#, - ConstEvalError::MirEvalError(MirEvalError::ExecutionLimitExceeded), + |e| e == ConstEvalError::MirEvalError(MirEvalError::ExecutionLimitExceeded), ); check_fail( r#" @@ -1640,7 +1969,7 @@ fn exec_limits() { } const GOAL: i32 = f(0); "#, - ConstEvalError::MirEvalError(MirEvalError::StackOverflow), + |e| e == ConstEvalError::MirEvalError(MirEvalError::StackOverflow), ); // Reasonable code should still work check_number( @@ -1665,7 +1994,7 @@ fn exec_limits() { #[test] fn type_error() { - let e = eval_goal( + check_fail( r#" const GOAL: u8 = { let x: u16 = 2; @@ -1673,6 +2002,6 @@ fn type_error() { y.0 }; "#, + |e| matches!(e, ConstEvalError::MirLowerError(MirLowerError::TypeMismatch(_))), ); - assert!(matches!(e, Err(ConstEvalError::MirLowerError(MirLowerError::TypeMismatch(_))))); } diff --git a/crates/hir-ty/src/consteval/tests/intrinsics.rs b/crates/hir-ty/src/consteval/tests/intrinsics.rs index 5c47e1f00a41..40e68823acdf 100644 --- a/crates/hir-ty/src/consteval/tests/intrinsics.rs +++ b/crates/hir-ty/src/consteval/tests/intrinsics.rs @@ -67,6 +67,135 @@ fn wrapping_add() { ); } +#[test] +fn allocator() { + check_number( + r#" + extern "Rust" { + #[rustc_allocator] + fn __rust_alloc(size: usize, align: usize) -> *mut u8; + #[rustc_deallocator] + fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize); + #[rustc_reallocator] + fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8; + #[rustc_allocator_zeroed] + fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8; + } + + const GOAL: u8 = unsafe { + let ptr = __rust_alloc(4, 1); + let ptr2 = ((ptr as usize) + 1) as *mut u8; + *ptr = 23; + *ptr2 = 32; + let ptr = __rust_realloc(ptr, 4, 1, 8); + let ptr2 = ((ptr as usize) + 1) as *mut u8; + *ptr + *ptr2 + }; + "#, + 55, + ); +} + +#[test] +fn overflowing_add() { + check_number( + r#" + extern "rust-intrinsic" { + pub fn add_with_overflow(x: T, y: T) -> (T, bool); + } + + const GOAL: u8 = add_with_overflow(1, 2).0; + "#, + 3, + ); + check_number( + r#" + extern "rust-intrinsic" { + pub fn add_with_overflow(x: T, y: T) -> (T, bool); + } + + const GOAL: u8 = add_with_overflow(1, 2).1 as u8; + "#, + 0, + ); +} + +#[test] +fn needs_drop() { + check_number( + r#" + //- minicore: copy, sized + extern "rust-intrinsic" { + pub fn needs_drop() -> bool; + } + struct X; + const GOAL: bool = !needs_drop::() && needs_drop::(); + "#, + 1, + ); +} + +#[test] +fn likely() { + check_number( + r#" + extern "rust-intrinsic" { + pub fn likely(b: bool) -> bool; + pub fn unlikely(b: bool) -> bool; + } + + const GOAL: bool = likely(true) && unlikely(true) && !likely(false) && !unlikely(false); + "#, + 1, + ); +} + +#[test] +fn atomic() { + check_number( + r#" + //- minicore: copy + extern "rust-intrinsic" { + pub fn atomic_load_seqcst(src: *const T) -> T; + pub fn atomic_xchg_acquire(dst: *mut T, src: T) -> T; + pub fn atomic_cxchg_release_seqcst(dst: *mut T, old: T, src: T) -> (T, bool); + pub fn atomic_cxchgweak_acquire_acquire(dst: *mut T, old: T, src: T) -> (T, bool); + pub fn 
atomic_store_release(dst: *mut T, val: T); + pub fn atomic_xadd_acqrel(dst: *mut T, src: T) -> T; + pub fn atomic_xsub_seqcst(dst: *mut T, src: T) -> T; + pub fn atomic_and_acquire(dst: *mut T, src: T) -> T; + pub fn atomic_nand_seqcst(dst: *mut T, src: T) -> T; + pub fn atomic_or_release(dst: *mut T, src: T) -> T; + pub fn atomic_xor_seqcst(dst: *mut T, src: T) -> T; + } + + fn should_not_reach() { + _ // fails the test if executed + } + + const GOAL: i32 = { + let mut x = 5; + atomic_store_release(&mut x, 10); + let mut y = atomic_xchg_acquire(&mut x, 100); + atomic_xadd_acqrel(&mut y, 20); + if (30, true) != atomic_cxchg_release_seqcst(&mut y, 30, 40) { + should_not_reach(); + } + if (40, false) != atomic_cxchg_release_seqcst(&mut y, 30, 50) { + should_not_reach(); + } + if (40, true) != atomic_cxchgweak_acquire_acquire(&mut y, 40, 30) { + should_not_reach(); + } + let mut z = atomic_xsub_seqcst(&mut x, -200); + atomic_xor_seqcst(&mut x, 1024); + atomic_load_seqcst(&x) + z * 3 + atomic_load_seqcst(&y) * 2 + }; + "#, + 660 + 1024, + ); +} + #[test] fn offset() { check_number( diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs index c415689399f1..963b3b72abb7 100644 --- a/crates/hir-ty/src/db.rs +++ b/crates/hir-ty/src/db.rs @@ -5,7 +5,7 @@ use std::sync::Arc; use base_db::{impl_intern_key, salsa, CrateId, Upcast}; use hir_def::{ - db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, ConstId, ConstParamId, + db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GenericDefId, ImplId, LifetimeParamId, LocalFieldId, TypeOrConstParamId, VariantId, }; @@ -59,7 +59,7 @@ pub trait HirDatabase: DefDatabase + Upcast { #[salsa::invoke(crate::consteval::const_eval_query)] #[salsa::cycle(crate::consteval::const_eval_recover)] - fn const_eval(&self, def: ConstId, subst: Substitution) -> Result; + fn const_eval(&self, def: DefWithBodyId, subst: Substitution) -> Result; #[salsa::invoke(crate::consteval::const_eval_discriminant_variant)] #[salsa::cycle(crate::consteval::const_eval_discriminant_recover)] diff --git a/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs b/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs index 7460f43387f6..a0f6b9368ee0 100644 --- a/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs +++ b/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs @@ -772,7 +772,7 @@ impl<'p> Fields<'p> { (0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).filter_map(move |fid| { let ty = field_ty[fid].clone().substitute(Interner, substs); - let ty = normalize(cx.db, cx.body, ty); + let ty = normalize(cx.db, cx.db.trait_environment_for_body(cx.body), ty); let is_visible = matches!(adt, hir_def::AdtId::EnumId(..)) || visibility[fid].is_visible_from(cx.db.upcast(), cx.module); let is_uninhabited = cx.is_uninhabited(&ty); diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index d445abae9232..3cfe78141d43 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -32,7 +32,7 @@ use crate::{ mapping::from_chalk, mir::pad16, primitive, to_assoc_type_id, - utils::{self, generics}, + utils::{self, generics, ClosureSubst}, AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Const, ConstScalar, ConstValue, DomainGoal, GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData, LifetimeOutlives, MemoryMap, Mutability, OpaqueTy, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Scalar, @@ -419,6 +419,16 
@@ impl HirDisplay for Const { } ConstValue::Concrete(c) => match &c.interned { ConstScalar::Bytes(b, m) => render_const_scalar(f, &b, m, &data.ty), + ConstScalar::UnevaluatedConst(c, parameters) => { + let const_data = f.db.const_data(*c); + write!( + f, + "{}", + const_data.name.as_ref().and_then(|x| x.as_str()).unwrap_or("_") + )?; + hir_fmt_generics(f, parameters, Some((*c).into()))?; + Ok(()) + } ConstScalar::Unknown => f.write_char('_'), }, } @@ -485,7 +495,7 @@ fn render_const_scalar( chalk_ir::TyKind::Ref(_, _, t) => match t.kind(Interner) { chalk_ir::TyKind::Str => { let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap()); - let bytes = memory_map.0.get(&addr).map(|x| &**x).unwrap_or(&[]); + let bytes = memory_map.memory.get(&addr).map(|x| &**x).unwrap_or(&[]); let s = std::str::from_utf8(bytes).unwrap_or(""); write!(f, "{s:?}") } @@ -574,6 +584,11 @@ fn render_const_scalar( hir_def::AdtId::EnumId(_) => f.write_str(""), }, chalk_ir::TyKind::FnDef(..) => ty.hir_fmt(f), + chalk_ir::TyKind::Raw(_, _) => { + let x = u128::from_le_bytes(pad16(b, false)); + write!(f, "{:#X} as ", x)?; + ty.hir_fmt(f) + } _ => f.write_str(""), } } @@ -794,82 +809,9 @@ impl HirDisplay for Ty { } f.end_location_link(); - if parameters.len(Interner) > 0 { - let parameters_to_write = if f.display_target.is_source_code() - || f.omit_verbose_types() - { - match self - .as_generic_def(db) - .map(|generic_def_id| db.generic_defaults(generic_def_id)) - .filter(|defaults| !defaults.is_empty()) - { - None => parameters.as_slice(Interner), - Some(default_parameters) => { - fn should_show( - parameter: &GenericArg, - default_parameters: &[Binders], - i: usize, - parameters: &Substitution, - ) -> bool { - if parameter.ty(Interner).map(|x| x.kind(Interner)) - == Some(&TyKind::Error) - { - return true; - } - if let Some(ConstValue::Concrete(c)) = parameter - .constant(Interner) - .map(|x| &x.data(Interner).value) - { - if c.interned == ConstScalar::Unknown { - return true; - } - } - let default_parameter = match default_parameters.get(i) { - Some(x) => x, - None => return true, - }; - let actual_default = - default_parameter.clone().substitute(Interner, ¶meters); - parameter != &actual_default - } - let mut default_from = 0; - for (i, parameter) in parameters.iter(Interner).enumerate() { - if should_show(parameter, &default_parameters, i, parameters) { - default_from = i + 1; - } - } - ¶meters.as_slice(Interner)[0..default_from] - } - } - } else { - parameters.as_slice(Interner) - }; - if !parameters_to_write.is_empty() { - write!(f, "<")?; - - if f.display_target.is_source_code() { - let mut first = true; - for generic_arg in parameters_to_write { - if !first { - write!(f, ", ")?; - } - first = false; - - if generic_arg.ty(Interner).map(|ty| ty.kind(Interner)) - == Some(&TyKind::Error) - { - write!(f, "_")?; - } else { - generic_arg.hir_fmt(f)?; - } - } - } else { - f.write_joined(parameters_to_write, ", ")?; - } + let generic_def = self.as_generic_def(db); - write!(f, ">")?; - } - } + hir_fmt_generics(f, parameters, generic_def)?; } TyKind::AssociatedType(assoc_type_id, parameters) => { let type_alias = from_assoc_type_id(*assoc_type_id); @@ -983,7 +925,7 @@ impl HirDisplay for Ty { } _ => (), } - let sig = substs.at(Interner, 0).assert_ty_ref(Interner).callable_sig(db); + let sig = ClosureSubst(substs).sig_ty().callable_sig(db); if let Some(sig) = sig { let (def, _) = db.lookup_intern_closure((*id).into()); let infer = db.infer(def); @@ -1141,6 +1083,85 @@ impl HirDisplay for Ty { } } +fn 
hir_fmt_generics( + f: &mut HirFormatter<'_>, + parameters: &Substitution, + generic_def: Option, +) -> Result<(), HirDisplayError> { + let db = f.db; + if parameters.len(Interner) > 0 { + let parameters_to_write = if f.display_target.is_source_code() || f.omit_verbose_types() { + match generic_def + .map(|generic_def_id| db.generic_defaults(generic_def_id)) + .filter(|defaults| !defaults.is_empty()) + { + None => parameters.as_slice(Interner), + Some(default_parameters) => { + fn should_show( + parameter: &GenericArg, + default_parameters: &[Binders], + i: usize, + parameters: &Substitution, + ) -> bool { + if parameter.ty(Interner).map(|x| x.kind(Interner)) == Some(&TyKind::Error) + { + return true; + } + if let Some(ConstValue::Concrete(c)) = + parameter.constant(Interner).map(|x| &x.data(Interner).value) + { + if c.interned == ConstScalar::Unknown { + return true; + } + } + let default_parameter = match default_parameters.get(i) { + Some(x) => x, + None => return true, + }; + let actual_default = + default_parameter.clone().substitute(Interner, ¶meters); + parameter != &actual_default + } + let mut default_from = 0; + for (i, parameter) in parameters.iter(Interner).enumerate() { + if should_show(parameter, &default_parameters, i, parameters) { + default_from = i + 1; + } + } + ¶meters.as_slice(Interner)[0..default_from] + } + } + } else { + parameters.as_slice(Interner) + }; + if !parameters_to_write.is_empty() { + write!(f, "<")?; + + if f.display_target.is_source_code() { + let mut first = true; + for generic_arg in parameters_to_write { + if !first { + write!(f, ", ")?; + } + first = false; + + if generic_arg.ty(Interner).map(|ty| ty.kind(Interner)) == Some(&TyKind::Error) + { + write!(f, "_")?; + } else { + generic_arg.hir_fmt(f)?; + } + } + } else { + f.write_joined(parameters_to_write, ", ")?; + } + + write!(f, ">")?; + } + } + Ok(()) +} + impl HirDisplay for CallableSig { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { write!(f, "fn(")?; diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 4affe7424e1f..f9118d987270 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -44,7 +44,7 @@ use crate::{ db::HirDatabase, fold_tys, infer::coerce::CoerceMany, lower::ImplTraitLoweringMode, static_lifetime, to_assoc_type_id, traits::FnTrait, AliasEq, AliasTy, ClosureId, DomainGoal, GenericArg, Goal, ImplTraitId, InEnvironment, Interner, ProjectionTy, RpitId, Substitution, - TraitRef, Ty, TyBuilder, TyExt, TyKind, + TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind, }; // This lint has a false positive here. See the link below for details. 
@@ -117,11 +117,10 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc Ty { +pub(crate) fn normalize(db: &dyn HirDatabase, trait_env: Arc, ty: Ty) -> Ty { if !ty.data(Interner).flags.intersects(TypeFlags::HAS_PROJECTION) { return ty; } - let trait_env = db.trait_environment_for_body(owner); let mut table = unify::InferenceTable::new(db, trait_env); let ty_with_vars = table.normalize_associated_types_in(ty); diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs index e7eb967c0406..b03dd9d46e36 100644 --- a/crates/hir-ty/src/infer/closure.rs +++ b/crates/hir-ty/src/infer/closure.rs @@ -2,7 +2,11 @@ use std::{cmp, collections::HashMap, convert::Infallible, mem}; -use chalk_ir::{cast::Cast, AliasEq, AliasTy, FnSubst, Mutability, TyKind, WhereClause}; +use chalk_ir::{ + cast::Cast, + fold::{FallibleTypeFolder, TypeFoldable}, + AliasEq, AliasTy, BoundVar, DebruijnIndex, FnSubst, Mutability, TyKind, WhereClause, +}; use hir_def::{ hir::{ Array, BinaryOp, BindingAnnotation, BindingId, CaptureBy, Expr, ExprId, Pat, PatId, @@ -10,7 +14,7 @@ use hir_def::{ }, lang_item::LangItem, resolver::{resolver_for_expr, ResolveValueResult, ValueNs}, - FieldId, HasModule, VariantId, + DefWithBodyId, FieldId, HasModule, VariantId, }; use hir_expand::name; use rustc_hash::FxHashMap; @@ -18,12 +22,14 @@ use smallvec::SmallVec; use stdx::never; use crate::{ + db::HirDatabase, + from_placeholder_idx, make_binders, mir::{BorrowKind, MirSpan, ProjectionElem}, static_lifetime, to_chalk_trait_id, traits::FnTrait, - utils::{self, pattern_matching_dereference_count}, - Adjust, Adjustment, Canonical, CanonicalVarKinds, ChalkTraitId, ClosureId, DynTy, FnPointer, - FnSig, InEnvironment, Interner, Substitution, Ty, TyBuilder, TyExt, + utils::{self, generics, pattern_matching_dereference_count, Generics}, + Adjust, Adjustment, Binders, ChalkTraitId, ClosureId, DynTy, FnPointer, FnSig, Interner, + Substitution, Ty, TyExt, }; use super::{Expectation, InferenceContext}; @@ -115,7 +121,7 @@ impl HirPlace { fn ty(&self, ctx: &mut InferenceContext<'_>) -> Ty { let mut ty = ctx.table.resolve_completely(ctx.result[self.local].clone()); for p in &self.projections { - ty = p.projected_ty(ty, ctx.db, |_, _| { + ty = p.projected_ty(ty, ctx.db, |_, _, _| { unreachable!("Closure field only happens in MIR"); }); } @@ -150,7 +156,7 @@ pub(crate) struct CapturedItem { pub(crate) place: HirPlace, pub(crate) kind: CaptureKind, pub(crate) span: MirSpan, - pub(crate) ty: Ty, + pub(crate) ty: Binders, } #[derive(Debug, Clone, PartialEq, Eq)] @@ -162,6 +168,52 @@ pub(crate) struct CapturedItemWithoutTy { impl CapturedItemWithoutTy { fn with_ty(self, ctx: &mut InferenceContext<'_>) -> CapturedItem { + fn replace_placeholder_with_binder( + db: &dyn HirDatabase, + owner: DefWithBodyId, + ty: Ty, + ) -> Binders { + struct Filler<'a> { + db: &'a dyn HirDatabase, + generics: Generics, + } + impl FallibleTypeFolder for Filler<'_> { + type Error = (); + + fn as_dyn(&mut self) -> &mut dyn FallibleTypeFolder { + self + } + + fn interner(&self) -> Interner { + Interner + } + + fn try_fold_free_placeholder_ty( + &mut self, + idx: chalk_ir::PlaceholderIndex, + _outer_binder: DebruijnIndex, + ) -> std::result::Result { + let x = from_placeholder_idx(self.db, idx); + let Some(idx) = self.generics.param_idx(x) else { + return Err(()); + }; + Ok(TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, idx)) + .intern(Interner)) + } + } + let g_def = match owner { + DefWithBodyId::FunctionId(f) => 
Some(f.into()), + DefWithBodyId::StaticId(_) => None, + DefWithBodyId::ConstId(f) => Some(f.into()), + DefWithBodyId::VariantId(f) => Some(f.into()), + }; + let Some(generics) = g_def.map(|g_def| generics(db.upcast(), g_def)) else { + return Binders::empty(Interner, ty); + }; + let filler = &mut Filler { db, generics }; + let result = ty.clone().try_fold_with(filler, DebruijnIndex::INNERMOST).unwrap_or(ty); + make_binders(db, &filler.generics, result) + } let ty = self.place.ty(ctx).clone(); let ty = match &self.kind { CaptureKind::ByValue => ty, @@ -173,7 +225,12 @@ impl CapturedItemWithoutTy { TyKind::Ref(m, static_lifetime(), ty).intern(Interner) } }; - CapturedItem { place: self.place, kind: self.kind, span: self.span, ty } + CapturedItem { + place: self.place, + kind: self.kind, + span: self.span, + ty: replace_placeholder_with_binder(ctx.db, ctx.owner, ty), + } } } @@ -520,17 +577,7 @@ impl InferenceContext<'_> { // without creating query cycles. return self.result.closure_info.get(id).map(|x| x.1 == FnTrait::Fn).unwrap_or(true); } - let crate_id = self.owner.module(self.db.upcast()).krate(); - let Some(copy_trait) = self.db.lang_item(crate_id, LangItem::Copy).and_then(|x| x.as_trait()) else { - return false; - }; - let trait_ref = TyBuilder::trait_ref(self.db, copy_trait).push(ty).build(); - let env = self.db.trait_environment_for_body(self.owner); - let goal = Canonical { - value: InEnvironment::new(&env.env, trait_ref.cast(Interner)), - binders: CanonicalVarKinds::empty(Interner), - }; - self.db.trait_solve(crate_id, None, goal).is_some() + ty.is_copy(self.db, self.owner) } fn select_from_expr(&mut self, expr: ExprId) { diff --git a/crates/hir-ty/src/infer/coerce.rs b/crates/hir-ty/src/infer/coerce.rs index 2249d84edbfc..d613a5ff5049 100644 --- a/crates/hir-ty/src/infer/coerce.rs +++ b/crates/hir-ty/src/infer/coerce.rs @@ -21,8 +21,10 @@ use crate::{ Adjust, Adjustment, AutoBorrow, InferOk, InferenceContext, OverloadedDeref, PointerCast, TypeError, TypeMismatch, }, - static_lifetime, Canonical, DomainGoal, FnPointer, FnSig, Guidance, InEnvironment, Interner, - Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, + static_lifetime, + utils::ClosureSubst, + Canonical, DomainGoal, FnPointer, FnSig, Guidance, InEnvironment, Interner, Solution, + Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, }; use super::unify::InferenceTable; @@ -670,7 +672,7 @@ impl<'a> InferenceTable<'a> { } fn coerce_closure_fn_ty(closure_substs: &Substitution, safety: chalk_ir::Safety) -> Ty { - let closure_sig = closure_substs.at(Interner, 0).assert_ty_ref(Interner).clone(); + let closure_sig = ClosureSubst(closure_substs).sig_ty().clone(); match closure_sig.kind(Interner) { TyKind::Function(fn_ty) => TyKind::Function(FnPointer { num_binders: fn_ty.num_binders, diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 2c37d25f54b8..06ca9b026e37 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -282,7 +282,7 @@ impl<'a> InferenceContext<'a> { let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into(); let closure_ty = TyKind::Closure( closure_id, - Substitution::from1(Interner, sig_ty.clone()), + TyBuilder::subst_for_closure(self.db, self.owner, sig_ty.clone()), ) .intern(Interner); self.deferred_closures.entry(closure_id).or_default(); diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs index 512836afbf36..ebbf16caee1e 100644 --- a/crates/hir-ty/src/layout.rs +++ b/crates/hir-ty/src/layout.rs @@ -1,5 
+1,7 @@ //! Compute the binary representation of a type +use std::sync::Arc; + use base_db::CrateId; use chalk_ir::{AdtId, TyKind}; use hir_def::{ @@ -13,8 +15,8 @@ use la_arena::{Idx, RawIdx}; use stdx::never; use crate::{ - consteval::try_const_usize, db::HirDatabase, layout::adt::struct_variant_idx, Interner, - Substitution, Ty, + consteval::try_const_usize, db::HirDatabase, infer::normalize, layout::adt::struct_variant_idx, + utils::ClosureSubst, Interner, Substitution, TraitEnvironment, Ty, }; pub use self::{ @@ -80,6 +82,8 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result db.layout_of_adt(*def, subst.clone())?, TyKind::Scalar(s) => match s { @@ -146,7 +150,7 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result { let count = try_const_usize(&count).ok_or(LayoutError::UserError( - "mismatched type of const generic parameter".to_string(), + "unevaluated or mistyped const generic parameter".to_string(), ))? as u64; let element = layout_of_ty(db, element, krate)?; let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?; @@ -252,13 +256,19 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result { + TyKind::Closure(c, subst) => { let (def, _) = db.lookup_intern_closure((*c).into()); let infer = db.infer(def); let (captures, _) = infer.closure_info(c); let fields = captures .iter() - .map(|x| layout_of_ty(db, &x.ty, krate)) + .map(|x| { + layout_of_ty( + db, + &x.ty.clone().substitute(Interner, ClosureSubst(subst).parent_subst()), + krate, + ) + }) .collect::, _>>()?; let fields = fields.iter().collect::>(); let fields = fields.iter().collect::>(); diff --git a/crates/hir-ty/src/layout/tests.rs b/crates/hir-ty/src/layout/tests.rs index 3cd0cd399c4d..1502ab14cc7c 100644 --- a/crates/hir-ty/src/layout/tests.rs +++ b/crates/hir-ty/src/layout/tests.rs @@ -218,6 +218,22 @@ fn generic() { } } +#[test] +fn associated_types() { + size_and_align! { + trait Tr { + type Ty; + } + + impl Tr for i32 { + type Ty = i64; + } + + struct Foo(::Ty); + struct Goal(Foo); + } +} + #[test] fn return_position_impl_trait() { size_and_align_expr! { diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index 03536be88475..8ce7aaebed36 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -44,10 +44,10 @@ use chalk_ir::{ NoSolution, TyData, }; use either::Either; -use hir_def::{hir::ExprId, type_ref::Rawness, TypeOrConstParamId}; +use hir_def::{hir::ExprId, type_ref::Rawness, ConstId, TypeOrConstParamId}; use hir_expand::name; use la_arena::{Arena, Idx}; -use mir::MirEvalError; +use mir::{MirEvalError, VTableMap}; use rustc_hash::FxHashSet; use traits::FnTrait; use utils::Generics; @@ -151,11 +151,14 @@ pub type WhereClause = chalk_ir::WhereClause; /// the necessary bits of memory of the const eval session to keep the constant /// meaningful. 
#[derive(Debug, Default, Clone, PartialEq, Eq)] -pub struct MemoryMap(pub HashMap>); +pub struct MemoryMap { + pub memory: HashMap>, + pub vtable: VTableMap, +} impl MemoryMap { fn insert(&mut self, addr: usize, x: Vec) { - self.0.insert(addr, x); + self.memory.insert(addr, x); } /// This functions convert each address by a function `f` which gets the byte intervals and assign an address @@ -165,7 +168,7 @@ impl MemoryMap { &self, mut f: impl FnMut(&[u8]) -> Result, ) -> Result, MirEvalError> { - self.0.iter().map(|x| Ok((*x.0, f(x.1)?))).collect() + self.memory.iter().map(|x| Ok((*x.0, f(x.1)?))).collect() } } @@ -173,6 +176,9 @@ impl MemoryMap { #[derive(Debug, Clone, PartialEq, Eq)] pub enum ConstScalar { Bytes(Vec, MemoryMap), + // FIXME: this is a hack to get around chalk not being able to represent unevaluatable + // constants + UnevaluatedConst(ConstId, Substitution), /// Case of an unknown value that rustc might know but we don't // FIXME: this is a hack to get around chalk not being able to represent unevaluatable // constants diff --git a/crates/hir-ty/src/mir.rs b/crates/hir-ty/src/mir.rs index fef00771199a..4846bbfe5fd7 100644 --- a/crates/hir-ty/src/mir.rs +++ b/crates/hir-ty/src/mir.rs @@ -3,13 +3,14 @@ use std::{fmt::Display, iter}; use crate::{ - db::HirDatabase, display::HirDisplay, infer::PointerCast, lang_items::is_box, ClosureId, Const, - ConstScalar, InferenceResult, Interner, MemoryMap, Substitution, Ty, TyKind, + db::HirDatabase, display::HirDisplay, infer::PointerCast, lang_items::is_box, mapping::ToChalk, + CallableDefId, ClosureId, Const, ConstScalar, InferenceResult, Interner, MemoryMap, + Substitution, Ty, TyKind, }; use chalk_ir::Mutability; use hir_def::{ hir::{BindingId, Expr, ExprId, Ordering, PatId}, - DefWithBodyId, FieldId, UnionId, VariantId, + DefWithBodyId, FieldId, StaticId, UnionId, VariantId, }; use la_arena::{Arena, ArenaMap, Idx, RawIdx}; @@ -19,7 +20,7 @@ mod borrowck; mod pretty; pub use borrowck::{borrowck_query, BorrowckResult, MutabilityReason}; -pub use eval::{interpret_mir, pad16, Evaluator, MirEvalError}; +pub use eval::{interpret_mir, pad16, Evaluator, MirEvalError, VTableMap}; pub use lower::{ lower_to_mir, mir_body_for_closure_query, mir_body_query, mir_body_recover, MirLowerError, }; @@ -76,6 +77,9 @@ pub enum Operand { Move(Place), /// Constants are already semantically values, and remain unchanged. Constant(Const), + /// NON STANDARD: This kind of operand returns an immutable reference to that static memory. Rustc + /// handles it with the `Constant` variant somehow. 
+ Static(StaticId), } impl Operand { @@ -90,6 +94,17 @@ impl Operand { fn const_zst(ty: Ty) -> Operand { Self::from_bytes(vec![], ty) } + + fn from_fn( + db: &dyn HirDatabase, + func_id: hir_def::FunctionId, + generic_args: Substitution, + ) -> Operand { + let ty = + chalk_ir::TyKind::FnDef(CallableDefId::FunctionId(func_id).to_chalk(db), generic_args) + .intern(Interner); + Operand::from_bytes(vec![], ty) + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -110,7 +125,7 @@ impl ProjectionElem { &self, base: Ty, db: &dyn HirDatabase, - closure_field: impl FnOnce(ClosureId, usize) -> Ty, + closure_field: impl FnOnce(ClosureId, &Substitution, usize) -> Ty, ) -> Ty { match self { ProjectionElem::Deref => match &base.data(Interner).kind { @@ -142,7 +157,7 @@ impl ProjectionElem { never!("Out of bound tuple field"); TyKind::Error.intern(Interner) }), - TyKind::Closure(id, _) => closure_field(*id, *f), + TyKind::Closure(id, subst) => closure_field(*id, subst, *f), _ => { never!("Only tuple or closure has tuple or closure field"); return TyKind::Error.intern(Interner); @@ -261,7 +276,13 @@ impl SwitchTargets { } #[derive(Debug, PartialEq, Eq, Clone)] -pub enum Terminator { +pub struct Terminator { + span: MirSpan, + kind: TerminatorKind, +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum TerminatorKind { /// Block has one successor; we continue execution there. Goto { target: BasicBlockId }, @@ -836,6 +857,9 @@ pub enum Rvalue { /// affects alias analysis. ShallowInitBox(Operand, Ty), + /// NON STANDARD: allocates memory with the type's layout, and shallow init the box with the resulting pointer. + ShallowInitBoxWithAlloc(Ty), + /// A CopyForDeref is equivalent to a read from a place at the /// codegen level, but is treated specially by drop elaboration. When such a read happens, it /// is guaranteed (via nature of the mir_opt `Derefer` in rustc_mir_transform/src/deref_separator) @@ -918,7 +942,7 @@ impl MirBody { Operand::Copy(p) | Operand::Move(p) => { f(p); } - Operand::Constant(_) => (), + Operand::Constant(_) | Operand::Static(_) => (), } } for (_, block) in self.basic_blocks.iter_mut() { @@ -927,6 +951,7 @@ impl MirBody { StatementKind::Assign(p, r) => { f(p); match r { + Rvalue::ShallowInitBoxWithAlloc(_) => (), Rvalue::ShallowInitBox(o, _) | Rvalue::UnaryOp(_, o) | Rvalue::Cast(_, o, _) @@ -954,32 +979,32 @@ impl MirBody { } } match &mut block.terminator { - Some(x) => match x { - Terminator::SwitchInt { discr, .. } => for_operand(discr, &mut f), - Terminator::FalseEdge { .. } - | Terminator::FalseUnwind { .. } - | Terminator::Goto { .. } - | Terminator::Resume - | Terminator::GeneratorDrop - | Terminator::Abort - | Terminator::Return - | Terminator::Unreachable => (), - Terminator::Drop { place, .. } => { + Some(x) => match &mut x.kind { + TerminatorKind::SwitchInt { discr, .. } => for_operand(discr, &mut f), + TerminatorKind::FalseEdge { .. } + | TerminatorKind::FalseUnwind { .. } + | TerminatorKind::Goto { .. } + | TerminatorKind::Resume + | TerminatorKind::GeneratorDrop + | TerminatorKind::Abort + | TerminatorKind::Return + | TerminatorKind::Unreachable => (), + TerminatorKind::Drop { place, .. } => { f(place); } - Terminator::DropAndReplace { place, value, .. } => { + TerminatorKind::DropAndReplace { place, value, .. } => { f(place); for_operand(value, &mut f); } - Terminator::Call { func, args, destination, .. } => { + TerminatorKind::Call { func, args, destination, .. 
} => { for_operand(func, &mut f); args.iter_mut().for_each(|x| for_operand(x, &mut f)); f(destination); } - Terminator::Assert { cond, .. } => { + TerminatorKind::Assert { cond, .. } => { for_operand(cond, &mut f); } - Terminator::Yield { value, resume_arg, .. } => { + TerminatorKind::Yield { value, resume_arg, .. } => { for_operand(value, &mut f); f(resume_arg); } diff --git a/crates/hir-ty/src/mir/borrowck.rs b/crates/hir-ty/src/mir/borrowck.rs index 016090d509bb..7b99fbf42f01 100644 --- a/crates/hir-ty/src/mir/borrowck.rs +++ b/crates/hir-ty/src/mir/borrowck.rs @@ -13,7 +13,7 @@ use crate::{db::HirDatabase, ClosureId}; use super::{ BasicBlockId, BorrowKind, LocalId, MirBody, MirLowerError, MirSpan, Place, ProjectionElem, - Rvalue, StatementKind, Terminator, + Rvalue, StatementKind, TerminatorKind, }; #[derive(Debug, Clone, PartialEq, Eq)] @@ -141,26 +141,26 @@ fn ever_initialized_map(body: &MirBody) -> ArenaMap vec![*target], - Terminator::SwitchInt { targets, .. } => targets.all_targets().to_vec(), - Terminator::Resume - | Terminator::Abort - | Terminator::Return - | Terminator::Unreachable => vec![], - Terminator::Call { target, cleanup, destination, .. } => { + let targets = match &terminator.kind { + TerminatorKind::Goto { target } => vec![*target], + TerminatorKind::SwitchInt { targets, .. } => targets.all_targets().to_vec(), + TerminatorKind::Resume + | TerminatorKind::Abort + | TerminatorKind::Return + | TerminatorKind::Unreachable => vec![], + TerminatorKind::Call { target, cleanup, destination, .. } => { if destination.projection.len() == 0 && destination.local == l { is_ever_initialized = true; } target.into_iter().chain(cleanup.into_iter()).copied().collect() } - Terminator::Drop { .. } - | Terminator::DropAndReplace { .. } - | Terminator::Assert { .. } - | Terminator::Yield { .. } - | Terminator::GeneratorDrop - | Terminator::FalseEdge { .. } - | Terminator::FalseUnwind { .. } => { + TerminatorKind::Drop { .. } + | TerminatorKind::DropAndReplace { .. } + | TerminatorKind::Assert { .. } + | TerminatorKind::Yield { .. } + | TerminatorKind::GeneratorDrop + | TerminatorKind::FalseEdge { .. } + | TerminatorKind::FalseUnwind { .. } => { never!("We don't emit these MIR terminators yet"); vec![] } @@ -228,21 +228,21 @@ fn mutability_of_locals(body: &MirBody) -> ArenaMap { never!("Terminator should be none only in construction"); continue; }; - match terminator { - Terminator::Goto { .. } - | Terminator::Resume - | Terminator::Abort - | Terminator::Return - | Terminator::Unreachable - | Terminator::FalseEdge { .. } - | Terminator::FalseUnwind { .. } - | Terminator::GeneratorDrop - | Terminator::SwitchInt { .. } - | Terminator::Drop { .. } - | Terminator::DropAndReplace { .. } - | Terminator::Assert { .. } - | Terminator::Yield { .. } => (), - Terminator::Call { destination, .. } => { + match &terminator.kind { + TerminatorKind::Goto { .. } + | TerminatorKind::Resume + | TerminatorKind::Abort + | TerminatorKind::Return + | TerminatorKind::Unreachable + | TerminatorKind::FalseEdge { .. } + | TerminatorKind::FalseUnwind { .. } + | TerminatorKind::GeneratorDrop + | TerminatorKind::SwitchInt { .. } + | TerminatorKind::Drop { .. } + | TerminatorKind::DropAndReplace { .. } + | TerminatorKind::Assert { .. } + | TerminatorKind::Yield { .. } => (), + TerminatorKind::Call { destination, .. 
} => { if destination.projection.len() == 0 { if ever_init_map.get(destination.local).copied().unwrap_or_default() { push_mut_span(destination.local, MirSpan::Unknown); diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs index bb33da97e334..01d27f2672c2 100644 --- a/crates/hir-ty/src/mir/eval.rs +++ b/crates/hir-ty/src/mir/eval.rs @@ -1,39 +1,49 @@ //! This module provides a MIR interpreter, which is used in const eval. -use std::{borrow::Cow, collections::HashMap, iter, ops::Range, sync::Arc}; +use std::{borrow::Cow, collections::HashMap, fmt::Write, iter, ops::Range, sync::Arc}; -use base_db::CrateId; +use base_db::{CrateId, FileId}; use chalk_ir::{ fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable}, - DebruijnIndex, TyKind, + DebruijnIndex, Mutability, }; +use either::Either; use hir_def::{ builtin_type::BuiltinType, lang_item::{lang_attr, LangItem}, layout::{TagEncoding, Variants}, - AdtId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, VariantId, + AdtId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, StaticId, + TypeOrConstParamId, VariantId, }; +use hir_expand::{name::Name, InFile}; use intern::Interned; use la_arena::ArenaMap; +use rustc_hash::FxHashMap; +use syntax::{SyntaxNodePtr, TextRange}; use crate::{ consteval::{intern_const_scalar, ConstEvalError}, db::HirDatabase, + display::{ClosureStyle, HirDisplay}, from_placeholder_idx, infer::{normalize, PointerCast}, layout::{layout_of_ty, Layout, LayoutError, RustcEnumVariantIdx}, mapping::from_chalk, - method_resolution::{is_dyn_method, lookup_impl_method}, + method_resolution::{is_dyn_method, lookup_impl_const, lookup_impl_method}, + static_lifetime, traits::FnTrait, + utils::{generics, ClosureSubst, Generics}, CallableDefId, ClosureId, Const, ConstScalar, FnDefId, GenericArgData, Interner, MemoryMap, - Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, + Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, }; use super::{ const_as_usize, return_slot, AggregateKind, BinOp, CastKind, LocalId, MirBody, MirLowerError, - Operand, Place, ProjectionElem, Rvalue, StatementKind, Terminator, UnOp, + MirSpan, Operand, Place, ProjectionElem, Rvalue, StatementKind, TerminatorKind, UnOp, }; +mod shim; + macro_rules! from_bytes { ($ty:tt, $value:expr) => { ($ty::from_le_bytes(match ($value).try_into() { @@ -43,9 +53,15 @@ macro_rules! from_bytes { }; } -#[derive(Debug, Default)] -struct VTableMap { - ty_to_id: HashMap, +macro_rules! not_supported { + ($x: expr) => { + return Err(MirEvalError::NotSupported(format!($x))) + }; +} + +#[derive(Debug, Default, Clone, PartialEq, Eq)] +pub struct VTableMap { + ty_to_id: FxHashMap, id_to_ty: Vec, } @@ -75,6 +91,9 @@ pub struct Evaluator<'a> { trait_env: Arc, stack: Vec, heap: Vec, + /// Stores the global location of the statics. We const evaluate every static first time we need it + /// and see it's missing, then we add it to this to reuse. + static_locations: FxHashMap, /// We don't really have function pointers, i.e. pointers to some assembly instructions that we can run. Instead, we /// store the type as an interned id in place of function and vtable pointers, and we recover back the type at the /// time of use. 
@@ -88,7 +107,7 @@ pub struct Evaluator<'a> { stack_depth_limit: usize, } -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] enum Address { Stack(usize), Heap(usize), @@ -153,6 +172,13 @@ enum IntervalOrOwned { Owned(Vec), Borrowed(Interval), } + +impl From for IntervalOrOwned { + fn from(it: Interval) -> IntervalOrOwned { + IntervalOrOwned::Borrowed(it) + } +} + impl IntervalOrOwned { pub(crate) fn to_vec(self, memory: &Evaluator<'_>) -> Result> { Ok(match self { @@ -160,6 +186,13 @@ impl IntervalOrOwned { IntervalOrOwned::Borrowed(b) => b.get(memory)?.to_vec(), }) } + + fn get<'a>(&'a self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> { + Ok(match self { + IntervalOrOwned::Owned(o) => o, + IntervalOrOwned::Borrowed(b) => b.get(memory)?, + }) + } } impl Address { @@ -205,30 +238,129 @@ impl Address { #[derive(Clone, PartialEq, Eq)] pub enum MirEvalError { - ConstEvalError(Box), + ConstEvalError(String, Box), LayoutError(LayoutError, Ty), /// Means that code had type errors (or mismatched args) and we shouldn't generate mir in first place. TypeError(&'static str), /// Means that code had undefined behavior. We don't try to actively detect UB, but if it was detected /// then use this type of error. - UndefinedBehavior(&'static str), + UndefinedBehavior(String), + GenericArgNotProvided(TypeOrConstParamId, Substitution), Panic(String), MirLowerError(FunctionId, MirLowerError), MirLowerErrorForClosure(ClosureId, MirLowerError), TypeIsUnsized(Ty, &'static str), NotSupported(String), InvalidConst(Const), - InFunction(FunctionId, Box), + InFunction(Either, Box, MirSpan, DefWithBodyId), ExecutionLimitExceeded, StackOverflow, TargetDataLayoutNotAvailable, InvalidVTableId(usize), + CoerceUnsizedError(Ty), +} + +impl MirEvalError { + pub fn pretty_print( + &self, + f: &mut String, + db: &dyn HirDatabase, + span_formatter: impl Fn(FileId, TextRange) -> String, + ) -> std::result::Result<(), std::fmt::Error> { + writeln!(f, "Mir eval error:")?; + let mut err = self; + while let MirEvalError::InFunction(func, e, span, def) = err { + err = e; + match func { + Either::Left(func) => { + let function_name = db.function_data(*func); + writeln!(f, "In function {} ({:?})", function_name.name, func)?; + } + Either::Right(clos) => { + writeln!(f, "In {:?}", clos)?; + } + } + let source_map = db.body_with_source_map(*def).1; + let span: InFile = match span { + MirSpan::ExprId(e) => match source_map.expr_syntax(*e) { + Ok(s) => s.map(|x| x.into()), + Err(_) => continue, + }, + MirSpan::PatId(p) => match source_map.pat_syntax(*p) { + Ok(s) => s.map(|x| match x { + Either::Left(e) => e.into(), + Either::Right(e) => e.into(), + }), + Err(_) => continue, + }, + MirSpan::Unknown => continue, + }; + let file_id = span.file_id.original_file(db.upcast()); + let text_range = span.value.text_range(); + writeln!(f, "{}", span_formatter(file_id, text_range))?; + } + match err { + MirEvalError::InFunction(..) 
=> unreachable!(), + MirEvalError::LayoutError(err, ty) => { + write!( + f, + "Layout for type `{}` is not available due {err:?}", + ty.display(db).with_closure_style(ClosureStyle::ClosureWithId).to_string() + )?; + } + MirEvalError::GenericArgNotProvided(id, subst) => { + let parent = id.parent; + let param = &db.generic_params(parent).type_or_consts[id.local_id]; + writeln!( + f, + "Generic arg not provided for {}", + param.name().unwrap_or(&Name::missing()) + )?; + writeln!(f, "Provided args: [")?; + for g in subst.iter(Interner) { + write!(f, " {},", g.display(db).to_string())?; + } + writeln!(f, "]")?; + } + MirEvalError::MirLowerError(func, err) => { + let function_name = db.function_data(*func); + writeln!( + f, + "MIR lowering for function `{}` ({:?}) failed due:", + function_name.name, func + )?; + err.pretty_print(f, db, span_formatter)?; + } + MirEvalError::ConstEvalError(name, err) => { + MirLowerError::ConstEvalError(name.clone(), err.clone()).pretty_print( + f, + db, + span_formatter, + )?; + } + MirEvalError::TypeError(_) + | MirEvalError::UndefinedBehavior(_) + | MirEvalError::Panic(_) + | MirEvalError::MirLowerErrorForClosure(_, _) + | MirEvalError::TypeIsUnsized(_, _) + | MirEvalError::NotSupported(_) + | MirEvalError::InvalidConst(_) + | MirEvalError::ExecutionLimitExceeded + | MirEvalError::StackOverflow + | MirEvalError::TargetDataLayoutNotAvailable + | MirEvalError::CoerceUnsizedError(_) + | MirEvalError::InvalidVTableId(_) => writeln!(f, "{:?}", err)?, + } + Ok(()) + } } impl std::fmt::Debug for MirEvalError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - Self::ConstEvalError(arg0) => f.debug_tuple("ConstEvalError").field(arg0).finish(), + Self::ConstEvalError(arg0, arg1) => { + f.debug_tuple("ConstEvalError").field(arg0).field(arg1).finish() + } Self::LayoutError(arg0, arg1) => { f.debug_tuple("LayoutError").field(arg0).field(arg1).finish() } @@ -241,24 +373,28 @@ impl std::fmt::Debug for MirEvalError { Self::TypeIsUnsized(ty, it) => write!(f, "{ty:?} is unsized. {it} should be sized."), Self::ExecutionLimitExceeded => write!(f, "execution limit exceeded"), Self::StackOverflow => write!(f, "stack overflow"), + Self::GenericArgNotProvided(..) => f.debug_tuple("GenericArgNotProvided").finish(), Self::MirLowerError(arg0, arg1) => { f.debug_tuple("MirLowerError").field(arg0).field(arg1).finish() } Self::MirLowerErrorForClosure(arg0, arg1) => { f.debug_tuple("MirLowerError").field(arg0).field(arg1).finish() } + Self::CoerceUnsizedError(arg0) => { + f.debug_tuple("CoerceUnsizedError").field(arg0).finish() + } Self::InvalidVTableId(arg0) => f.debug_tuple("InvalidVTableId").field(arg0).finish(), Self::NotSupported(arg0) => f.debug_tuple("NotSupported").field(arg0).finish(), Self::InvalidConst(arg0) => { let data = &arg0.data(Interner); f.debug_struct("InvalidConst").field("ty", &data.ty).field("value", &arg0).finish() } - Self::InFunction(func, e) => { + Self::InFunction(func, e, span, _) => { let mut e = &**e; - let mut stack = vec![*func]; - while let Self::InFunction(f, next_e) = e { + let mut stack = vec![(*func, *span)]; + while let Self::InFunction(f, next_e, span, _) = e { e = &next_e; - stack.push(*f); + stack.push((*f, *span)); } f.debug_struct("WithStack").field("error", e).field("stack", &stack).finish() } @@ -266,22 +402,9 @@ impl std::fmt::Debug for MirEvalError { } } -macro_rules! 
not_supported { - ($x: expr) => { - return Err(MirEvalError::NotSupported(format!($x))) - }; -} - -impl From for MirEvalError { - fn from(value: ConstEvalError) -> Self { - match value { - _ => MirEvalError::ConstEvalError(Box::new(value)), - } - } -} - type Result = std::result::Result; +#[derive(Debug)] struct Locals<'a> { ptr: &'a ArenaMap, body: &'a MirBody, @@ -301,12 +424,14 @@ pub fn interpret_mir( ) -> Result { let ty = body.locals[return_slot()].ty.clone(); let mut evaluator = Evaluator::new(db, body, assert_placeholder_ty_is_unused); + let ty = evaluator.ty_filler(&ty, &subst, body.owner)?; let bytes = evaluator.interpret_mir(&body, None.into_iter(), subst.clone())?; - let memory_map = evaluator.create_memory_map( + let mut memory_map = evaluator.create_memory_map( &bytes, &ty, &Locals { ptr: &ArenaMap::new(), body: &body, subst: &subst }, )?; + memory_map.vtable = evaluator.vtable_map; return Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty)); } @@ -322,6 +447,7 @@ impl Evaluator<'_> { stack: vec![0], heap: vec![0], vtable_map: VTableMap::default(), + static_locations: HashMap::default(), db, trait_env, crate_id, @@ -365,15 +491,21 @@ impl Evaluator<'_> { let mut metadata = None; // locals are always sized for proj in &p.projection { let prev_ty = ty.clone(); - ty = proj.projected_ty(ty, self.db, |c, f| { + ty = proj.projected_ty(ty, self.db, |c, subst, f| { let (def, _) = self.db.lookup_intern_closure(c.into()); let infer = self.db.infer(def); let (captures, _) = infer.closure_info(&c); - captures.get(f).expect("broken closure field").ty.clone() + let parent_subst = ClosureSubst(subst).parent_subst(); + captures + .get(f) + .expect("broken closure field") + .ty + .clone() + .substitute(Interner, parent_subst) }); match proj { ProjectionElem::Deref => { - metadata = if self.size_of(&ty, locals)?.is_none() { + metadata = if self.size_align_of(&ty, locals)?.is_none() { Some(Interval { addr: addr.offset(self.ptr_size()), size: self.ptr_size() }) } else { None @@ -435,6 +567,11 @@ impl Evaluator<'_> { .map_err(|e| MirEvalError::LayoutError(e, ty.clone())) } + fn layout_filled(&self, ty: &Ty, locals: &Locals<'_>) -> Result { + let ty = &self.ty_filler(ty, locals.subst, locals.body.owner)?; + self.layout(ty) + } + fn layout_adt(&self, adt: AdtId, subst: Substitution) -> Result { self.db.layout_of_adt(adt, subst.clone()).map_err(|e| { MirEvalError::LayoutError(e, TyKind::Adt(chalk_ir::AdtId(adt), subst).intern(Interner)) @@ -449,6 +586,10 @@ impl Evaluator<'_> { Ok(match o { Operand::Copy(p) | Operand::Move(p) => self.place_ty(p, locals)?, Operand::Constant(c) => c.data(Interner).ty.clone(), + &Operand::Static(s) => { + let ty = self.db.infer(s.into())[self.db.body(s.into()).body_expr].clone(); + TyKind::Ref(Mutability::Not, static_lifetime(), ty).intern(Interner) + } }) } @@ -524,11 +665,11 @@ impl Evaluator<'_> { let Some(terminator) = current_block.terminator.as_ref() else { not_supported!("block without terminator"); }; - match terminator { - Terminator::Goto { target } => { + match &terminator.kind { + TerminatorKind::Goto { target } => { current_block_idx = *target; } - Terminator::Call { + TerminatorKind::Call { func, args, destination, @@ -545,23 +686,36 @@ impl Evaluator<'_> { match &fn_ty.data(Interner).kind { TyKind::Function(_) => { let bytes = self.eval_operand(func, &locals)?; - self.exec_fn_pointer(bytes, destination, &args, &locals)?; + self.exec_fn_pointer( + bytes, + destination, + &args, + &locals, + terminator.span, + )?; } TyKind::FnDef(def, 
generic_args) => { - self.exec_fn_def(*def, generic_args, destination, &args, &locals)?; + self.exec_fn_def( + *def, + generic_args, + destination, + &args, + &locals, + terminator.span, + )?; } x => not_supported!("unknown function type {x:?}"), } current_block_idx = target.expect("broken mir, function without target"); } - Terminator::SwitchInt { discr, targets } => { + TerminatorKind::SwitchInt { discr, targets } => { let val = u128::from_le_bytes(pad16( self.eval_operand(discr, &locals)?.get(&self)?, false, )); current_block_idx = targets.target_for_value(val); } - Terminator::Return => { + TerminatorKind::Return => { let ty = body.locals[return_slot()].ty.clone(); self.stack_depth_limit += 1; return Ok(self @@ -571,8 +725,8 @@ impl Evaluator<'_> { )? .to_owned()); } - Terminator::Unreachable => { - return Err(MirEvalError::UndefinedBehavior("unreachable executed")); + TerminatorKind::Unreachable => { + return Err(MirEvalError::UndefinedBehavior("unreachable executed".to_owned())); } _ => not_supported!("unknown terminator"), } @@ -725,13 +879,13 @@ impl Evaluator<'_> { }; match layout.variants { Variants::Single { index } => { - let r = self.db.const_eval_discriminant(EnumVariantId { + let r = self.const_eval_discriminant(EnumVariantId { parent: enum_id, local_id: index.0, })?; Owned(r.to_le_bytes().to_vec()) } - Variants::Multiple { tag, tag_encoding, .. } => { + Variants::Multiple { tag, tag_encoding, variants, .. } => { let Some(target_data_layout) = self.db.target_data_layout(self.crate_id) else { not_supported!("missing target data layout"); }; @@ -744,25 +898,20 @@ impl Evaluator<'_> { } TagEncoding::Niche { untagged_variant, niche_start, .. } => { let tag = &bytes[offset..offset + size]; - let candidate_discriminant = i128::from_le_bytes(pad16(tag, false)) - .wrapping_sub(niche_start as i128); - let enum_data = self.db.enum_data(enum_id); - let result = 'b: { - for (local_id, _) in enum_data.variants.iter() { - if candidate_discriminant - == self.db.const_eval_discriminant(EnumVariantId { - parent: enum_id, - local_id, - })? - { - break 'b candidate_discriminant; - } - } - self.db.const_eval_discriminant(EnumVariantId { - parent: enum_id, - local_id: untagged_variant.0, - })? - }; + let candidate_tag = i128::from_le_bytes(pad16(tag, false)) + .wrapping_sub(niche_start as i128) + as usize; + let variant = variants + .iter_enumerated() + .map(|(x, _)| x) + .filter(|x| *x != untagged_variant) + .nth(candidate_tag) + .unwrap_or(untagged_variant) + .0; + let result = self.const_eval_discriminant(EnumVariantId { + parent: enum_id, + local_id: variant, + })?; Owned(result.to_le_bytes().to_vec()) } } @@ -771,6 +920,13 @@ impl Evaluator<'_> { } Rvalue::Repeat(_, _) => not_supported!("evaluating repeat rvalue"), Rvalue::ShallowInitBox(_, _) => not_supported!("shallow init box"), + Rvalue::ShallowInitBoxWithAlloc(ty) => { + let Some((size, align)) = self.size_align_of(ty, locals)? else { + not_supported!("unsized box initialization"); + }; + let addr = self.heap_allocate(size, align); + Owned(addr.to_bytes()) + } Rvalue::CopyForDeref(_) => not_supported!("copy for deref"), Rvalue::Aggregate(kind, values) => { let values = values @@ -787,12 +943,12 @@ impl Evaluator<'_> { Owned(r) } AggregateKind::Tuple(ty) => { - let layout = self.layout(&ty)?; + let layout = self.layout_filled(&ty, locals)?; Owned(self.make_by_layout( layout.size.bytes_usize(), &layout, None, - values.iter().copied(), + values.iter().map(|&x| x.into()), )?) 
} AggregateKind::Union(x, f) => { @@ -814,74 +970,44 @@ impl Evaluator<'_> { size, &variant_layout, tag, - values.iter().copied(), + values.iter().map(|&x| x.into()), )?) } AggregateKind::Closure(ty) => { - let layout = self.layout(&ty)?; + let layout = self.layout_filled(&ty, locals)?; Owned(self.make_by_layout( layout.size.bytes_usize(), &layout, None, - values.iter().copied(), + values.iter().map(|&x| x.into()), )?) } } } Rvalue::Cast(kind, operand, target_ty) => match kind { CastKind::Pointer(cast) => match cast { - PointerCast::ReifyFnPointer => { + PointerCast::ReifyFnPointer | PointerCast::ClosureFnPointer(_) => { let current_ty = self.operand_ty(operand, locals)?; - if let TyKind::FnDef(_, _) = ¤t_ty.data(Interner).kind { + if let TyKind::FnDef(_, _) | TyKind::Closure(_, _) = + ¤t_ty.data(Interner).kind + { let id = self.vtable_map.id(current_ty); let ptr_size = self.ptr_size(); Owned(id.to_le_bytes()[0..ptr_size].to_vec()) } else { - not_supported!("ReifyFnPointer cast of a non FnDef type"); + not_supported!( + "creating a fn pointer from a non FnDef or Closure type" + ); } } PointerCast::Unsize => { let current_ty = self.operand_ty(operand, locals)?; - match &target_ty.data(Interner).kind { - TyKind::Raw(_, ty) | TyKind::Ref(_, _, ty) => { - match &ty.data(Interner).kind { - TyKind::Slice(_) => match ¤t_ty.data(Interner).kind { - TyKind::Raw(_, ty) | TyKind::Ref(_, _, ty) => { - match &ty.data(Interner).kind { - TyKind::Array(_, size) => { - let addr = self - .eval_operand(operand, locals)? - .get(&self)?; - let len = const_as_usize(size); - let mut r = Vec::with_capacity(16); - r.extend(addr.iter().copied()); - r.extend(len.to_le_bytes().into_iter()); - Owned(r) - } - _ => { - not_supported!("slice unsizing from non arrays") - } - } - } - _ => not_supported!("slice unsizing from non pointers"), - }, - TyKind::Dyn(_) => match ¤t_ty.data(Interner).kind { - TyKind::Raw(_, ty) | TyKind::Ref(_, _, ty) => { - let vtable = self.vtable_map.id(ty.clone()); - let addr = - self.eval_operand(operand, locals)?.get(&self)?; - let mut r = Vec::with_capacity(16); - r.extend(addr.iter().copied()); - r.extend(vtable.to_le_bytes().into_iter()); - Owned(r) - } - _ => not_supported!("dyn unsizing from non pointers"), - }, - _ => not_supported!("unknown unsized cast"), - } - } - _ => not_supported!("unsized cast on unknown pointer type"), - } + let addr = self.eval_operand(operand, locals)?; + self.coerce_unsized(addr, ¤t_ty, target_ty)? + } + PointerCast::MutToConstPointer | PointerCast::UnsafeFnPointer => { + // This is no-op + Borrowed(self.eval_operand(operand, locals)?) 
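The `PointerCast::Unsize` arm above now funnels everything through `coerce_unsized`, whose output is a fat pointer: the data address followed by either the array length (array-to-slice coercions) or a vtable-map id (coercions to `dyn Trait`). A minimal standalone sketch of that byte layout, assuming 8-byte pointers; the helper name and values are hypothetical and not part of this patch:

// Sketch: fat pointers as the evaluator stores them are plain byte strings,
// data address first, metadata (length or vtable id) second, both little endian.
fn make_fat_pointer(data_addr: usize, metadata: usize) -> Vec<u8> {
    let mut bytes = Vec::with_capacity(16);
    bytes.extend_from_slice(&data_addr.to_le_bytes()); // thin part: address of the pointee
    bytes.extend_from_slice(&metadata.to_le_bytes());  // array length or vtable-map id
    bytes
}

fn main() {
    // `&[u8; 4]` -> `&[u8]`: metadata is the length taken from the array type.
    let slice_fat = make_fat_pointer(0x1000, 4);
    // `&T` -> `&dyn Trait`: metadata is an id handed out by the evaluator's vtable map.
    let dyn_fat = make_fat_pointer(0x2000, 7);
    assert_eq!(slice_fat[8..], 4usize.to_le_bytes());
    assert_eq!(dyn_fat[8..], 7usize.to_le_bytes());
}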
} x => not_supported!("pointer cast {x:?}"), }, @@ -909,6 +1035,77 @@ impl Evaluator<'_> { }) } + fn coerce_unsized_look_through_fields( + &self, + ty: &Ty, + goal: impl Fn(&TyKind) -> Option, + ) -> Result { + let kind = ty.kind(Interner); + if let Some(x) = goal(kind) { + return Ok(x); + } + if let TyKind::Adt(id, subst) = kind { + if let AdtId::StructId(struct_id) = id.0 { + let field_types = self.db.field_types(struct_id.into()); + let mut field_types = field_types.iter(); + if let Some(ty) = + field_types.next().map(|x| x.1.clone().substitute(Interner, subst)) + { + return self.coerce_unsized_look_through_fields(&ty, goal); + } + } + } + Err(MirEvalError::CoerceUnsizedError(ty.clone())) + } + + fn coerce_unsized( + &mut self, + addr: Interval, + current_ty: &Ty, + target_ty: &Ty, + ) -> Result { + use IntervalOrOwned::*; + fn for_ptr(x: &TyKind) -> Option { + match x { + TyKind::Raw(_, ty) | TyKind::Ref(_, _, ty) => Some(ty.clone()), + _ => None, + } + } + Ok(match self.coerce_unsized_look_through_fields(target_ty, for_ptr)? { + ty => match &ty.data(Interner).kind { + TyKind::Slice(_) => { + match self.coerce_unsized_look_through_fields(current_ty, for_ptr)? { + ty => match &ty.data(Interner).kind { + TyKind::Array(_, size) => { + let len = const_as_usize(size); + let mut r = Vec::with_capacity(16); + let addr = addr.get(self)?; + r.extend(addr.iter().copied()); + r.extend(len.to_le_bytes().into_iter()); + Owned(r) + } + _ => { + not_supported!("slice unsizing from non arrays") + } + }, + } + } + TyKind::Dyn(_) => match ¤t_ty.data(Interner).kind { + TyKind::Raw(_, ty) | TyKind::Ref(_, _, ty) => { + let vtable = self.vtable_map.id(ty.clone()); + let mut r = Vec::with_capacity(16); + let addr = addr.get(self)?; + r.extend(addr.iter().copied()); + r.extend(vtable.to_le_bytes().into_iter()); + Owned(r) + } + _ => not_supported!("dyn unsizing from non pointers"), + }, + _ => not_supported!("unknown unsized cast"), + }, + }) + } + fn layout_of_variant( &mut self, x: VariantId, @@ -921,7 +1118,7 @@ impl Evaluator<'_> { if AdtId::from(f.parent) == adt { // Computing the exact size of enums require resolving the enum discriminants. 
In order to prevent loops (and // infinite sized type errors) we use a dummy layout - let i = self.db.const_eval_discriminant(x)?; + let i = self.const_eval_discriminant(x)?; return Ok((16, self.layout(&TyBuilder::unit())?, Some((0, 16, i)))); } } @@ -939,13 +1136,22 @@ impl Evaluator<'_> { _ => not_supported!("multi variant layout for non-enums"), }; let rustc_enum_variant_idx = RustcEnumVariantIdx(enum_variant_id.local_id); - let mut discriminant = self.db.const_eval_discriminant(enum_variant_id)?; + let mut discriminant = self.const_eval_discriminant(enum_variant_id)?; let variant_layout = variants[rustc_enum_variant_idx].clone(); let have_tag = match tag_encoding { TagEncoding::Direct => true, TagEncoding::Niche { untagged_variant, niche_variants: _, niche_start } => { - discriminant = discriminant.wrapping_add(niche_start as i128); - untagged_variant != rustc_enum_variant_idx + if untagged_variant == rustc_enum_variant_idx { + false + } else { + discriminant = (variants + .iter_enumerated() + .filter(|(x, _)| *x != untagged_variant) + .position(|(x, _)| x == rustc_enum_variant_idx) + .unwrap() as i128) + .wrapping_add(niche_start as i128); + true + } } }; ( @@ -970,7 +1176,7 @@ impl Evaluator<'_> { size: usize, // Not necessarily equal to variant_layout.size variant_layout: &Layout, tag: Option<(usize, usize, i128)>, - values: impl Iterator, + values: impl Iterator, ) -> Result> { let mut result = vec![0; size]; if let Some((offset, size, value)) = tag { @@ -987,6 +1193,10 @@ impl Evaluator<'_> { fn eval_operand(&mut self, x: &Operand, locals: &Locals<'_>) -> Result { Ok(match x { Operand::Copy(p) | Operand::Move(p) => self.eval_place(p, locals)?, + Operand::Static(st) => { + let addr = self.eval_static(*st, locals)?; + Interval::new(addr, self.ptr_size()) + } Operand::Constant(konst) => { let data = &konst.data(Interner); match &data.value { @@ -1003,37 +1213,71 @@ impl Evaluator<'_> { not_supported!("inference var constant") } chalk_ir::ConstValue::Placeholder(_) => not_supported!("placeholder constant"), - chalk_ir::ConstValue::Concrete(c) => match &c.interned { - ConstScalar::Bytes(v, memory_map) => { - let mut v: Cow<'_, [u8]> = Cow::Borrowed(v); - let patch_map = memory_map.transform_addresses(|b| { - let addr = self.heap_allocate(b.len()); - self.write_memory(addr, b)?; - Ok(addr.to_usize()) - })?; - let size = self.size_of(&data.ty, locals)?.unwrap_or(v.len()); - if size != v.len() { - // Handle self enum - if size == 16 && v.len() < 16 { - v = Cow::Owned(pad16(&v, false).to_vec()); - } else if size < 16 && v.len() == 16 { - v = Cow::Owned(v[0..size].to_vec()); - } else { - return Err(MirEvalError::InvalidConst(konst.clone())); - } - } - let addr = self.heap_allocate(size); - self.write_memory(addr, &v)?; - self.patch_addresses(&patch_map, addr, &data.ty, locals)?; - Interval::new(addr, size) - } - ConstScalar::Unknown => not_supported!("evaluating unknown const"), - }, + chalk_ir::ConstValue::Concrete(c) => { + self.allocate_const_in_heap(c, &data.ty, locals, konst)? 
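Both niche-encoding hunks above use the same mapping: variants other than `untagged_variant` are numbered consecutively in declaration order, and the stored tag is that index plus `niche_start`; decoding (when reading a discriminant) and encoding (in `layout_of_variant`) walk that mapping in opposite directions. A small self-contained sketch of the arithmetic, with hypothetical helpers standing in for the layout types:

// Tagged variants are numbered 0.. with the untagged one skipped; the stored tag is
// `niche_start + index`. Any tag outside that range decodes to the untagged variant.
fn encode_niche_tag(variant_count: usize, untagged: usize, variant: usize, niche_start: u128) -> Option<u128> {
    if variant == untagged {
        return None; // the untagged variant is represented by every non-niche bit pattern
    }
    let index = (0..variant_count).filter(|&v| v != untagged).position(|v| v == variant)?;
    Some(niche_start.wrapping_add(index as u128))
}

fn decode_niche_tag(variant_count: usize, untagged: usize, tag: u128, niche_start: u128) -> usize {
    let candidate = tag.wrapping_sub(niche_start) as usize;
    (0..variant_count).filter(|&v| v != untagged).nth(candidate).unwrap_or(untagged)
}

fn main() {
    // three variants, the middle one (index 1) is untagged, niche values start at 2
    for variant in 0..3 {
        if let Some(tag) = encode_niche_tag(3, 1, variant, 2) {
            assert_eq!(decode_niche_tag(3, 1, tag, 2), variant);
        }
    }
    assert_eq!(decode_niche_tag(3, 1, 100, 2), 1); // out-of-niche tag -> untagged variant
}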
+ } } } }) } + fn allocate_const_in_heap( + &mut self, + c: &chalk_ir::ConcreteConst, + ty: &Ty, + locals: &Locals<'_>, + konst: &chalk_ir::Const, + ) -> Result { + Ok(match &c.interned { + ConstScalar::Bytes(v, memory_map) => { + let mut v: Cow<'_, [u8]> = Cow::Borrowed(v); + let patch_map = memory_map.transform_addresses(|b| { + let addr = self.heap_allocate(b.len(), 1); // FIXME: align is wrong + self.write_memory(addr, b)?; + Ok(addr.to_usize()) + })?; + let (size, align) = self.size_align_of(ty, locals)?.unwrap_or((v.len(), 1)); + if size != v.len() { + // Handle self enum + if size == 16 && v.len() < 16 { + v = Cow::Owned(pad16(&v, false).to_vec()); + } else if size < 16 && v.len() == 16 { + v = Cow::Owned(v[0..size].to_vec()); + } else { + return Err(MirEvalError::InvalidConst(konst.clone())); + } + } + let addr = self.heap_allocate(size, align); + self.write_memory(addr, &v)?; + self.patch_addresses(&patch_map, &memory_map.vtable, addr, ty, locals)?; + Interval::new(addr, size) + } + ConstScalar::UnevaluatedConst(const_id, subst) => { + let subst = self.subst_filler(subst, locals); + let (const_id, subst) = lookup_impl_const( + self.db, + self.db.trait_environment_for_body(locals.body.owner), + *const_id, + subst, + ); + let c = self.db.const_eval(const_id.into(), subst).map_err(|e| { + let const_data = self.db.const_data(const_id); + MirEvalError::ConstEvalError( + const_data.name.as_ref().and_then(|x| x.as_str()).unwrap_or("_").to_owned(), + Box::new(e), + ) + })?; + if let chalk_ir::ConstValue::Concrete(c) = &c.data(Interner).value { + if let ConstScalar::Bytes(_, _) = &c.interned { + return self.allocate_const_in_heap(&c, ty, locals, konst); + } + } + not_supported!("failing at evaluating unevaluated const"); + } + ConstScalar::Unknown => not_supported!("evaluating unknown const"), + }) + } + fn eval_place(&mut self, p: &Place, locals: &Locals<'_>) -> Result { let addr = self.place_addr(p, locals)?; Ok(Interval::new( @@ -1046,53 +1290,61 @@ impl Evaluator<'_> { let (mem, pos) = match addr { Stack(x) => (&self.stack, x), Heap(x) => (&self.heap, x), - Invalid(_) => { - return Err(MirEvalError::UndefinedBehavior("read invalid memory address")) + Invalid(x) => { + return Err(MirEvalError::UndefinedBehavior(format!( + "read invalid memory address {x} with size {size}" + ))); } }; - mem.get(pos..pos + size).ok_or(MirEvalError::UndefinedBehavior("out of bound memory read")) + mem.get(pos..pos + size) + .ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory read".to_string())) } fn write_memory(&mut self, addr: Address, r: &[u8]) -> Result<()> { let (mem, pos) = match addr { Stack(x) => (&mut self.stack, x), Heap(x) => (&mut self.heap, x), - Invalid(_) => { - return Err(MirEvalError::UndefinedBehavior("write invalid memory address")) + Invalid(x) => { + return Err(MirEvalError::UndefinedBehavior(format!( + "write invalid memory address {x} with content {r:?}" + ))); } }; mem.get_mut(pos..pos + r.len()) - .ok_or(MirEvalError::UndefinedBehavior("out of bound memory write"))? + .ok_or_else(|| { + MirEvalError::UndefinedBehavior("out of bound memory write".to_string()) + })? .copy_from_slice(r); Ok(()) } - fn size_of(&self, ty: &Ty, locals: &Locals<'_>) -> Result> { + fn size_align_of(&self, ty: &Ty, locals: &Locals<'_>) -> Result> { if let DefWithBodyId::VariantId(f) = locals.body.owner { if let Some((adt, _)) = ty.as_adt() { if AdtId::from(f.parent) == adt { // Computing the exact size of enums require resolving the enum discriminants. 
In order to prevent loops (and // infinite sized type errors) we use a dummy size - return Ok(Some(16)); + return Ok(Some((16, 16))); } } } - let ty = &self.ty_filler(ty, locals.subst, locals.body.owner)?; - let layout = self.layout(ty); + let layout = self.layout_filled(ty, locals); if self.assert_placeholder_ty_is_unused { if matches!(layout, Err(MirEvalError::LayoutError(LayoutError::HasPlaceholder, _))) { - return Ok(Some(0)); + return Ok(Some((0, 1))); } } let layout = layout?; - Ok(layout.is_sized().then(|| layout.size.bytes_usize())) + Ok(layout + .is_sized() + .then(|| (layout.size.bytes_usize(), layout.align.abi.bytes() as usize))) } /// A version of `self.size_of` which returns error if the type is unsized. `what` argument should /// be something that complete this: `error: type {ty} was unsized. {what} should be sized` fn size_of_sized(&self, ty: &Ty, locals: &Locals<'_>, what: &'static str) -> Result { - match self.size_of(ty, locals)? { - Some(x) => Ok(x), + match self.size_align_of(ty, locals)? { + Some(x) => Ok(x.0), None => Err(MirEvalError::TypeIsUnsized(ty.clone(), what)), } } @@ -1120,7 +1372,7 @@ impl Evaluator<'_> { struct Filler<'a> { db: &'a dyn HirDatabase, subst: &'a Substitution, - skip_params: usize, + generics: Option, } impl FallibleTypeFolder for Filler<'_> { type Error = MirEvalError; @@ -1144,7 +1396,11 @@ impl Evaluator<'_> { match impl_trait_id { crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => { let infer = self.db.infer(func.into()); - let filler = &mut Filler { db: self.db, subst, skip_params: 0 }; + let filler = &mut Filler { + db: self.db, + subst, + generics: Some(generics(self.db.upcast(), func.into())), + }; filler.try_fold_ty(infer.type_of_rpit[idx].clone(), outer_binder) } crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => { @@ -1162,22 +1418,36 @@ impl Evaluator<'_> { _outer_binder: DebruijnIndex, ) -> std::result::Result { let x = from_placeholder_idx(self.db, idx); + let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(x)) else { + not_supported!("missing idx in generics"); + }; Ok(self .subst .as_slice(Interner) - .get((u32::from(x.local_id.into_raw()) as usize) + self.skip_params) + .get(idx) .and_then(|x| x.ty(Interner)) - .ok_or(MirEvalError::TypeError("Generic arg not provided"))? + .ok_or_else(|| MirEvalError::GenericArgNotProvided(x, self.subst.clone()))? 
.clone()) } } - let filler = &mut Filler { db: self.db, subst, skip_params: 0 }; - Ok(normalize(self.db, owner, ty.clone().try_fold_with(filler, DebruijnIndex::INNERMOST)?)) + let g_def = match owner { + DefWithBodyId::FunctionId(f) => Some(f.into()), + DefWithBodyId::StaticId(_) => None, + DefWithBodyId::ConstId(f) => Some(f.into()), + DefWithBodyId::VariantId(f) => Some(f.into()), + }; + let generics = g_def.map(|g_def| generics(self.db.upcast(), g_def)); + let filler = &mut Filler { db: self.db, subst, generics }; + Ok(normalize( + self.db, + self.trait_env.clone(), + ty.clone().try_fold_with(filler, DebruijnIndex::INNERMOST)?, + )) } - fn heap_allocate(&mut self, s: usize) -> Address { + fn heap_allocate(&mut self, size: usize, _align: usize) -> Address { let pos = self.heap.len(); - self.heap.extend(iter::repeat(0).take(s)); + self.heap.extend(iter::repeat(0).take(size)); Address::Heap(pos) } @@ -1185,16 +1455,6 @@ impl Evaluator<'_> { self.interpret_mir(&body, vec![].into_iter(), Substitution::empty(Interner)) } - fn detect_lang_function(&self, def: FunctionId) -> Option { - use LangItem::*; - let candidate = lang_attr(self.db.upcast(), def)?; - // We want to execute these functions with special logic - if [PanicFmt, BeginPanic, SliceLen].contains(&candidate) { - return Some(candidate); - } - None - } - fn detect_fn_trait(&self, def: FunctionId) -> Option { use LangItem::*; let ItemContainerId::TraitId(parent) = self.db.lookup_intern_function(def).container else { @@ -1214,9 +1474,9 @@ impl Evaluator<'_> { let mut mm = MemoryMap::default(); match ty.kind(Interner) { TyKind::Ref(_, _, t) => { - let size = self.size_of(t, locals)?; + let size = self.size_align_of(t, locals)?; match size { - Some(size) => { + Some((size, _)) => { let addr_usize = from_bytes!(usize, bytes); mm.insert( addr_usize, @@ -1246,15 +1506,17 @@ impl Evaluator<'_> { fn patch_addresses( &mut self, patch_map: &HashMap, + old_vtable: &VTableMap, addr: Address, ty: &Ty, locals: &Locals<'_>, ) -> Result<()> { // FIXME: support indirect references + let layout = self.layout(ty)?; let my_size = self.size_of_sized(ty, locals, "value to patch address")?; match ty.kind(Interner) { TyKind::Ref(_, _, t) => { - let size = self.size_of(t, locals)?; + let size = self.size_align_of(t, locals)?; match size { Some(_) => { let current = from_bytes!(usize, self.read_memory(addr, my_size)?); @@ -1270,98 +1532,50 @@ impl Evaluator<'_> { } } } - _ => (), - } - Ok(()) - } - - fn exec_intrinsic( - &mut self, - as_str: &str, - args: &[IntervalAndTy], - generic_args: Substitution, - destination: Interval, - locals: &Locals<'_>, - ) -> Result<()> { - match as_str { - "size_of" => { - let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else { - return Err(MirEvalError::TypeError("size_of generic arg is not provided")); - }; - let size = self.size_of_sized(ty, locals, "size_of arg")?; - destination.write_from_bytes(self, &size.to_le_bytes()[0..destination.size]) - } - "wrapping_add" => { - let [lhs, rhs] = args else { - return Err(MirEvalError::TypeError("const_eval_select args are not provided")); - }; - let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false)); - let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false)); - let ans = lhs.wrapping_add(rhs); - destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]) - } - "copy" | "copy_nonoverlapping" => { - let [src, dst, offset] = args else { - return Err(MirEvalError::TypeError("copy_nonoverlapping args are not provided")); - }; - let 
Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else { - return Err(MirEvalError::TypeError("copy_nonoverlapping generic arg is not provided")); - }; - let src = Address::from_bytes(src.get(self)?)?; - let dst = Address::from_bytes(dst.get(self)?)?; - let offset = from_bytes!(usize, offset.get(self)?); - let size = self.size_of_sized(ty, locals, "copy_nonoverlapping ptr type")?; - let size = offset * size; - let src = Interval { addr: src, size }; - let dst = Interval { addr: dst, size }; - dst.write_from_interval(self, src) - } - "offset" | "arith_offset" => { - let [ptr, offset] = args else { - return Err(MirEvalError::TypeError("offset args are not provided")); - }; - let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else { - return Err(MirEvalError::TypeError("offset generic arg is not provided")); - }; - let ptr = u128::from_le_bytes(pad16(ptr.get(self)?, false)); - let offset = u128::from_le_bytes(pad16(offset.get(self)?, false)); - let size = self.size_of_sized(ty, locals, "offset ptr type")? as u128; - let ans = ptr + offset * size; - destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]) - } - "assert_inhabited" | "assert_zero_valid" | "assert_uninit_valid" => { - // FIXME: We should actually implement these checks - Ok(()) - } - "forget" => { - // We don't call any drop glue yet, so there is nothing here - Ok(()) - } - "transmute" => { - let [arg] = args else { - return Err(MirEvalError::TypeError("trasmute arg is not provided")); - }; - destination.write_from_interval(self, arg.interval) + TyKind::Function(_) => { + let ty = old_vtable.ty_of_bytes(self.read_memory(addr, my_size)?)?.clone(); + let new_id = self.vtable_map.id(ty); + self.write_memory(addr, &new_id.to_le_bytes())?; } - "const_eval_select" => { - let [tuple, const_fn, _] = args else { - return Err(MirEvalError::TypeError("const_eval_select args are not provided")); - }; - let mut args = vec![const_fn.clone()]; - let TyKind::Tuple(_, fields) = tuple.ty.kind(Interner) else { - return Err(MirEvalError::TypeError("const_eval_select arg[0] is not a tuple")); - }; - let layout = self.layout(&tuple.ty)?; - for (i, field) in fields.iter(Interner).enumerate() { - let field = field.assert_ty_ref(Interner).clone(); - let offset = layout.fields.offset(i).bytes_usize(); - let addr = tuple.interval.addr.offset(offset); - args.push(IntervalAndTy::new(addr, field, self, locals)?); + TyKind::Adt(id, subst) => match id.0 { + AdtId::StructId(s) => { + for (i, (_, ty)) in self.db.field_types(s.into()).iter().enumerate() { + let offset = layout.fields.offset(i).bytes_usize(); + let ty = ty.clone().substitute(Interner, subst); + self.patch_addresses( + patch_map, + old_vtable, + addr.offset(offset), + &ty, + locals, + )?; + } } - self.exec_fn_trait(&args, destination, locals) - } - _ => not_supported!("unknown intrinsic {as_str}"), + AdtId::UnionId(_) => (), + AdtId::EnumId(_) => (), + }, + TyKind::AssociatedType(_, _) + | TyKind::Scalar(_) + | TyKind::Tuple(_, _) + | TyKind::Array(_, _) + | TyKind::Slice(_) + | TyKind::Raw(_, _) + | TyKind::OpaqueType(_, _) + | TyKind::FnDef(_, _) + | TyKind::Str + | TyKind::Never + | TyKind::Closure(_, _) + | TyKind::Generator(_, _) + | TyKind::GeneratorWitness(_, _) + | TyKind::Foreign(_) + | TyKind::Error + | TyKind::Placeholder(_) + | TyKind::Dyn(_) + | TyKind::Alias(_) + | TyKind::BoundVar(_) + | TyKind::InferenceVar(_, _) => (), } + Ok(()) } fn exec_fn_pointer( @@ -1370,13 +1584,18 @@ impl Evaluator<'_> { 
destination: Interval, args: &[IntervalAndTy], locals: &Locals<'_>, + span: MirSpan, ) -> Result<()> { let id = from_bytes!(usize, bytes.get(self)?); let next_ty = self.vtable_map.ty(id)?.clone(); - if let TyKind::FnDef(def, generic_args) = &next_ty.data(Interner).kind { - self.exec_fn_def(*def, generic_args, destination, args, &locals)?; - } else { - return Err(MirEvalError::TypeError("function pointer to non function")); + match &next_ty.data(Interner).kind { + TyKind::FnDef(def, generic_args) => { + self.exec_fn_def(*def, generic_args, destination, args, &locals, span)?; + } + TyKind::Closure(id, subst) => { + self.exec_closure(*id, bytes.slice(0..0), subst, destination, args, locals, span)?; + } + _ => return Err(MirEvalError::TypeError("function pointer to non function")), } Ok(()) } @@ -1388,6 +1607,8 @@ impl Evaluator<'_> { generic_args: &Substitution, destination: Interval, args: &[IntervalAndTy], + locals: &Locals<'_>, + span: MirSpan, ) -> Result<()> { let mir_body = self .db @@ -1396,7 +1617,16 @@ impl Evaluator<'_> { let arg_bytes = iter::once(Ok(closure_data.get(self)?.to_owned())) .chain(args.iter().map(|x| Ok(x.get(&self)?.to_owned()))) .collect::>>()?; - let bytes = self.interpret_mir(&mir_body, arg_bytes.into_iter(), generic_args.clone())?; + let bytes = self + .interpret_mir(&mir_body, arg_bytes.into_iter(), generic_args.clone()) + .map_err(|e| { + MirEvalError::InFunction( + Either::Right(closure), + Box::new(e), + span, + locals.body.owner, + ) + })?; destination.write_from_bytes(self, &bytes) } @@ -1407,16 +1637,17 @@ impl Evaluator<'_> { destination: Interval, args: &[IntervalAndTy], locals: &Locals<'_>, + span: MirSpan, ) -> Result<()> { let def: CallableDefId = from_chalk(self.db, def); let generic_args = self.subst_filler(generic_args, &locals); match def { CallableDefId::FunctionId(def) => { if let Some(_) = self.detect_fn_trait(def) { - self.exec_fn_trait(&args, destination, locals)?; + self.exec_fn_trait(&args, destination, locals, span)?; return Ok(()); } - self.exec_fn_with_args(def, args, generic_args, locals, destination)?; + self.exec_fn_with_args(def, args, generic_args, locals, destination, span)?; } CallableDefId::StructId(id) => { let (size, variant_layout, tag) = @@ -1425,7 +1656,7 @@ impl Evaluator<'_> { size, &variant_layout, tag, - args.iter().map(|x| x.interval), + args.iter().map(|x| x.interval.into()), )?; destination.write_from_bytes(self, &result)?; } @@ -1436,7 +1667,7 @@ impl Evaluator<'_> { size, &variant_layout, tag, - args.iter().map(|x| x.interval), + args.iter().map(|x| x.interval.into()), )?; destination.write_from_bytes(self, &result)?; } @@ -1451,50 +1682,37 @@ impl Evaluator<'_> { generic_args: Substitution, locals: &Locals<'_>, destination: Interval, + span: MirSpan, ) -> Result<()> { - let function_data = self.db.function_data(def); - let is_intrinsic = match &function_data.abi { - Some(abi) => *abi == Interned::new_str("rust-intrinsic"), - None => match def.lookup(self.db.upcast()).container { - hir_def::ItemContainerId::ExternBlockId(block) => { - let id = block.lookup(self.db.upcast()).id; - id.item_tree(self.db.upcast())[id.value].abi.as_deref() - == Some("rust-intrinsic") - } - _ => false, - }, - }; - if is_intrinsic { - return self.exec_intrinsic( - function_data.name.as_text().unwrap_or_default().as_str(), - args, - generic_args, - destination, - &locals, - ); + if self.detect_and_exec_special_function( + def, + args, + &generic_args, + locals, + destination, + span, + )? 
{ + return Ok(()); } let arg_bytes = args.iter().map(|x| Ok(x.get(&self)?.to_owned())).collect::>>()?; - let result = if let Some(x) = self.detect_lang_function(def) { - self.exec_lang_item(x, &arg_bytes)? - } else { - if let Some(self_ty_idx) = - is_dyn_method(self.db, self.trait_env.clone(), def, generic_args.clone()) - { - // In the layout of current possible receiver, which at the moment of writing this code is one of - // `&T`, `&mut T`, `Box`, `Rc`, `Arc`, and `Pin
<P>
` where `P` is one of possible receivers, - // the vtable is exactly in the `[ptr_size..2*ptr_size]` bytes. So we can use it without branching on - // the type. - let ty = self - .vtable_map - .ty_of_bytes(&arg_bytes[0][self.ptr_size()..self.ptr_size() * 2])?; - let mut args_for_target = args.to_vec(); - args_for_target[0] = IntervalAndTy { - interval: args_for_target[0].interval.slice(0..self.ptr_size()), - ty: ty.clone(), - }; - let ty = GenericArgData::Ty(ty.clone()).intern(Interner); - let generics_for_target = Substitution::from_iter( + if let Some(self_ty_idx) = + is_dyn_method(self.db, self.trait_env.clone(), def, generic_args.clone()) + { + // In the layout of current possible receiver, which at the moment of writing this code is one of + // `&T`, `&mut T`, `Box`, `Rc`, `Arc`, and `Pin
<P>
` where `P` is one of possible recievers, + // the vtable is exactly in the `[ptr_size..2*ptr_size]` bytes. So we can use it without branching on + // the type. + let ty = + self.vtable_map.ty_of_bytes(&arg_bytes[0][self.ptr_size()..self.ptr_size() * 2])?; + let mut args_for_target = args.to_vec(); + args_for_target[0] = IntervalAndTy { + interval: args_for_target[0].interval.slice(0..self.ptr_size()), + ty: ty.clone(), + }; + let ty = GenericArgData::Ty(ty.clone()).intern(Interner); + let generics_for_target = + Substitution::from_iter( Interner, generic_args.iter(Interner).enumerate().map(|(i, x)| { if i == self_ty_idx { @@ -1504,23 +1722,25 @@ impl Evaluator<'_> { } }), ); - return self.exec_fn_with_args( - def, - &args_for_target, - generics_for_target, - locals, - destination, - ); - } - let (imp, generic_args) = - lookup_impl_method(self.db, self.trait_env.clone(), def, generic_args); - let generic_args = self.subst_filler(&generic_args, &locals); - let def = imp.into(); - let mir_body = - self.db.mir_body(def).map_err(|e| MirEvalError::MirLowerError(imp, e))?; - self.interpret_mir(&mir_body, arg_bytes.iter().cloned(), generic_args) - .map_err(|e| MirEvalError::InFunction(imp, Box::new(e)))? - }; + return self.exec_fn_with_args( + def, + &args_for_target, + generics_for_target, + locals, + destination, + span, + ); + } + let (imp, generic_args) = + lookup_impl_method(self.db, self.trait_env.clone(), def, generic_args); + let generic_args = self.subst_filler(&generic_args, &locals); + let def = imp.into(); + let mir_body = self.db.mir_body(def).map_err(|e| MirEvalError::MirLowerError(imp, e))?; + let result = self + .interpret_mir(&mir_body, arg_bytes.iter().cloned(), generic_args) + .map_err(|e| { + MirEvalError::InFunction(Either::Left(imp), Box::new(e), span, locals.body.owner) + })?; destination.write_from_bytes(self, &result)?; Ok(()) } @@ -1530,6 +1750,7 @@ impl Evaluator<'_> { args: &[IntervalAndTy], destination: Interval, locals: &Locals<'_>, + span: MirSpan, ) -> Result<()> { let func = args.get(0).ok_or(MirEvalError::TypeError("fn trait with no arg"))?; let mut func_ty = func.ty.clone(); @@ -1547,35 +1768,69 @@ impl Evaluator<'_> { } match &func_ty.data(Interner).kind { TyKind::FnDef(def, subst) => { - self.exec_fn_def(*def, subst, destination, &args[1..], locals)?; + self.exec_fn_def(*def, subst, destination, &args[1..], locals, span)?; } TyKind::Function(_) => { - self.exec_fn_pointer(func_data, destination, &args[1..], locals)?; + self.exec_fn_pointer(func_data, destination, &args[1..], locals, span)?; } TyKind::Closure(closure, subst) => { - self.exec_closure(*closure, func_data, subst, destination, &args[1..])?; + self.exec_closure( + *closure, + func_data, + &Substitution::from_iter(Interner, ClosureSubst(subst).parent_subst()), + destination, + &args[1..], + locals, + span, + )?; } x => not_supported!("Call FnTrait methods with type {x:?}"), } Ok(()) } - fn exec_lang_item(&self, x: LangItem, args: &[Vec]) -> Result> { - use LangItem::*; - let mut args = args.iter(); - match x { - // FIXME: we want to find the panic message from arguments, but it wouldn't work - // currently even if we do that, since macro expansion of panic related macros - // is dummy. 
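As the receiver comment above notes, every supported `self` type for a `dyn` call is a fat pointer, so bytes `[ptr_size..2*ptr_size]` of the first argument are a vtable-map id naming the concrete `Self` type, and bytes `[0..ptr_size]` are the thin receiver that gets forwarded. A standalone sketch of that split, assuming pointer-sized ids and a hypothetical helper name:

// Split a fat receiver into its thin data pointer and the vtable-map id that the
// evaluator uses to recover the concrete Self type for the virtual call.
fn split_dyn_receiver(receiver: &[u8]) -> (&[u8], usize) {
    let ptr_size = std::mem::size_of::<usize>();
    let thin = &receiver[..ptr_size];
    let mut id = [0u8; std::mem::size_of::<usize>()];
    id.copy_from_slice(&receiver[ptr_size..2 * ptr_size]);
    (thin, usize::from_le_bytes(id))
}

fn main() {
    // fake `&dyn Trait`: data lives at 0x1000, the vtable map assigned id 3 to the type
    let mut receiver = Vec::new();
    receiver.extend_from_slice(&0x1000usize.to_le_bytes());
    receiver.extend_from_slice(&3usize.to_le_bytes());
    let (thin, vtable_id) = split_dyn_receiver(&receiver);
    assert_eq!(thin, &0x1000usize.to_le_bytes()[..]);
    assert_eq!(vtable_id, 3);
}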
- PanicFmt | BeginPanic => Err(MirEvalError::Panic("".to_string())), - SliceLen => { - let arg = args - .next() - .ok_or(MirEvalError::TypeError("argument of <[T]>::len() is not provided"))?; - let ptr_size = arg.len() / 2; - Ok(arg[ptr_size..].into()) + fn eval_static(&mut self, st: StaticId, locals: &Locals<'_>) -> Result
<Address>
{ + if let Some(o) = self.static_locations.get(&st) { + return Ok(*o); + }; + let static_data = self.db.static_data(st); + let result = if !static_data.is_extern { + let konst = + self.db.const_eval(st.into(), Substitution::empty(Interner)).map_err(|e| { + MirEvalError::ConstEvalError( + static_data.name.as_str().unwrap_or("_").to_owned(), + Box::new(e), + ) + })?; + let data = &konst.data(Interner); + if let chalk_ir::ConstValue::Concrete(c) = &data.value { + self.allocate_const_in_heap(&c, &data.ty, locals, &konst)? + } else { + not_supported!("unevaluatable static"); + } + } else { + let ty = &self.db.infer(st.into())[self.db.body(st.into()).body_expr]; + let Some((size, align)) = self.size_align_of(&ty, locals)? else { + not_supported!("unsized extern static"); + }; + let addr = self.heap_allocate(size, align); + Interval::new(addr, size) + }; + let addr = self.heap_allocate(self.ptr_size(), self.ptr_size()); + self.write_memory(addr, &result.addr.to_bytes())?; + self.static_locations.insert(st, addr); + Ok(addr) + } + + fn const_eval_discriminant(&self, variant: EnumVariantId) -> Result { + let r = self.db.const_eval_discriminant(variant); + match r { + Ok(r) => Ok(r), + Err(e) => { + let data = self.db.enum_data(variant.parent); + let name = format!("{}::{}", data.name, data.variants[variant.local_id].name); + Err(MirEvalError::ConstEvalError(name, Box::new(e))) } - x => not_supported!("Executing lang item {x:?}"), } } } diff --git a/crates/hir-ty/src/mir/eval/shim.rs b/crates/hir-ty/src/mir/eval/shim.rs new file mode 100644 index 000000000000..78938af15b87 --- /dev/null +++ b/crates/hir-ty/src/mir/eval/shim.rs @@ -0,0 +1,396 @@ +//! Interpret intrinsics, lang items and `extern "C"` wellknown functions which their implementation +//! is not available. + +use super::*; + +macro_rules! from_bytes { + ($ty:tt, $value:expr) => { + ($ty::from_le_bytes(match ($value).try_into() { + Ok(x) => x, + Err(_) => return Err(MirEvalError::TypeError("mismatched size")), + })) + }; +} + +macro_rules! 
not_supported { + ($x: expr) => { + return Err(MirEvalError::NotSupported(format!($x))) + }; +} + +impl Evaluator<'_> { + pub(super) fn detect_and_exec_special_function( + &mut self, + def: FunctionId, + args: &[IntervalAndTy], + generic_args: &Substitution, + locals: &Locals<'_>, + destination: Interval, + span: MirSpan, + ) -> Result { + let function_data = self.db.function_data(def); + let is_intrinsic = match &function_data.abi { + Some(abi) => *abi == Interned::new_str("rust-intrinsic"), + None => match def.lookup(self.db.upcast()).container { + hir_def::ItemContainerId::ExternBlockId(block) => { + let id = block.lookup(self.db.upcast()).id; + id.item_tree(self.db.upcast())[id.value].abi.as_deref() + == Some("rust-intrinsic") + } + _ => false, + }, + }; + if is_intrinsic { + self.exec_intrinsic( + function_data.name.as_text().unwrap_or_default().as_str(), + args, + generic_args, + destination, + &locals, + span, + )?; + return Ok(true); + } + let alloc_fn = function_data + .attrs + .iter() + .filter_map(|x| x.path().as_ident()) + .filter_map(|x| x.as_str()) + .find(|x| { + [ + "rustc_allocator", + "rustc_deallocator", + "rustc_reallocator", + "rustc_allocator_zeroed", + ] + .contains(x) + }); + if let Some(alloc_fn) = alloc_fn { + self.exec_alloc_fn(alloc_fn, args, destination)?; + return Ok(true); + } + if let Some(x) = self.detect_lang_function(def) { + let arg_bytes = + args.iter().map(|x| Ok(x.get(&self)?.to_owned())).collect::>>()?; + let result = self.exec_lang_item(x, &arg_bytes)?; + destination.write_from_bytes(self, &result)?; + return Ok(true); + } + Ok(false) + } + + fn exec_alloc_fn( + &mut self, + alloc_fn: &str, + args: &[IntervalAndTy], + destination: Interval, + ) -> Result<()> { + match alloc_fn { + "rustc_allocator_zeroed" | "rustc_allocator" => { + let [size, align] = args else { + return Err(MirEvalError::TypeError("rustc_allocator args are not provided")); + }; + let size = from_bytes!(usize, size.get(self)?); + let align = from_bytes!(usize, align.get(self)?); + let result = self.heap_allocate(size, align); + destination.write_from_bytes(self, &result.to_bytes())?; + } + "rustc_deallocator" => { /* no-op for now */ } + "rustc_reallocator" => { + let [ptr, old_size, align, new_size] = args else { + return Err(MirEvalError::TypeError("rustc_allocator args are not provided")); + }; + let ptr = Address::from_bytes(ptr.get(self)?)?; + let old_size = from_bytes!(usize, old_size.get(self)?); + let new_size = from_bytes!(usize, new_size.get(self)?); + let align = from_bytes!(usize, align.get(self)?); + let result = self.heap_allocate(new_size, align); + Interval { addr: result, size: old_size } + .write_from_interval(self, Interval { addr: ptr, size: old_size })?; + destination.write_from_bytes(self, &result.to_bytes())?; + } + _ => not_supported!("unknown alloc function"), + } + Ok(()) + } + + fn detect_lang_function(&self, def: FunctionId) -> Option { + use LangItem::*; + let candidate = lang_attr(self.db.upcast(), def)?; + // We want to execute these functions with special logic + if [PanicFmt, BeginPanic, SliceLen].contains(&candidate) { + return Some(candidate); + } + None + } + + fn exec_lang_item(&self, x: LangItem, args: &[Vec]) -> Result> { + use LangItem::*; + let mut args = args.iter(); + match x { + // FIXME: we want to find the panic message from arguments, but it wouldn't work + // currently even if we do that, since macro expansion of panic related macros + // is dummy. 
+ PanicFmt | BeginPanic => Err(MirEvalError::Panic("".to_string())), + SliceLen => { + let arg = args + .next() + .ok_or(MirEvalError::TypeError("argument of <[T]>::len() is not provided"))?; + let ptr_size = arg.len() / 2; + Ok(arg[ptr_size..].into()) + } + x => not_supported!("Executing lang item {x:?}"), + } + } + + fn exec_intrinsic( + &mut self, + as_str: &str, + args: &[IntervalAndTy], + generic_args: &Substitution, + destination: Interval, + locals: &Locals<'_>, + span: MirSpan, + ) -> Result<()> { + // We are a single threaded runtime with no UB checking and no optimization, so + // we can implement these as normal functions. + if let Some(name) = as_str.strip_prefix("atomic_") { + let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else { + return Err(MirEvalError::TypeError("atomic intrinsic generic arg is not provided")); + }; + let Some(arg0) = args.get(0) else { + return Err(MirEvalError::TypeError("atomic intrinsic arg0 is not provided")); + }; + let arg0_addr = Address::from_bytes(arg0.get(self)?)?; + let arg0_interval = Interval::new( + arg0_addr, + self.size_of_sized(ty, locals, "atomic intrinsic type arg")?, + ); + if name.starts_with("load_") { + return destination.write_from_interval(self, arg0_interval); + } + let Some(arg1) = args.get(1) else { + return Err(MirEvalError::TypeError("atomic intrinsic arg1 is not provided")); + }; + if name.starts_with("store_") { + return arg0_interval.write_from_interval(self, arg1.interval); + } + if name.starts_with("xchg_") { + destination.write_from_interval(self, arg0_interval)?; + return arg0_interval.write_from_interval(self, arg1.interval); + } + if name.starts_with("xadd_") { + destination.write_from_interval(self, arg0_interval)?; + let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false)); + let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false)); + let ans = lhs.wrapping_add(rhs); + return arg0_interval + .write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]); + } + if name.starts_with("xsub_") { + destination.write_from_interval(self, arg0_interval)?; + let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false)); + let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false)); + let ans = lhs.wrapping_sub(rhs); + return arg0_interval + .write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]); + } + if name.starts_with("and_") { + destination.write_from_interval(self, arg0_interval)?; + let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false)); + let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false)); + let ans = lhs & rhs; + return arg0_interval + .write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]); + } + if name.starts_with("or_") { + destination.write_from_interval(self, arg0_interval)?; + let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false)); + let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false)); + let ans = lhs | rhs; + return arg0_interval + .write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]); + } + if name.starts_with("xor_") { + destination.write_from_interval(self, arg0_interval)?; + let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false)); + let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false)); + let ans = lhs ^ rhs; + return arg0_interval + .write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]); + } + if name.starts_with("nand_") { + destination.write_from_interval(self, arg0_interval)?; + let lhs = 
u128::from_le_bytes(pad16(arg0_interval.get(self)?, false)); + let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false)); + let ans = !(lhs & rhs); + return arg0_interval + .write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]); + } + let Some(arg2) = args.get(2) else { + return Err(MirEvalError::TypeError("atomic intrinsic arg2 is not provided")); + }; + if name.starts_with("cxchg_") || name.starts_with("cxchgweak_") { + let dest = if arg1.get(self)? == arg0_interval.get(self)? { + arg0_interval.write_from_interval(self, arg2.interval)?; + (arg1.interval, true) + } else { + (arg0_interval, false) + }; + let result_ty = TyKind::Tuple( + 2, + Substitution::from_iter(Interner, [ty.clone(), TyBuilder::bool()]), + ) + .intern(Interner); + let layout = self.layout(&result_ty)?; + let result = self.make_by_layout( + layout.size.bytes_usize(), + &layout, + None, + [ + IntervalOrOwned::Borrowed(dest.0), + IntervalOrOwned::Owned(vec![u8::from(dest.1)]), + ] + .into_iter(), + )?; + return destination.write_from_bytes(self, &result); + } + not_supported!("unknown atomic intrinsic {name}"); + } + match as_str { + "size_of" => { + let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else { + return Err(MirEvalError::TypeError("size_of generic arg is not provided")); + }; + let size = self.size_of_sized(ty, locals, "size_of arg")?; + destination.write_from_bytes(self, &size.to_le_bytes()[0..destination.size]) + } + "min_align_of" | "pref_align_of" => { + let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else { + return Err(MirEvalError::TypeError("align_of generic arg is not provided")); + }; + let align = self.layout_filled(ty, locals)?.align.abi.bytes(); + destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size]) + } + "needs_drop" => { + let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else { + return Err(MirEvalError::TypeError("size_of generic arg is not provided")); + }; + let result = !ty.clone().is_copy(self.db, locals.body.owner); + destination.write_from_bytes(self, &[u8::from(result)]) + } + "ptr_guaranteed_cmp" => { + // FIXME: this is wrong for const eval, it should return 2 in some + // cases. + let [lhs, rhs] = args else { + return Err(MirEvalError::TypeError("wrapping_add args are not provided")); + }; + let ans = lhs.get(self)? 
== rhs.get(self)?; + destination.write_from_bytes(self, &[u8::from(ans)]) + } + "wrapping_add" => { + let [lhs, rhs] = args else { + return Err(MirEvalError::TypeError("wrapping_add args are not provided")); + }; + let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false)); + let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false)); + let ans = lhs.wrapping_add(rhs); + destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]) + } + "add_with_overflow" => { + let [lhs, rhs] = args else { + return Err(MirEvalError::TypeError("const_eval_select args are not provided")); + }; + let result_ty = TyKind::Tuple( + 2, + Substitution::from_iter(Interner, [lhs.ty.clone(), TyBuilder::bool()]), + ) + .intern(Interner); + let op_size = + self.size_of_sized(&lhs.ty, locals, "operand of add_with_overflow")?; + let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false)); + let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false)); + let ans = lhs.wrapping_add(rhs); + let is_overflow = false; + let is_overflow = vec![u8::from(is_overflow)]; + let layout = self.layout(&result_ty)?; + let result = self.make_by_layout( + layout.size.bytes_usize(), + &layout, + None, + [ans.to_le_bytes()[0..op_size].to_vec(), is_overflow] + .into_iter() + .map(IntervalOrOwned::Owned), + )?; + destination.write_from_bytes(self, &result) + } + "copy" | "copy_nonoverlapping" => { + let [src, dst, offset] = args else { + return Err(MirEvalError::TypeError("copy_nonoverlapping args are not provided")); + }; + let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else { + return Err(MirEvalError::TypeError("copy_nonoverlapping generic arg is not provided")); + }; + let src = Address::from_bytes(src.get(self)?)?; + let dst = Address::from_bytes(dst.get(self)?)?; + let offset = from_bytes!(usize, offset.get(self)?); + let size = self.size_of_sized(ty, locals, "copy_nonoverlapping ptr type")?; + let size = offset * size; + let src = Interval { addr: src, size }; + let dst = Interval { addr: dst, size }; + dst.write_from_interval(self, src) + } + "offset" | "arith_offset" => { + let [ptr, offset] = args else { + return Err(MirEvalError::TypeError("offset args are not provided")); + }; + let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else { + return Err(MirEvalError::TypeError("offset generic arg is not provided")); + }; + let ptr = u128::from_le_bytes(pad16(ptr.get(self)?, false)); + let offset = u128::from_le_bytes(pad16(offset.get(self)?, false)); + let size = self.size_of_sized(ty, locals, "offset ptr type")? 
as u128; + let ans = ptr + offset * size; + destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]) + } + "assert_inhabited" | "assert_zero_valid" | "assert_uninit_valid" | "assume" => { + // FIXME: We should actually implement these checks + Ok(()) + } + "forget" => { + // We don't call any drop glue yet, so there is nothing here + Ok(()) + } + "transmute" => { + let [arg] = args else { + return Err(MirEvalError::TypeError("trasmute arg is not provided")); + }; + destination.write_from_interval(self, arg.interval) + } + "likely" | "unlikely" => { + let [arg] = args else { + return Err(MirEvalError::TypeError("likely arg is not provided")); + }; + destination.write_from_interval(self, arg.interval) + } + "const_eval_select" => { + let [tuple, const_fn, _] = args else { + return Err(MirEvalError::TypeError("const_eval_select args are not provided")); + }; + let mut args = vec![const_fn.clone()]; + let TyKind::Tuple(_, fields) = tuple.ty.kind(Interner) else { + return Err(MirEvalError::TypeError("const_eval_select arg[0] is not a tuple")); + }; + let layout = self.layout(&tuple.ty)?; + for (i, field) in fields.iter(Interner).enumerate() { + let field = field.assert_ty_ref(Interner).clone(); + let offset = layout.fields.offset(i).bytes_usize(); + let addr = tuple.interval.addr.offset(offset); + args.push(IntervalAndTy::new(addr, field, self, locals)?); + } + self.exec_fn_trait(&args, destination, locals, span) + } + _ => not_supported!("unknown intrinsic {as_str}"), + } + } +} diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index 37b27b398342..733f58e8f6b0 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -1,14 +1,15 @@ //! This module generates a polymorphic MIR from a hir body -use std::{iter, mem, sync::Arc}; +use std::{fmt::Write, iter, mem, sync::Arc}; +use base_db::FileId; use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind}; use hir_def::{ body::Body, data::adt::{StructKind, VariantData}, hir::{ - Array, BindingAnnotation, BindingId, ExprId, LabelId, Literal, MatchArm, Pat, PatId, - RecordFieldPat, RecordLitField, + ArithOp, Array, BinaryOp, BindingAnnotation, BindingId, ExprId, LabelId, Literal, MatchArm, + Pat, PatId, RecordFieldPat, RecordLitField, }, lang_item::{LangItem, LangItemTarget}, path::Path, @@ -18,6 +19,7 @@ use hir_def::{ use hir_expand::name::Name; use la_arena::ArenaMap; use rustc_hash::FxHashMap; +use syntax::TextRange; use crate::{ consteval::ConstEvalError, @@ -27,8 +29,9 @@ use crate::{ inhabitedness::is_ty_uninhabited_from, layout::{layout_of_ty, LayoutError}, mapping::ToChalk, + method_resolution::lookup_impl_const, static_lifetime, - utils::generics, + utils::{generics, ClosureSubst}, Adjust, Adjustment, AutoBorrow, CallableDefId, TyBuilder, TyExt, }; @@ -62,14 +65,14 @@ struct MirLowerCtx<'a> { #[derive(Debug, Clone, PartialEq, Eq)] pub enum MirLowerError { - ConstEvalError(Box), + ConstEvalError(String, Box), LayoutError(LayoutError), IncompleteExpr, /// Trying to lower a trait function, instead of an implementation TraitFunctionDefinition(TraitId, Name), UnresolvedName(String), RecordLiteralWithoutPath, - UnresolvedMethod, + UnresolvedMethod(String), UnresolvedField, MissingFunctionDefinition, TypeMismatch(TypeMismatch), @@ -88,6 +91,46 @@ pub enum MirLowerError { UnaccessableLocal, } +impl MirLowerError { + pub fn pretty_print( + &self, + f: &mut String, + db: &dyn HirDatabase, + span_formatter: impl Fn(FileId, TextRange) -> String, + ) -> std::result::Result<(), 
std::fmt::Error> { + match self { + MirLowerError::ConstEvalError(name, e) => { + writeln!(f, "In evaluating constant {name}")?; + match &**e { + ConstEvalError::MirLowerError(e) => e.pretty_print(f, db, span_formatter)?, + ConstEvalError::MirEvalError(e) => e.pretty_print(f, db, span_formatter)?, + } + } + MirLowerError::LayoutError(_) + | MirLowerError::IncompleteExpr + | MirLowerError::UnaccessableLocal + | MirLowerError::TraitFunctionDefinition(_, _) + | MirLowerError::UnresolvedName(_) + | MirLowerError::RecordLiteralWithoutPath + | MirLowerError::UnresolvedMethod(_) + | MirLowerError::UnresolvedField + | MirLowerError::MissingFunctionDefinition + | MirLowerError::TypeMismatch(_) + | MirLowerError::TypeError(_) + | MirLowerError::NotSupported(_) + | MirLowerError::ContinueWithoutLoop + | MirLowerError::BreakWithoutLoop + | MirLowerError::Loop + | MirLowerError::ImplementationError(_) + | MirLowerError::LangItemNotFound(_) + | MirLowerError::MutatingRvalue + | MirLowerError::UnresolvedLabel + | MirLowerError::UnresolvedUpvar(_) => writeln!(f, "{:?}", self)?, + } + Ok(()) + } +} + macro_rules! not_supported { ($x: expr) => { return Err(MirLowerError::NotSupported(format!($x))) @@ -101,15 +144,6 @@ macro_rules! implementation_error { }}; } -impl From for MirLowerError { - fn from(value: ConstEvalError) -> Self { - match value { - ConstEvalError::MirLowerError(e) => e, - _ => MirLowerError::ConstEvalError(Box::new(value)), - } - } -} - impl From for MirLowerError { fn from(value: LayoutError) -> Self { MirLowerError::LayoutError(value) @@ -177,7 +211,7 @@ impl<'ctx> MirLowerCtx<'ctx> { if !self.has_adjustments(expr_id) { match &self.body.exprs[expr_id] { Expr::Literal(l) => { - let ty = self.expr_ty(expr_id); + let ty = self.expr_ty_without_adjust(expr_id); return Ok(Some((self.lower_literal_to_operand(ty, l)?, current))); } _ => (), @@ -282,7 +316,7 @@ impl<'ctx> MirLowerCtx<'ctx> { { match assoc { hir_def::AssocItemId::ConstId(c) => { - self.lower_const(c, current, place, subst, expr_id.into())?; + self.lower_const(c, current, place, subst, expr_id.into(), self.expr_ty_without_adjust(expr_id))?; return Ok(Some(current)) }, hir_def::AssocItemId::FunctionId(_) => { @@ -309,17 +343,20 @@ impl<'ctx> MirLowerCtx<'ctx> { } }; match pr { - ValueNs::LocalBinding(pat_id) => { + ValueNs::LocalBinding(_) | ValueNs::StaticId(_) => { + let Some((temp, current)) = self.lower_expr_as_place_without_adjust(current, expr_id, false)? 
else { + return Ok(None); + }; self.push_assignment( current, place, - Operand::Copy(self.binding_local(pat_id)?.into()).into(), + Operand::Copy(temp).into(), expr_id.into(), ); Ok(Some(current)) } ValueNs::ConstId(const_id) => { - self.lower_const(const_id, current, place, Substitution::empty(Interner), expr_id.into())?; + self.lower_const(const_id, current, place, Substitution::empty(Interner), expr_id.into(), self.expr_ty_without_adjust(expr_id))?; Ok(Some(current)) } ValueNs::EnumVariantId(variant_id) => { @@ -343,7 +380,7 @@ impl<'ctx> MirLowerCtx<'ctx> { not_supported!("owner without generic def id"); }; let gen = generics(self.db.upcast(), def); - let ty = self.expr_ty(expr_id); + let ty = self.expr_ty_without_adjust(expr_id); self.push_assignment( current, place, @@ -388,12 +425,13 @@ impl<'ctx> MirLowerCtx<'ctx> { }; self.set_terminator( current, - Terminator::SwitchInt { + TerminatorKind::SwitchInt { discr, targets: SwitchTargets::static_if(1, start_of_then, start_of_else), }, + expr_id.into(), ); - Ok(self.merge_blocks(end_of_then, end_of_else)) + Ok(self.merge_blocks(end_of_then, end_of_else, expr_id.into())) } Expr::Let { pat, expr } => { let Some((cond_place, current)) = self.lower_expr_as_place(current, *expr, true)? else { @@ -423,32 +461,32 @@ impl<'ctx> MirLowerCtx<'ctx> { MirSpan::Unknown, )?; } - Ok(self.merge_blocks(Some(then_target), else_target)) + Ok(self.merge_blocks(Some(then_target), else_target, expr_id.into())) } Expr::Unsafe { id: _, statements, tail } => { - self.lower_block_to_place(statements, current, *tail, place) + self.lower_block_to_place(statements, current, *tail, place, expr_id.into()) } Expr::Block { id: _, statements, tail, label } => { if let Some(label) = label { - self.lower_loop(current, place.clone(), Some(*label), |this, begin| { - if let Some(block) = this.lower_block_to_place(statements, begin, *tail, place)? { + self.lower_loop(current, place.clone(), Some(*label), expr_id.into(), |this, begin| { + if let Some(block) = this.lower_block_to_place(statements, begin, *tail, place, expr_id.into())? { let end = this.current_loop_end()?; - this.set_goto(block, end); + this.set_goto(block, end, expr_id.into()); } Ok(()) }) } else { - self.lower_block_to_place(statements, current, *tail, place) + self.lower_block_to_place(statements, current, *tail, place, expr_id.into()) } } - Expr::Loop { body, label } => self.lower_loop(current, place, *label, |this, begin| { + Expr::Loop { body, label } => self.lower_loop(current, place, *label, expr_id.into(), |this, begin| { if let Some((_, block)) = this.lower_expr_as_place(begin, *body, true)? { - this.set_goto(block, begin); + this.set_goto(block, begin, expr_id.into()); } Ok(()) }), Expr::While { condition, body, label } => { - self.lower_loop(current, place, *label, |this, begin| { + self.lower_loop(current, place, *label, expr_id.into(),|this, begin| { let Some((discr, to_switch)) = this.lower_expr_to_some_operand(*condition, begin)? else { return Ok(()); }; @@ -456,13 +494,14 @@ impl<'ctx> MirLowerCtx<'ctx> { let after_cond = this.new_basic_block(); this.set_terminator( to_switch, - Terminator::SwitchInt { + TerminatorKind::SwitchInt { discr, targets: SwitchTargets::static_if(1, after_cond, end), }, + expr_id.into(), ); if let Some((_, block)) = this.lower_expr_as_place(after_cond, *body, true)? 
{ - this.set_goto(block, begin); + this.set_goto(block, begin, expr_id.into()); } Ok(()) }) @@ -478,12 +517,12 @@ impl<'ctx> MirLowerCtx<'ctx> { let into_iter_fn_op = Operand::const_zst( TyKind::FnDef( self.db.intern_callable_def(CallableDefId::FunctionId(into_iter_fn)).into(), - Substitution::from1(Interner, self.expr_ty(iterable)) + Substitution::from1(Interner, self.expr_ty_without_adjust(iterable)) ).intern(Interner)); let iter_next_fn_op = Operand::const_zst( TyKind::FnDef( self.db.intern_callable_def(CallableDefId::FunctionId(iter_next_fn)).into(), - Substitution::from1(Interner, self.expr_ty(iterable)) + Substitution::from1(Interner, self.expr_ty_without_adjust(iterable)) ).intern(Interner)); let &Some(iterator_ty) = &self.infer.type_of_for_iterator.get(&expr_id) else { return Err(MirLowerError::TypeError("unknown for loop iterator type")); @@ -494,13 +533,13 @@ impl<'ctx> MirLowerCtx<'ctx> { let iterator_place: Place = self.temp(iterator_ty.clone())?.into(); let option_item_place: Place = self.temp(option_item_ty.clone())?.into(); let ref_mut_iterator_place: Place = self.temp(ref_mut_iterator_ty)?.into(); - let Some(current) = self.lower_call_and_args(into_iter_fn_op, Some(iterable).into_iter(), iterator_place.clone(), current, false)? + let Some(current) = self.lower_call_and_args(into_iter_fn_op, Some(iterable).into_iter(), iterator_place.clone(), current, false, expr_id.into())? else { return Ok(None); }; self.push_assignment(current, ref_mut_iterator_place.clone(), Rvalue::Ref(BorrowKind::Mut { allow_two_phase_borrow: false }, iterator_place), expr_id.into()); - self.lower_loop(current, place, label, |this, begin| { - let Some(current) = this.lower_call(iter_next_fn_op, vec![Operand::Copy(ref_mut_iterator_place)], option_item_place.clone(), begin, false)? + self.lower_loop(current, place, label, expr_id.into(), |this, begin| { + let Some(current) = this.lower_call(iter_next_fn_op, vec![Operand::Copy(ref_mut_iterator_place)], option_item_place.clone(), begin, false, expr_id.into())? else { return Ok(()); }; @@ -516,7 +555,7 @@ impl<'ctx> MirLowerCtx<'ctx> { AdtPatternShape::Tuple { args: &[pat], ellipsis: None }, )?; if let Some((_, block)) = this.lower_expr_as_place(current, body, true)? { - this.set_goto(block, begin); + this.set_goto(block, begin, expr_id.into()); } Ok(()) }) @@ -536,39 +575,36 @@ impl<'ctx> MirLowerCtx<'ctx> { place, current, self.is_uninhabited(expr_id), + expr_id.into(), ); } let callee_ty = self.expr_ty_after_adjustments(*callee); match &callee_ty.data(Interner).kind { chalk_ir::TyKind::FnDef(..) => { let func = Operand::from_bytes(vec![], callee_ty.clone()); - self.lower_call_and_args(func, args.iter().copied(), place, current, self.is_uninhabited(expr_id)) + self.lower_call_and_args(func, args.iter().copied(), place, current, self.is_uninhabited(expr_id), expr_id.into()) } chalk_ir::TyKind::Function(_) => { let Some((func, current)) = self.lower_expr_to_some_operand(*callee, current)? else { return Ok(None); }; - self.lower_call_and_args(func, args.iter().copied(), place, current, self.is_uninhabited(expr_id)) + self.lower_call_and_args(func, args.iter().copied(), place, current, self.is_uninhabited(expr_id), expr_id.into()) } TyKind::Error => return Err(MirLowerError::MissingFunctionDefinition), _ => return Err(MirLowerError::TypeError("function call on bad type")), } } - Expr::MethodCall { receiver, args, .. } => { + Expr::MethodCall { receiver, args, method_name, .. 
} => { let (func_id, generic_args) = - self.infer.method_resolution(expr_id).ok_or(MirLowerError::UnresolvedMethod)?; - let ty = chalk_ir::TyKind::FnDef( - CallableDefId::FunctionId(func_id).to_chalk(self.db), - generic_args, - ) - .intern(Interner); - let func = Operand::from_bytes(vec![], ty); + self.infer.method_resolution(expr_id).ok_or_else(|| MirLowerError::UnresolvedMethod(format!("{}", method_name)))?; + let func = Operand::from_fn(self.db, func_id, generic_args); self.lower_call_and_args( func, iter::once(*receiver).chain(args.iter().copied()), place, current, self.is_uninhabited(expr_id), + expr_id.into(), ) } Expr::Match { expr, arms } => { @@ -591,7 +627,7 @@ impl<'ctx> MirLowerCtx<'ctx> { let next = self.new_basic_block(); let o = otherwise.get_or_insert_with(|| self.new_basic_block()); if let Some((discr, c)) = self.lower_expr_to_some_operand(guard, then)? { - self.set_terminator(c, Terminator::SwitchInt { discr, targets: SwitchTargets::static_if(1, next, *o) }); + self.set_terminator(c, TerminatorKind::SwitchInt { discr, targets: SwitchTargets::static_if(1, next, *o) }, expr_id.into()); } next } else { @@ -599,7 +635,7 @@ impl<'ctx> MirLowerCtx<'ctx> { }; if let Some(block) = self.lower_expr_to_place(*expr, place.clone(), then)? { let r = end.get_or_insert_with(|| self.new_basic_block()); - self.set_goto(block, *r); + self.set_goto(block, *r, expr_id.into()); } match otherwise { Some(o) => current = o, @@ -611,18 +647,17 @@ impl<'ctx> MirLowerCtx<'ctx> { } } if self.is_unterminated(current) { - self.set_terminator(current, Terminator::Unreachable); + self.set_terminator(current, TerminatorKind::Unreachable, expr_id.into()); } Ok(end) } - Expr::Continue { label } => match label { - Some(_) => not_supported!("continue with label"), - None => { - let loop_data = - self.current_loop_blocks.as_ref().ok_or(MirLowerError::ContinueWithoutLoop)?; - self.set_goto(current, loop_data.begin); - Ok(None) - } + Expr::Continue { label } => { + let loop_data = match label { + Some(l) => self.labeled_loop_blocks.get(l).ok_or(MirLowerError::UnresolvedLabel)?, + None => self.current_loop_blocks.as_ref().ok_or(MirLowerError::ContinueWithoutLoop)?, + }; + self.set_goto(current, loop_data.begin, expr_id.into()); + Ok(None) }, &Expr::Break { expr, label } => { if let Some(expr) = expr { @@ -639,7 +674,7 @@ impl<'ctx> MirLowerCtx<'ctx> { Some(l) => self.labeled_loop_blocks.get(&l).ok_or(MirLowerError::UnresolvedLabel)?.end.expect("We always generate end for labeled loops"), None => self.current_loop_end()?, }; - self.set_goto(current, end); + self.set_goto(current, end, expr_id.into()); Ok(None) } Expr::Return { expr } => { @@ -650,7 +685,7 @@ impl<'ctx> MirLowerCtx<'ctx> { return Ok(None); } } - self.set_terminator(current, Terminator::Return); + self.set_terminator(current, TerminatorKind::Return, expr_id.into()); Ok(None) } Expr::Yield { .. } => not_supported!("yield"), @@ -672,7 +707,7 @@ impl<'ctx> MirLowerCtx<'ctx> { Some(p) => MirLowerError::UnresolvedName(p.display(self.db).to_string()), None => MirLowerError::RecordLiteralWithoutPath, })?; - let subst = match self.expr_ty(expr_id).kind(Interner) { + let subst = match self.expr_ty_without_adjust(expr_id).kind(Interner) { TyKind::Adt(_, s) => s.clone(), _ => not_supported!("Non ADT record literal"), }; @@ -757,7 +792,17 @@ impl<'ctx> MirLowerCtx<'ctx> { self.push_assignment(current, place, Rvalue::Ref(bk, p), expr_id.into()); Ok(Some(current)) } - Expr::Box { .. 
} => not_supported!("box expression"), + Expr::Box { expr } => { + let ty = self.expr_ty_after_adjustments(*expr); + self.push_assignment(current, place.clone(), Rvalue::ShallowInitBoxWithAlloc(ty), expr_id.into()); + let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)? else { + return Ok(None); + }; + let mut p = place; + p.projection.push(ProjectionElem::Deref); + self.push_assignment(current, p, operand.into(), expr_id.into()); + Ok(Some(current)) + }, Expr::Field { .. } | Expr::Index { .. } | Expr::UnaryOp { op: hir_def::hir::UnaryOp::Deref, .. } => { let Some((p, current)) = self.lower_expr_as_place_without_adjust(current, expr_id, true)? else { return Ok(None); @@ -784,20 +829,63 @@ impl<'ctx> MirLowerCtx<'ctx> { }, Expr::BinaryOp { lhs, rhs, op } => { let op = op.ok_or(MirLowerError::IncompleteExpr)?; - if let hir_def::hir::BinaryOp::Assignment { op } = op { - if op.is_some() { - not_supported!("assignment with arith op (like +=)"); + let is_builtin = { + // Without adjust here is a hack. We assume that we know every possible adjustment + // for binary operator, and use without adjust to simplify our conditions. + let lhs_ty = self.expr_ty_without_adjust(*lhs); + let rhs_ty = self.expr_ty_without_adjust(*rhs); + let builtin_inequal_impls = matches!( + op, + BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) | BinaryOp::Assignment { op: Some(ArithOp::Shl | ArithOp::Shr) } + ); + lhs_ty.as_builtin().is_some() && rhs_ty.as_builtin().is_some() && (lhs_ty == rhs_ty || builtin_inequal_impls) + }; + if !is_builtin { + if let Some((func_id, generic_args)) = self.infer.method_resolution(expr_id) { + let func = Operand::from_fn(self.db, func_id, generic_args); + return self.lower_call_and_args( + func, + [*lhs, *rhs].into_iter(), + place, + current, + self.is_uninhabited(expr_id), + expr_id.into(), + ); } - let Some((lhs_place, current)) = + } + if let hir_def::hir::BinaryOp::Assignment { op } = op { + if let Some(op) = op { + // last adjustment is `&mut` which we don't want it. + let adjusts = self + .infer + .expr_adjustments + .get(lhs) + .and_then(|x| x.split_last()) + .map(|x| x.1) + .ok_or(MirLowerError::TypeError("adjustment of binary op was missing"))?; + let Some((lhs_place, current)) = + self.lower_expr_as_place_with_adjust(current, *lhs, false, adjusts)? + else { + return Ok(None); + }; + let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else { + return Ok(None); + }; + let r_value = Rvalue::CheckedBinaryOp(op.into(), Operand::Copy(lhs_place.clone()), rhs_op); + self.push_assignment(current, lhs_place, r_value, expr_id.into()); + return Ok(Some(current)); + } else { + let Some((lhs_place, current)) = self.lower_expr_as_place(current, *lhs, false)? - else { - return Ok(None); - }; - let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else { - return Ok(None); - }; - self.push_assignment(current, lhs_place, rhs_op.into(), expr_id.into()); - return Ok(Some(current)); + else { + return Ok(None); + }; + let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else { + return Ok(None); + }; + self.push_assignment(current, lhs_place, rhs_op.into(), expr_id.into()); + return Ok(Some(current)); + } } let Some((lhs_op, current)) = self.lower_expr_to_some_operand(*lhs, current)? 
else { return Ok(None); @@ -826,7 +914,7 @@ impl<'ctx> MirLowerCtx<'ctx> { Ok(Some(current)) } &Expr::Range { lhs, rhs, range_type: _ } => { - let ty = self.expr_ty(expr_id); + let ty = self.expr_ty_without_adjust(expr_id); let Some((adt, subst)) = ty.as_adt() else { return Err(MirLowerError::TypeError("Range type is not adt")); }; @@ -869,7 +957,7 @@ impl<'ctx> MirLowerCtx<'ctx> { Ok(Some(current)) }, Expr::Closure { .. } => { - let ty = self.expr_ty(expr_id); + let ty = self.expr_ty_without_adjust(expr_id); let TyKind::Closure(id, _) = ty.kind(Interner) else { not_supported!("closure with non closure type"); }; @@ -893,7 +981,12 @@ impl<'ctx> MirLowerCtx<'ctx> { }; match &capture.kind { CaptureKind::ByRef(bk) => { - let tmp: Place = self.temp(capture.ty.clone())?.into(); + let placeholder_subst = match self.owner.as_generic_def_id() { + Some(x) => TyBuilder::placeholder_subst(self.db, x), + None => Substitution::empty(Interner), + }; + let tmp_ty = capture.ty.clone().substitute(Interner, &placeholder_subst); + let tmp: Place = self.temp(tmp_ty)?.into(); self.push_assignment( current, tmp.clone(), @@ -928,7 +1021,7 @@ impl<'ctx> MirLowerCtx<'ctx> { return Ok(None); }; let r = Rvalue::Aggregate( - AggregateKind::Tuple(self.expr_ty(expr_id)), + AggregateKind::Tuple(self.expr_ty_without_adjust(expr_id)), values, ); self.push_assignment(current, place, r, expr_id.into()); @@ -936,7 +1029,7 @@ impl<'ctx> MirLowerCtx<'ctx> { } Expr::Array(l) => match l { Array::ElementList { elements, .. } => { - let elem_ty = match &self.expr_ty(expr_id).data(Interner).kind { + let elem_ty = match &self.expr_ty_without_adjust(expr_id).data(Interner).kind { TyKind::Array(ty, _) => ty.clone(), _ => { return Err(MirLowerError::TypeError( @@ -968,7 +1061,7 @@ impl<'ctx> MirLowerCtx<'ctx> { let Some((init, current)) = self.lower_expr_to_some_operand(*initializer, current)? else { return Ok(None); }; - let len = match &self.expr_ty(expr_id).data(Interner).kind { + let len = match &self.expr_ty_without_adjust(expr_id).data(Interner).kind { TyKind::Array(_, len) => len.clone(), _ => { return Err(MirLowerError::TypeError( @@ -982,7 +1075,7 @@ impl<'ctx> MirLowerCtx<'ctx> { }, }, Expr::Literal(l) => { - let ty = self.expr_ty(expr_id); + let ty = self.expr_ty_without_adjust(expr_id); let op = self.lower_literal_to_operand(ty, l)?; self.push_assignment(current, place, op.into(), expr_id.into()); Ok(Some(current)) @@ -1057,8 +1150,30 @@ impl<'ctx> MirLowerCtx<'ctx> { place: Place, subst: Substitution, span: MirSpan, + ty: Ty, ) -> Result<()> { - let c = self.db.const_eval(const_id, subst)?; + let c = if subst.len(Interner) != 0 { + // We can't evaluate constant with substitution now, as generics are not monomorphized in lowering. + intern_const_scalar(ConstScalar::UnevaluatedConst(const_id, subst), ty) + } else { + let (const_id, subst) = lookup_impl_const( + self.db, + self.db.trait_environment_for_body(self.owner), + const_id, + subst, + ); + let name = self + .db + .const_data(const_id) + .name + .as_ref() + .and_then(|x| x.as_str()) + .unwrap_or("_") + .to_owned(); + self.db + .const_eval(const_id.into(), subst) + .map_err(|e| MirLowerError::ConstEvalError(name, Box::new(e)))? 
+ }; self.write_const_to_place(c, prev_block, place, span) } @@ -1114,6 +1229,7 @@ impl<'ctx> MirLowerCtx<'ctx> { place: Place, mut current: BasicBlockId, is_uninhabited: bool, + span: MirSpan, ) -> Result> { let Some(args) = args .map(|arg| { @@ -1128,7 +1244,7 @@ impl<'ctx> MirLowerCtx<'ctx> { else { return Ok(None); }; - self.lower_call(func, args, place, current, is_uninhabited) + self.lower_call(func, args, place, current, is_uninhabited, span) } fn lower_call( @@ -1138,11 +1254,12 @@ impl<'ctx> MirLowerCtx<'ctx> { place: Place, current: BasicBlockId, is_uninhabited: bool, + span: MirSpan, ) -> Result> { let b = if is_uninhabited { None } else { Some(self.new_basic_block()) }; self.set_terminator( current, - Terminator::Call { + TerminatorKind::Call { func, args, destination: place, @@ -1150,6 +1267,7 @@ impl<'ctx> MirLowerCtx<'ctx> { cleanup: None, from_hir_call: true, }, + span, ); Ok(b) } @@ -1158,15 +1276,15 @@ impl<'ctx> MirLowerCtx<'ctx> { self.result.basic_blocks[source].terminator.is_none() } - fn set_terminator(&mut self, source: BasicBlockId, terminator: Terminator) { - self.result.basic_blocks[source].terminator = Some(terminator); + fn set_terminator(&mut self, source: BasicBlockId, terminator: TerminatorKind, span: MirSpan) { + self.result.basic_blocks[source].terminator = Some(Terminator { span, kind: terminator }); } - fn set_goto(&mut self, source: BasicBlockId, target: BasicBlockId) { - self.set_terminator(source, Terminator::Goto { target }); + fn set_goto(&mut self, source: BasicBlockId, target: BasicBlockId, span: MirSpan) { + self.set_terminator(source, TerminatorKind::Goto { target }, span); } - fn expr_ty(&self, e: ExprId) -> Ty { + fn expr_ty_without_adjust(&self, e: ExprId) -> Ty { self.infer[e].clone() } @@ -1177,7 +1295,7 @@ impl<'ctx> MirLowerCtx<'ctx> { ty = Some(x.target.clone()); } } - ty.unwrap_or_else(|| self.expr_ty(e)) + ty.unwrap_or_else(|| self.expr_ty_without_adjust(e)) } fn push_statement(&mut self, block: BasicBlockId, statement: Statement) { @@ -1211,6 +1329,7 @@ impl<'ctx> MirLowerCtx<'ctx> { prev_block: BasicBlockId, place: Place, label: Option, + span: MirSpan, f: impl FnOnce(&mut MirLowerCtx<'_>, BasicBlockId) -> Result<()>, ) -> Result> { let begin = self.new_basic_block(); @@ -1228,7 +1347,7 @@ impl<'ctx> MirLowerCtx<'ctx> { } else { None }; - self.set_goto(prev_block, begin); + self.set_goto(prev_block, begin, span); f(self, begin)?; let my = mem::replace(&mut self.current_loop_blocks, prev).ok_or( MirLowerError::ImplementationError("current_loop_blocks is corrupt".to_string()), @@ -1247,14 +1366,15 @@ impl<'ctx> MirLowerCtx<'ctx> { &mut self, b1: Option, b2: Option, + span: MirSpan, ) -> Option { match (b1, b2) { (None, None) => None, (None, Some(b)) | (Some(b), None) => Some(b), (Some(b1), Some(b2)) => { let bm = self.new_basic_block(); - self.set_goto(b1, bm); - self.set_goto(b2, bm); + self.set_goto(b1, bm, span); + self.set_goto(b2, bm, span); Some(bm) } } @@ -1332,6 +1452,7 @@ impl<'ctx> MirLowerCtx<'ctx> { mut current: BasicBlockId, tail: Option, place: Place, + span: MirSpan, ) -> Result>> { for statement in statements.iter() { match statement { @@ -1355,13 +1476,13 @@ impl<'ctx> MirLowerCtx<'ctx> { match (else_block, else_branch) { (None, _) => (), (Some(else_block), None) => { - self.set_terminator(else_block, Terminator::Unreachable); + self.set_terminator(else_block, TerminatorKind::Unreachable, span); } (Some(else_block), Some(else_branch)) => { if let Some((_, b)) = self.lower_expr_as_place(else_block, *else_branch, true)? 
{ - self.set_terminator(b, Terminator::Unreachable); + self.set_terminator(b, TerminatorKind::Unreachable, span); } } } @@ -1438,7 +1559,7 @@ impl<'ctx> MirLowerCtx<'ctx> { BindingAnnotation::Unannotated, )?; if let Some(b) = r.1 { - self.set_terminator(b, Terminator::Unreachable); + self.set_terminator(b, TerminatorKind::Unreachable, param.into()); } current = r.0; } @@ -1456,6 +1577,18 @@ impl<'ctx> MirLowerCtx<'ctx> { } } } + + fn const_eval_discriminant(&self, variant: EnumVariantId) -> Result { + let r = self.db.const_eval_discriminant(variant); + match r { + Ok(r) => Ok(r), + Err(e) => { + let data = self.db.enum_data(variant.parent); + let name = format!("{}::{}", data.name, data.variants[variant.local_id].name); + Err(MirLowerError::ConstEvalError(name, Box::new(e))) + } + } + } } fn cast_kind(source_ty: &Ty, target_ty: &Ty) -> Result { @@ -1498,7 +1631,7 @@ pub fn mir_body_for_closure_query( // 0 is return local ctx.result.locals.alloc(Local { ty: infer[*root].clone() }); ctx.result.locals.alloc(Local { ty: infer[expr].clone() }); - let Some(sig) = substs.at(Interner, 0).assert_ty_ref(Interner).callable_sig(db) else { + let Some(sig) = ClosureSubst(substs).sig_ty().callable_sig(db) else { implementation_error!("closure has not callable sig"); }; let current = ctx.lower_params_and_bindings( @@ -1506,7 +1639,7 @@ pub fn mir_body_for_closure_query( |_| true, )?; if let Some(b) = ctx.lower_expr_to_place(*root, return_slot().into(), current)? { - ctx.set_terminator(b, Terminator::Return); + ctx.set_terminator(b, TerminatorKind::Return, (*root).into()); } let mut upvar_map: FxHashMap> = FxHashMap::default(); for (i, capture) in captures.iter().enumerate() { @@ -1628,7 +1761,7 @@ pub fn lower_to_mir( ctx.lower_params_and_bindings([].into_iter(), binding_picker)? }; if let Some(b) = ctx.lower_expr_to_place(root_expr, return_slot().into(), current)? { - ctx.set_terminator(b, Terminator::Return); + ctx.set_terminator(b, TerminatorKind::Return, root_expr.into()); } Ok(ctx.result) } diff --git a/crates/hir-ty/src/mir/lower/as_place.rs b/crates/hir-ty/src/mir/lower/as_place.rs index a1574f559369..8d8870da0d4f 100644 --- a/crates/hir-ty/src/mir/lower/as_place.rs +++ b/crates/hir-ty/src/mir/lower/as_place.rs @@ -1,7 +1,7 @@ //! MIR lowering for places use super::*; -use hir_def::FunctionId; +use hir_def::{lang_item::lang_attr, FunctionId}; use hir_expand::name; macro_rules! not_supported { @@ -16,7 +16,7 @@ impl MirLowerCtx<'_> { expr_id: ExprId, prev_block: BasicBlockId, ) -> Result> { - let ty = self.expr_ty(expr_id); + let ty = self.expr_ty_without_adjust(expr_id); let place = self.temp(ty)?; let Some(current) = self.lower_expr_to_place_without_adjust(expr_id, place.into(), prev_block)? else { return Ok(None); @@ -30,8 +30,10 @@ impl MirLowerCtx<'_> { prev_block: BasicBlockId, adjustments: &[Adjustment], ) -> Result> { - let ty = - adjustments.last().map(|x| x.target.clone()).unwrap_or_else(|| self.expr_ty(expr_id)); + let ty = adjustments + .last() + .map(|x| x.target.clone()) + .unwrap_or_else(|| self.expr_ty_without_adjust(expr_id)); let place = self.temp(ty)?; let Some(current) = self.lower_expr_to_place_with_adjust(expr_id, place.into(), prev_block, adjustments)? 
else { return Ok(None); @@ -80,7 +82,7 @@ impl MirLowerCtx<'_> { r, rest.last() .map(|x| x.target.clone()) - .unwrap_or_else(|| self.expr_ty(expr_id)), + .unwrap_or_else(|| self.expr_ty_without_adjust(expr_id)), last.target.clone(), expr_id.into(), match od.0 { @@ -135,17 +137,39 @@ impl MirLowerCtx<'_> { }; match pr { ValueNs::LocalBinding(pat_id) => { - Ok(Some((self.result.binding_locals[pat_id].into(), current))) + Ok(Some((self.binding_local(pat_id)?.into(), current))) + } + ValueNs::StaticId(s) => { + let ty = self.expr_ty_without_adjust(expr_id); + let ref_ty = + TyKind::Ref(Mutability::Not, static_lifetime(), ty).intern(Interner); + let mut temp: Place = self.temp(ref_ty)?.into(); + self.push_assignment( + current, + temp.clone(), + Operand::Static(s).into(), + expr_id.into(), + ); + temp.projection.push(ProjectionElem::Deref); + Ok(Some((temp, current))) } _ => try_rvalue(self), } } Expr::UnaryOp { expr, op } => match op { hir_def::hir::UnaryOp::Deref => { - if !matches!( - self.expr_ty(*expr).kind(Interner), - TyKind::Ref(..) | TyKind::Raw(..) - ) { + let is_builtin = match self.expr_ty_without_adjust(*expr).kind(Interner) { + TyKind::Ref(..) | TyKind::Raw(..) => true, + TyKind::Adt(id, _) => { + if let Some(lang_item) = lang_attr(self.db.upcast(), id.0) { + lang_item == LangItem::OwnedBox + } else { + false + } + } + _ => false, + }; + if !is_builtin { let Some((p, current)) = self.lower_expr_as_place(current, *expr, true)? else { return Ok(None); }; @@ -153,7 +177,7 @@ impl MirLowerCtx<'_> { current, p, self.expr_ty_after_adjustments(*expr), - self.expr_ty(expr_id), + self.expr_ty_without_adjust(expr_id), expr_id.into(), 'b: { if let Some((f, _)) = self.infer.method_resolution(expr_id) { @@ -198,7 +222,7 @@ impl MirLowerCtx<'_> { ) { let Some(index_fn) = self.infer.method_resolution(expr_id) else { - return Err(MirLowerError::UnresolvedMethod); + return Err(MirLowerError::UnresolvedMethod("[overloaded index]".to_string())); }; let Some((base_place, current)) = self.lower_expr_as_place(current, *base, true)? else { return Ok(None); @@ -210,7 +234,7 @@ impl MirLowerCtx<'_> { current, base_place, base_ty, - self.expr_ty(expr_id), + self.expr_ty_without_adjust(expr_id), index_operand, expr_id.into(), index_fn, @@ -266,7 +290,7 @@ impl MirLowerCtx<'_> { ) .intern(Interner), ); - let Some(current) = self.lower_call(index_fn_op, vec![Operand::Copy(ref_place), index_operand], result.clone(), current, false)? else { + let Some(current) = self.lower_call(index_fn_op, vec![Operand::Copy(ref_place), index_operand], result.clone(), current, false, span)? else { return Ok(None); }; result.projection.push(ProjectionElem::Deref); @@ -313,7 +337,7 @@ impl MirLowerCtx<'_> { .intern(Interner), ); let mut result: Place = self.temp(target_ty_ref)?.into(); - let Some(current) = self.lower_call(deref_fn_op, vec![Operand::Copy(ref_place)], result.clone(), current, false)? else { + let Some(current) = self.lower_call(deref_fn_op, vec![Operand::Copy(ref_place)], result.clone(), current, false, span)? else { return Ok(None); }; result.projection.push(ProjectionElem::Deref); diff --git a/crates/hir-ty/src/mir/lower/pattern_matching.rs b/crates/hir-ty/src/mir/lower/pattern_matching.rs index 86c3ce9eec98..97aa3b0f2150 100644 --- a/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -1,5 +1,7 @@ //! 
MIR lowering for patterns +use hir_def::resolver::HasResolver; + use crate::utils::pattern_matching_dereference_count; use super::*; @@ -72,7 +74,7 @@ impl MirLowerCtx<'_> { *pat, binding_mode, )?; - self.set_goto(next, then_target); + self.set_goto(next, then_target, pattern.into()); match next_else { Some(t) => { current = t; @@ -85,13 +87,13 @@ impl MirLowerCtx<'_> { } if !finished { let ce = *current_else.get_or_insert_with(|| self.new_basic_block()); - self.set_goto(current, ce); + self.set_goto(current, ce, pattern.into()); } (then_target, current_else) } Pat::Record { args, .. } => { let Some(variant) = self.infer.variant_resolution_for_pat(pattern) else { - not_supported!("unresolved variant"); + not_supported!("unresolved variant for record"); }; self.pattern_matching_variant( cond_ty, @@ -106,11 +108,8 @@ impl MirLowerCtx<'_> { } Pat::Range { .. } => not_supported!("range pattern"), Pat::Slice { .. } => not_supported!("slice pattern"), - Pat::Path(_) => { - let Some(variant) = self.infer.variant_resolution_for_pat(pattern) else { - not_supported!("unresolved variant"); - }; - self.pattern_matching_variant( + Pat::Path(p) => match self.infer.variant_resolution_for_pat(pattern) { + Some(variant) => self.pattern_matching_variant( cond_ty, binding_mode, cond_place, @@ -119,8 +118,60 @@ impl MirLowerCtx<'_> { pattern.into(), current_else, AdtPatternShape::Unit, - )? - } + )?, + None => { + let unresolved_name = || MirLowerError::unresolved_path(self.db, p); + let resolver = self.owner.resolver(self.db.upcast()); + let pr = resolver + .resolve_path_in_value_ns(self.db.upcast(), p) + .ok_or_else(unresolved_name)?; + match pr { + ResolveValueResult::ValueNs(v) => match v { + ValueNs::ConstId(c) => { + let tmp: Place = self.temp(cond_ty.clone())?.into(); + let span = pattern.into(); + self.lower_const( + c, + current, + tmp.clone(), + Substitution::empty(Interner), + span, + cond_ty.clone(), + )?; + let tmp2: Place = self.temp(TyBuilder::bool())?.into(); + self.push_assignment( + current, + tmp2.clone(), + Rvalue::CheckedBinaryOp( + BinOp::Eq, + Operand::Copy(tmp), + Operand::Copy(cond_place), + ), + span, + ); + let next = self.new_basic_block(); + let else_target = + current_else.unwrap_or_else(|| self.new_basic_block()); + self.set_terminator( + current, + TerminatorKind::SwitchInt { + discr: Operand::Copy(tmp2), + targets: SwitchTargets::static_if(1, next, else_target), + }, + span, + ); + (next, Some(else_target)) + } + _ => not_supported!( + "path in pattern position that is not const or variant" + ), + }, + ResolveValueResult::Partial(_, _) => { + not_supported!("assoc const in patterns") + } + } + } + }, Pat::Lit(l) => match &self.body.exprs[*l] { Expr::Literal(l) => { let c = self.lower_literal_to_operand(cond_ty, l)?; @@ -218,10 +269,11 @@ impl MirLowerCtx<'_> { let discr = Operand::Copy(discr); self.set_terminator( current, - Terminator::SwitchInt { + TerminatorKind::SwitchInt { discr, targets: SwitchTargets::static_if(1, then_target, else_target), }, + pattern.into(), ); Ok((then_target, Some(else_target))) } @@ -244,8 +296,7 @@ impl MirLowerCtx<'_> { }; Ok(match variant { VariantId::EnumVariantId(v) => { - let e = self.db.const_eval_discriminant(v)? as u128; - let next = self.new_basic_block(); + let e = self.const_eval_discriminant(v)? 
as u128; let tmp = self.discr_temp_place(); self.push_assignment( current, @@ -253,13 +304,15 @@ impl MirLowerCtx<'_> { Rvalue::Discriminant(cond_place.clone()), span, ); + let next = self.new_basic_block(); let else_target = current_else.unwrap_or_else(|| self.new_basic_block()); self.set_terminator( current, - Terminator::SwitchInt { + TerminatorKind::SwitchInt { discr: Operand::Copy(tmp), targets: SwitchTargets::static_if(e, next, else_target), }, + span, ); let enum_data = self.db.enum_data(v.parent); self.pattern_matching_variant_fields( diff --git a/crates/hir-ty/src/mir/pretty.rs b/crates/hir-ty/src/mir/pretty.rs index a7dff3961351..257860968c46 100644 --- a/crates/hir-ty/src/mir/pretty.rs +++ b/crates/hir-ty/src/mir/pretty.rs @@ -11,19 +11,52 @@ use la_arena::ArenaMap; use crate::{ db::HirDatabase, - display::HirDisplay, - mir::{PlaceElem, ProjectionElem, StatementKind, Terminator}, + display::{ClosureStyle, HirDisplay}, + mir::{PlaceElem, ProjectionElem, StatementKind, TerminatorKind}, + ClosureId, }; use super::{ AggregateKind, BasicBlockId, BorrowKind, LocalId, MirBody, Operand, Place, Rvalue, UnOp, }; +macro_rules! w { + ($dst:expr, $($arg:tt)*) => { + { let _ = write!($dst, $($arg)*); } + }; +} + +macro_rules! wln { + ($dst:expr) => { + { let _ = writeln!($dst); } + }; + ($dst:expr, $($arg:tt)*) => { + { let _ = writeln!($dst, $($arg)*); } + }; +} + impl MirBody { pub fn pretty_print(&self, db: &dyn HirDatabase) -> String { let hir_body = db.body(self.owner); let mut ctx = MirPrettyCtx::new(self, &hir_body, db); - ctx.for_body(ctx.body.owner); + ctx.for_body(|this| match ctx.body.owner { + hir_def::DefWithBodyId::FunctionId(id) => { + let data = db.function_data(id); + w!(this, "fn {}() ", data.name); + } + hir_def::DefWithBodyId::StaticId(id) => { + let data = db.static_data(id); + w!(this, "static {}: _ = ", data.name); + } + hir_def::DefWithBodyId::ConstId(id) => { + let data = db.const_data(id); + w!(this, "const {}: _ = ", data.name.as_ref().unwrap_or(&Name::missing())); + } + hir_def::DefWithBodyId::VariantId(id) => { + let data = db.enum_data(id.parent); + w!(this, "enum {} = ", data.name); + } + }); ctx.result } @@ -49,21 +82,6 @@ struct MirPrettyCtx<'a> { local_to_binding: ArenaMap, } -macro_rules! w { - ($dst:expr, $($arg:tt)*) => { - { let _ = write!($dst, $($arg)*); } - }; -} - -macro_rules! 
wln { - ($dst:expr) => { - { let _ = writeln!($dst); } - }; - ($dst:expr, $($arg:tt)*) => { - { let _ = writeln!($dst, $($arg)*); } - }; -} - impl Write for MirPrettyCtx<'_> { fn write_str(&mut self, s: &str) -> std::fmt::Result { let mut it = s.split('\n'); // note: `.lines()` is wrong here @@ -91,36 +109,40 @@ impl Display for LocalName { } impl<'a> MirPrettyCtx<'a> { - fn for_body(&mut self, name: impl Debug) { - wln!(self, "// {:?}", name); + fn for_body(&mut self, name: impl FnOnce(&mut MirPrettyCtx<'_>)) { + name(self); self.with_block(|this| { this.locals(); wln!(this); this.blocks(); }); for &closure in &self.body.closures { - let body = match self.db.mir_body_for_closure(closure) { - Ok(x) => x, - Err(e) => { - wln!(self, "// error in {closure:?}: {e:?}"); - continue; - } - }; - let result = mem::take(&mut self.result); - let indent = mem::take(&mut self.indent); - let mut ctx = MirPrettyCtx { - body: &body, - local_to_binding: body.binding_locals.iter().map(|(x, y)| (*y, x)).collect(), - result, - indent, - ..*self - }; - ctx.for_body(closure); - self.result = ctx.result; - self.indent = ctx.indent; + self.for_closure(closure); } } + fn for_closure(&mut self, closure: ClosureId) { + let body = match self.db.mir_body_for_closure(closure) { + Ok(x) => x, + Err(e) => { + wln!(self, "// error in {closure:?}: {e:?}"); + return; + } + }; + let result = mem::take(&mut self.result); + let indent = mem::take(&mut self.indent); + let mut ctx = MirPrettyCtx { + body: &body, + local_to_binding: body.binding_locals.iter().map(|(x, y)| (*y, x)).collect(), + result, + indent, + ..*self + }; + ctx.for_body(|this| wln!(this, "// Closure: {:?}", closure)); + self.result = ctx.result; + self.indent = ctx.indent; + } + fn with_block(&mut self, f: impl FnOnce(&mut MirPrettyCtx<'_>)) { self.indent += " "; wln!(self, "{{"); @@ -155,7 +177,7 @@ impl<'a> MirPrettyCtx<'a> { fn locals(&mut self) { for (id, local) in self.body.locals.iter() { - wln!(self, "let {}: {};", self.local_name(id), local.ty.display(self.db)); + wln!(self, "let {}: {};", self.local_name(id), self.hir_display(&local.ty)); } } @@ -198,11 +220,11 @@ impl<'a> MirPrettyCtx<'a> { } } match &block.terminator { - Some(terminator) => match terminator { - Terminator::Goto { target } => { + Some(terminator) => match &terminator.kind { + TerminatorKind::Goto { target } => { wln!(this, "goto 'bb{};", u32::from(target.into_raw())) } - Terminator::SwitchInt { discr, targets } => { + TerminatorKind::SwitchInt { discr, targets } => { w!(this, "switch "); this.operand(discr); w!(this, " "); @@ -213,7 +235,7 @@ impl<'a> MirPrettyCtx<'a> { wln!(this, "_ => {},", this.basic_block_id(targets.otherwise())); }); } - Terminator::Call { func, args, destination, target, .. } => { + TerminatorKind::Call { func, args, destination, target, .. } => { w!(this, "Call "); this.with_block(|this| { w!(this, "func: "); @@ -295,7 +317,8 @@ impl<'a> MirPrettyCtx<'a> { // equally. Feel free to change it. 
self.place(p); } - Operand::Constant(c) => w!(self, "Const({})", c.display(self.db)), + Operand::Constant(c) => w!(self, "Const({})", self.hir_display(c)), + Operand::Static(s) => w!(self, "Static({:?})", s), } } @@ -349,7 +372,7 @@ impl<'a> MirPrettyCtx<'a> { Rvalue::Cast(ck, op, ty) => { w!(self, "Cast({ck:?}, "); self.operand(op); - w!(self, ", {})", ty.display(self.db)); + w!(self, ", {})", self.hir_display(ty)); } Rvalue::CheckedBinaryOp(b, o1, o2) => { self.operand(o1); @@ -369,6 +392,7 @@ impl<'a> MirPrettyCtx<'a> { self.place(p); w!(self, ")"); } + Rvalue::ShallowInitBoxWithAlloc(_) => w!(self, "ShallowInitBoxWithAlloc"), Rvalue::ShallowInitBox(op, _) => { w!(self, "ShallowInitBox("); self.operand(op); @@ -392,4 +416,8 @@ impl<'a> MirPrettyCtx<'a> { } } } + + fn hir_display(&self, ty: &'a T) -> impl Display + 'a { + ty.display(self.db).with_closure_style(ClosureStyle::ClosureWithId) + } } diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs index 3b2a726688d0..5c8a73f308d9 100644 --- a/crates/hir-ty/src/utils.rs +++ b/crates/hir-ty/src/utils.rs @@ -23,10 +23,11 @@ use hir_expand::name::Name; use intern::Interned; use rustc_hash::FxHashSet; use smallvec::{smallvec, SmallVec}; +use stdx::never; use crate::{ - db::HirDatabase, ChalkTraitId, Interner, Substitution, TraitRef, TraitRefExt, Ty, TyExt, - WhereClause, + db::HirDatabase, ChalkTraitId, GenericArg, Interner, Substitution, TraitRef, TraitRefExt, Ty, + TyExt, WhereClause, }; pub(crate) fn fn_traits( @@ -176,6 +177,29 @@ pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics { Generics { def, params: db.generic_params(def), parent_generics } } +pub(crate) struct ClosureSubst<'a>(pub(crate) &'a Substitution); + +impl<'a> ClosureSubst<'a> { + pub(crate) fn parent_subst(&self) -> &'a [GenericArg] { + match self.0.as_slice(Interner) { + [x @ .., _] => x, + _ => { + never!("Closure missing parameter"); + &[] + } + } + } + + pub(crate) fn sig_ty(&self) -> &'a Ty { + match self.0.as_slice(Interner) { + [.., x] => x.assert_ty_ref(Interner), + _ => { + unreachable!("Closure missing sig_ty parameter"); + } + } + } +} + #[derive(Debug)] pub(crate) struct Generics { def: GenericDefId, diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 7e141a9e3642..720e0ea9d341 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -1856,11 +1856,21 @@ impl Function { def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() }) } - pub fn eval(self, db: &dyn HirDatabase) -> Result<(), MirEvalError> { + pub fn eval( + self, + db: &dyn HirDatabase, + span_formatter: impl Fn(FileId, TextRange) -> String, + ) -> Result<(), String> { + let converter = |e: MirEvalError| { + let mut r = String::new(); + _ = e.pretty_print(&mut r, db, &span_formatter); + r + }; let body = db .mir_body(self.id.into()) - .map_err(|e| MirEvalError::MirLowerError(self.id.into(), e))?; - interpret_mir(db, &body, Substitution::empty(Interner), false)?; + .map_err(|e| MirEvalError::MirLowerError(self.id.into(), e)) + .map_err(converter)?; + interpret_mir(db, &body, Substitution::empty(Interner), false).map_err(converter)?; Ok(()) } } @@ -2006,7 +2016,7 @@ impl Const { } pub fn render_eval(self, db: &dyn HirDatabase) -> Result { - let c = db.const_eval(self.id, Substitution::empty(Interner))?; + let c = db.const_eval(self.id.into(), Substitution::empty(Interner))?; let r = format!("{}", HexifiedConst(c).display(db)); // We want to see things like `` and `` as they are probably bug in our // implementation, but there is no 
need to show things like `` or `` to diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 64b2221bdeab..5f2c61f5b5f8 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -30,7 +30,6 @@ pub struct HoverConfig { pub documentation: bool, pub keywords: bool, pub format: HoverDocFormat, - pub interpret_tests: bool, } #[derive(Clone, Debug, PartialEq, Eq)] diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index fb7b15e05d81..b7855202f369 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -3,8 +3,7 @@ use std::fmt::Display; use either::Either; use hir::{ - db::DefDatabase, Adt, AsAssocItem, AttributeTemplate, HasAttrs, HasSource, HirDisplay, - MirEvalError, Semantics, TypeInfo, + Adt, AsAssocItem, AttributeTemplate, HasAttrs, HasSource, HirDisplay, Semantics, TypeInfo, }; use ide_db::{ base_db::SourceDatabase, @@ -403,19 +402,7 @@ pub(super) fn definition( )) }), Definition::Module(it) => label_and_docs(db, it), - Definition::Function(it) => label_and_layout_info_and_docs(db, it, |_| { - if !config.interpret_tests { - return None; - } - match it.eval(db) { - Ok(()) => Some("pass".into()), - Err(MirEvalError::MirLowerError(f, e)) => { - let name = &db.function_data(f).name; - Some(format!("error: fail to lower {name} due {e:?}")) - } - Err(e) => Some(format!("error: {e:?}")), - } - }), + Definition::Function(it) => label_and_docs(db, it), Definition::Adt(it) => label_and_layout_info_and_docs(db, it, |&it| { let layout = it.layout(db).ok()?; Some(format!("size = {}, align = {}", layout.size.bytes(), layout.align.abi.bytes())) diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index 7294b625539c..b5b6fd80dea7 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -9,7 +9,6 @@ const HOVER_BASE_CONFIG: HoverConfig = HoverConfig { documentation: true, format: HoverDocFormat::Markdown, keywords: true, - interpret_tests: false, }; fn check_hover_no_result(ra_fixture: &str) { diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs index d5be0c5b8834..27e9ba3c36b4 100644 --- a/crates/ide/src/inlay_hints/adjustment.rs +++ b/crates/ide/src/inlay_hints/adjustment.rs @@ -3,7 +3,10 @@ //! let _: u32 = /* */ loop {}; //! let _: &u32 = /* &* */ &mut 0; //! 
``` -use hir::{Adjust, Adjustment, AutoBorrow, HirDisplay, Mutability, PointerCast, Safety, Semantics}; +use hir::{ + Adjust, Adjustment, AutoBorrow, HirDisplay, Mutability, OverloadedDeref, PointerCast, Safety, + Semantics, +}; use ide_db::RootDatabase; use stdx::never; @@ -88,7 +91,13 @@ pub(super) fn hints( Adjust::NeverToAny if config.adjustment_hints == AdjustmentHints::Always => { ("", "never to any") } - Adjust::Deref(_) => ("*", "dereference"), + Adjust::Deref(None) => ("*", "dereference"), + Adjust::Deref(Some(OverloadedDeref(Mutability::Shared))) => { + ("*", "`Deref` dereference") + } + Adjust::Deref(Some(OverloadedDeref(Mutability::Mut))) => { + ("*", "`DerefMut` dereference") + } Adjust::Borrow(AutoBorrow::Ref(Mutability::Shared)) => ("&", "borrow"), Adjust::Borrow(AutoBorrow::Ref(Mutability::Mut)) => ("&mut ", "unique borrow"), Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Shared)) => { diff --git a/crates/ide/src/inlay_hints/chaining.rs b/crates/ide/src/inlay_hints/chaining.rs index 069edaed6659..d8c8401af77d 100644 --- a/crates/ide/src/inlay_hints/chaining.rs +++ b/crates/ide/src/inlay_hints/chaining.rs @@ -444,7 +444,7 @@ fn main() { file_id: FileId( 1, ), - range: 5768..5776, + range: 5769..5777, }, ), tooltip: "", @@ -457,7 +457,7 @@ fn main() { file_id: FileId( 1, ), - range: 5800..5804, + range: 5801..5805, }, ), tooltip: "", @@ -478,7 +478,7 @@ fn main() { file_id: FileId( 1, ), - range: 5768..5776, + range: 5769..5777, }, ), tooltip: "", @@ -491,7 +491,7 @@ fn main() { file_id: FileId( 1, ), - range: 5800..5804, + range: 5801..5805, }, ), tooltip: "", @@ -512,7 +512,7 @@ fn main() { file_id: FileId( 1, ), - range: 5768..5776, + range: 5769..5777, }, ), tooltip: "", @@ -525,7 +525,7 @@ fn main() { file_id: FileId( 1, ), - range: 5800..5804, + range: 5801..5805, }, ), tooltip: "", diff --git a/crates/ide/src/interpret_function.rs b/crates/ide/src/interpret_function.rs new file mode 100644 index 000000000000..d111f98955ac --- /dev/null +++ b/crates/ide/src/interpret_function.rs @@ -0,0 +1,49 @@ +use hir::Semantics; +use ide_db::base_db::SourceDatabaseExt; +use ide_db::RootDatabase; +use ide_db::{base_db::FilePosition, LineIndexDatabase}; +use std::{fmt::Write, time::Instant}; +use syntax::TextRange; +use syntax::{algo::find_node_at_offset, ast, AstNode}; + +// Feature: Interpret Function +// +// |=== +// | Editor | Action Name +// +// | VS Code | **rust-analyzer: Interpret Function** +// |=== +pub(crate) fn interpret_function(db: &RootDatabase, position: FilePosition) -> String { + let start_time = Instant::now(); + let mut result = find_and_interpret(db, position) + .unwrap_or_else(|| "Not inside a function body".to_string()); + let duration = Instant::now() - start_time; + writeln!(result, "").unwrap(); + writeln!(result, "----------------------").unwrap(); + writeln!(result, " Finished in {}s", duration.as_secs_f32()).unwrap(); + result +} + +fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option { + let sema = Semantics::new(db); + let source_file = sema.parse(position.file_id); + + let item = find_node_at_offset::(source_file.syntax(), position.offset)?; + let def = match item { + ast::Item::Fn(it) => sema.to_def(&it)?, + _ => return None, + }; + let span_formatter = |file_id, text_range: TextRange| { + let line_col = db.line_index(file_id).line_col(text_range.start()); + let path = &db + .source_root(db.file_source_root(file_id)) + .path_for_file(&file_id) + .map(|x| x.to_string()); + let path = path.as_deref().unwrap_or(""); + 
format!("file://{path}#{}:{}", line_col.line + 1, line_col.col) + }; + match def.eval(db, span_formatter) { + Ok(_) => Some("pass".to_string()), + Err(e) => Some(e), + } +} diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index d14cf83f0d77..3509dee0c96e 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -56,6 +56,7 @@ mod typing; mod view_crate_graph; mod view_hir; mod view_mir; +mod interpret_function; mod view_item_tree; mod shuffle_crate_graph; @@ -317,6 +318,10 @@ impl Analysis { self.with_db(|db| view_mir::view_mir(db, position)) } + pub fn interpret_function(&self, position: FilePosition) -> Cancellable { + self.with_db(|db| interpret_function::interpret_function(db, position)) + } + pub fn view_item_tree(&self, file_id: FileId) -> Cancellable { self.with_db(|db| view_item_tree::view_item_tree(db, file_id)) } diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index 774b07775b9d..7ce20e973bb2 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -140,7 +140,6 @@ impl StaticIndex<'_> { documentation: true, keywords: true, format: crate::HoverDocFormat::Markdown, - interpret_tests: false, }; let tokens = tokens.filter(|token| { matches!( diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 89ca8e635676..13e8b683fdb3 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -574,6 +574,7 @@ pub struct LensConfig { // runnables pub run: bool, pub debug: bool, + pub interpret: bool, // implementations pub implementations: bool, @@ -1427,6 +1428,9 @@ impl Config { LensConfig { run: self.data.lens_enable && self.data.lens_run_enable, debug: self.data.lens_enable && self.data.lens_debug_enable, + interpret: self.data.lens_enable + && self.data.lens_run_enable + && self.data.interpret_tests, implementations: self.data.lens_enable && self.data.lens_implementations_enable, method_refs: self.data.lens_enable && self.data.lens_references_method_enable, refs_adt: self.data.lens_enable && self.data.lens_references_adt_enable, @@ -1485,7 +1489,6 @@ impl Config { } }, keywords: self.data.hover_documentation_keywords_enable, - interpret_tests: self.data.interpret_tests, } } diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index 3c39f205e74c..a5b9004d80fe 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs @@ -163,6 +163,16 @@ pub(crate) fn handle_view_mir( Ok(res) } +pub(crate) fn handle_interpret_function( + snap: GlobalStateSnapshot, + params: lsp_types::TextDocumentPositionParams, +) -> Result { + let _p = profile::span("handle_interpret_function"); + let position = from_proto::file_position(&snap, params)?; + let res = snap.analysis.interpret_function(position)?; + Ok(res) +} + pub(crate) fn handle_view_file_text( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentIdentifier, diff --git a/crates/rust-analyzer/src/lsp_ext.rs b/crates/rust-analyzer/src/lsp_ext.rs index 753094667629..90c9b16ec2b5 100644 --- a/crates/rust-analyzer/src/lsp_ext.rs +++ b/crates/rust-analyzer/src/lsp_ext.rs @@ -90,6 +90,14 @@ impl Request for ViewMir { const METHOD: &'static str = "rust-analyzer/viewMir"; } +pub enum InterpretFunction {} + +impl Request for InterpretFunction { + type Params = lsp_types::TextDocumentPositionParams; + type Result = String; + const METHOD: &'static str = "rust-analyzer/interpretFunction"; +} + pub enum ViewFileText {} impl Request for ViewFileText { diff 
--git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 1c2b2627700c..6f31c6412223 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -665,6 +665,7 @@ impl GlobalState { .on::(handlers::handle_syntax_tree) .on::(handlers::handle_view_hir) .on::(handlers::handle_view_mir) + .on::(handlers::handle_interpret_function) .on::(handlers::handle_view_file_text) .on::(handlers::handle_view_crate_graph) .on::(handlers::handle_view_item_tree) diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index b0b8d38cdc38..521db6b0ca9c 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs @@ -1215,6 +1215,14 @@ pub(crate) fn code_lens( data: None, }) } + if lens_config.interpret { + let command = command::interpret_single(&r); + acc.push(lsp_types::CodeLens { + range: annotation_range, + command: Some(command), + data: None, + }) + } } AnnotationKind::HasImpls { pos: file_range, data } => { if !client_commands_config.show_reference { @@ -1359,6 +1367,15 @@ pub(crate) mod command { } } + pub(crate) fn interpret_single(_runnable: &lsp_ext::Runnable) -> lsp_types::Command { + lsp_types::Command { + title: "Interpret".into(), + command: "rust-analyzer.interpretFunction".into(), + // FIXME: use the `_runnable` here. + arguments: Some(vec![]), + } + } + pub(crate) fn goto_location( snap: &GlobalStateSnapshot, nav: &NavigationTarget, diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index ca1dbf532c3d..f403ef8ee03b 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -11,6 +11,7 @@ //! add: //! as_ref: sized //! bool_impl: option, fn +//! cell: copy, drop //! clone: sized //! coerce_unsized: unsize //! copy: clone @@ -139,6 +140,52 @@ pub mod hash { } // endregion:hash +// region:cell +pub mod cell { + use crate::mem; + + #[lang = "unsafe_cell"] + pub struct UnsafeCell { + value: T, + } + + impl UnsafeCell { + pub const fn new(value: T) -> UnsafeCell { + UnsafeCell { value } + } + + pub const fn get(&self) -> *mut T { + self as *const UnsafeCell as *const T as *mut T + } + } + + pub struct Cell { + value: UnsafeCell, + } + + impl Cell { + pub const fn new(value: T) -> Cell { + Cell { value: UnsafeCell::new(value) } + } + + pub fn set(&self, val: T) { + let old = self.replace(val); + mem::drop(old); + } + + pub fn replace(&self, val: T) -> T { + mem::replace(unsafe { &mut *self.value.get() }, val) + } + } + + impl Cell { + pub fn get(&self) -> T { + unsafe { *self.value.get() } + } + } +} +// endregion:cell + // region:clone pub mod clone { #[lang = "clone"] @@ -220,6 +267,13 @@ pub mod mem { // endregion:manually_drop pub fn drop(_x: T) {} + pub const fn replace(dest: &mut T, src: T) -> T { + unsafe { + let result = *dest; + *dest = src; + result + } + } } // endregion:drop @@ -710,6 +764,14 @@ pub mod option { None => default, } } + // region:result + pub const fn ok_or(self, err: E) -> Result { + match self { + Some(v) => Ok(v), + None => Err(err), + } + } + // endregion:result // region:fn pub fn and_then(self, f: F) -> Option where diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md index d748400b6959..76080eca4e03 100644 --- a/docs/dev/lsp-extensions.md +++ b/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@