Move "mutable thing in const" check from interning to validity #78351

Merged: 5 commits, Oct 28, 2020
12 changes: 0 additions & 12 deletions compiler/rustc_middle/src/mir/mod.rs
@@ -210,16 +210,6 @@ pub struct Body<'tcx> {
/// We hold in this field all the constants we are not able to evaluate yet.
pub required_consts: Vec<Constant<'tcx>>,

/// The user may be writing e.g. `&[(SOME_CELL, 42)][i].1` and this would get promoted, because
/// we'd statically know that no thing with interior mutability will ever be available to the
/// user without some serious unsafe code. Now this means that our promoted is actually
/// `&[(SOME_CELL, 42)]` and the MIR using it will do the `&promoted[i].1` projection because
/// the index may be a runtime value. Such a promoted value is illegal because it has reachable
/// interior mutability. This flag just makes this situation very obvious where the previous
/// implementation without the flag hid this situation silently.
/// FIXME(oli-obk): rewrite the promoted during promotion to eliminate the cell components.
pub ignore_interior_mut_in_const_validation: bool,

/// Does this body use generic parameters. This is used for the `ConstEvaluatable` check.
///
/// Note that this does not actually mean that this body is not computable right now.
@@ -276,7 +266,6 @@ impl<'tcx> Body<'tcx> {
var_debug_info,
span,
required_consts: Vec::new(),
ignore_interior_mut_in_const_validation: false,
is_polymorphic: false,
predecessor_cache: PredecessorCache::new(),
};
@@ -306,7 +295,6 @@ impl<'tcx> Body<'tcx> {
required_consts: Vec::new(),
generator_kind: None,
var_debug_info: Vec::new(),
ignore_interior_mut_in_const_validation: false,
is_polymorphic: false,
predecessor_cache: PredecessorCache::new(),
};
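The field removed above existed to tolerate exactly the promotion scenario its doc comment describes. At the source level the pattern looks roughly like the sketch below (an illustrative example with made-up names, not code from this PR): the borrowed array temporary contains `Cell`s, yet only the plain `i32` field is ever reachable through the resulting reference.

```rust
use std::cell::Cell;

// Hypothetical constant, mirroring the `SOME_CELL` from the removed comment.
const SOME_CELL: Cell<i32> = Cell::new(0);

fn lookup(i: usize) -> i32 {
    // If the borrow of the array is promoted, the promoted value is the whole
    // `[(SOME_CELL, 42), (SOME_CELL, 7)]` and the runtime-indexed projection
    // `[i].1` is applied to it afterwards, so the promoted contains `Cell`s
    // even though no `Cell` is reachable through the final `&i32`.
    *&[(SOME_CELL, 42), (SOME_CELL, 7)][i].1
}

fn main() {
    assert_eq!(lookup(0), 42);
}
```

With this PR the special case is no longer threaded through `mir::Body`: interning always treats `const` data as immutable, and the corresponding check happens during validation, which (per the FIXME further down) currently skips promoteds altogether.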
49 changes: 24 additions & 25 deletions compiler/rustc_mir/src/const_eval/eval_queries.rs
@@ -1,8 +1,8 @@
use super::{CompileTimeEvalContext, CompileTimeInterpreter, ConstEvalErr, MemoryExtra};
use crate::interpret::eval_nullary_intrinsic;
use crate::interpret::{
intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, GlobalId, Immediate,
InternKind, InterpCx, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking, Scalar,
intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId,
Immediate, InternKind, InterpCx, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking, Scalar,
ScalarMaybeUninit, StackPopCleanup,
};

@@ -59,23 +59,15 @@ fn eval_body_using_ecx<'mir, 'tcx>(
ecx.run()?;

// Intern the result
// FIXME: since the DefId of a promoted is the DefId of its owner, this
// means that promoteds in statics are actually interned like statics!
// However, this is also currently crucial because we promote mutable
// non-empty slices in statics to extend their lifetime, and this
// ensures that they are put into a mutable allocation.
// For other kinds of promoteds in statics (like array initializers), this is rather silly.
let intern_kind = match tcx.static_mutability(cid.instance.def_id()) {
Some(m) => InternKind::Static(m),
None if cid.promoted.is_some() => InternKind::Promoted,
_ => InternKind::Constant,
let intern_kind = if cid.promoted.is_some() {
InternKind::Promoted
} else {
match tcx.static_mutability(cid.instance.def_id()) {
Some(m) => InternKind::Static(m),
None => InternKind::Constant,
}
};
intern_const_alloc_recursive(
ecx,
intern_kind,
ret,
body.ignore_interior_mut_in_const_validation,
);
intern_const_alloc_recursive(ecx, intern_kind, ret);

debug!("eval_body_using_ecx done: {:?}", *ret);
Ok(ret)
@@ -376,16 +368,23 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
// Since evaluation had no errors, validate the resulting constant:
let validation = try {
// FIXME do not validate promoteds until a decision on
// https://github.com/rust-lang/rust/issues/67465 is made
// https://github.com/rust-lang/rust/issues/67465 and
// https://github.com/rust-lang/rust/issues/67534 is made.
// Promoteds can contain unexpected `UnsafeCell` and reference `static`s, but their
// otherwise restricted form ensures that this is still sound. We just lose the
// extra safety net of some of the dynamic checks. They can also contain invalid
// values, but since we do not usually check intermediate results of a computation
// for validity, it might be surprising to do that here.
if cid.promoted.is_none() {
let mut ref_tracking = RefTracking::new(mplace);
let mut inner = false;
while let Some((mplace, path)) = ref_tracking.todo.pop() {
ecx.const_validate_operand(
mplace.into(),
path,
&mut ref_tracking,
/*may_ref_to_static*/ ecx.memory.extra.can_access_statics,
)?;
let mode = match tcx.static_mutability(cid.instance.def_id()) {
Some(_) => CtfeValidationMode::Regular, // a `static`
None => CtfeValidationMode::Const { inner },
};
ecx.const_validate_operand(mplace.into(), path, &mut ref_tracking, mode)?;
inner = true;
}
}
};
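The new `CtfeValidationMode::Const { inner }` flag mirrors the `ConstBase`/`ConstInner` distinction that is deleted from interning below: the outermost value of a `const` may itself contain an `UnsafeCell`, but nothing reachable through a reference may. A minimal user-level sketch of the distinction (my own example, not from the PR):

```rust
use std::cell::Cell;

// Interior mutability in the value of the `const` itself is fine: every use
// of the constant produces a fresh copy, so there is no shared state.
const BY_VALUE: Cell<i32> = Cell::new(0);

// Interior mutability *behind a reference* is rejected, because every use of
// the constant would alias the same (necessarily immutable) allocation.
// const BEHIND_REF: &Cell<i32> = &Cell::new(0); // rejected by const checking

fn main() {
    BY_VALUE.set(1); // mutates a temporary copy, not a shared allocation
    assert_eq!(BY_VALUE.get(), 0);
}
```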
2 changes: 1 addition & 1 deletion compiler/rustc_mir/src/const_eval/mod.rs
@@ -29,7 +29,7 @@ pub(crate) fn const_caller_location(
let mut ecx = mk_eval_cx(tcx, DUMMY_SP, ty::ParamEnv::reveal_all(), false);

let loc_place = ecx.alloc_caller_location(file, line, col);
intern_const_alloc_recursive(&mut ecx, InternKind::Constant, loc_place, false);
intern_const_alloc_recursive(&mut ecx, InternKind::Constant, loc_place);
ConstValue::Scalar(loc_place.ptr)
}

87 changes: 28 additions & 59 deletions compiler/rustc_mir/src/interpret/intern.rs
@@ -2,12 +2,23 @@
//!
//! After a const evaluation has computed a value, before we destroy the const evaluator's session
//! memory, we need to extract all memory allocations to the global memory pool so they stay around.
//!
//! In principle, this is not very complicated: we recursively walk the final value, follow all the
//! pointers, and move all reachable allocations to the global `tcx` memory. The only complication
//! is picking the right mutability for the allocations in a `static` initializer: we want to make
//! as many allocations as possible immutable so LLVM can put them into read-only memory. At the
//! same time, we need to make memory that could be mutated by the program mutable to avoid
//! incorrect compilations. To achieve this, we do a type-based traversal of the final value,
//! tracking mutable and shared references and `UnsafeCell` to determine the current mutability.
Comment on lines +8 to +12

Member: Does that mean we could theoretically mark all static allocations as mutable? Is this pass slow to run, and would that give any significant speedup?

Contributor: It's the other way around: we can just mark all allocations in constants as immutable. I'm not sure whether we'd get a speedup, though.

Member (PR author), replying to the question above: For allocations in a static, yes. But then many people would complain that their read-only statics are not put into read-only memory, so that's not really an option.
//! (In principle, we could skip this type-based part for `const` and promoteds, as they need to be
//! always immutable. At least for `const` however we use this opportunity to reject any `const`
//! that contains allocations whose mutability we cannot identify.)
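A concrete (made-up) example of what this type-based traversal decides for a `static`: the allocation holding `STATE` below must stay mutable because the `AtomicU32` contains an `UnsafeCell`, while the `[1, 2, 3]` behind the shared reference is frozen and can be placed in read-only memory.

```rust
use std::sync::atomic::{AtomicU32, Ordering};

struct State {
    counter: AtomicU32,    // interior mutability: its allocation stays mutable
    table: &'static [u32], // frozen pointee: eligible for read-only memory
}

static STATE: State = State { counter: AtomicU32::new(0), table: &[1, 2, 3] };

fn tick() -> u32 {
    STATE.counter.fetch_add(1, Ordering::Relaxed) + STATE.table[0]
}

fn main() {
    assert_eq!(tick(), 1); // 0 (previous counter value) + 1 (table[0])
}
```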

use super::validity::RefTracking;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir as hir;
use rustc_middle::mir::interpret::InterpResult;
use rustc_middle::ty::{self, layout::TyAndLayout, query::TyCtxtAt, Ty};
use rustc_middle::ty::{self, layout::TyAndLayout, Ty};
use rustc_target::abi::Size;

use rustc_ast::Mutability;
@@ -40,11 +51,6 @@ struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>> {
/// This field stores whether we are *currently* inside an `UnsafeCell`. This can affect
/// the intern mode of references we encounter.
inside_unsafe_cell: bool,

/// This flag is to avoid triggering UnsafeCells are not allowed behind references in constants
/// for promoteds.
/// It's a copy of `mir::Body`'s ignore_interior_mut_in_const_validation field
ignore_interior_mut_in_const: bool,
}

#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
@@ -53,22 +59,14 @@ enum InternMode {
/// this is *immutable*, and below mutable references inside an `UnsafeCell`, this
/// is *mutable*.
Static(hir::Mutability),
/// The "base value" of a const, which can have `UnsafeCell` (as in `const FOO: Cell<i32>`),
/// but that interior mutability is simply ignored.
ConstBase,
/// The "inner values" of a const with references, where `UnsafeCell` is an error.
ConstInner,
/// A `const`.
Const,
}

/// Signalling data structure to ensure we don't recurse
/// into the memory of other constants or statics
struct IsStaticOrFn;

fn mutable_memory_in_const(tcx: TyCtxtAt<'_>, kind: &str) {
// FIXME: show this in validation instead so we can point at where in the value the error is?
tcx.sess.span_err(tcx.span, &format!("mutable memory ({}) is not allowed in constant", kind));
}

/// Intern an allocation without looking at its children.
/// `mode` is the mode of the environment where we found this pointer.
/// `mutability` is the mutability of the place to be interned; even if that says
@@ -129,9 +127,7 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>>(
// See const_eval::machine::MemoryExtra::can_access_statics for why
// immutability is so important.

// There are no sensible checks we can do here; grep for `mutable_memory_in_const` to
// find the checks we are doing elsewhere to avoid even getting here for memory
// that "wants" to be mutable.
// Validation will ensure that there is no `UnsafeCell` on an immutable allocation.
alloc.mutability = Mutability::Not;
};
// link the alloc id to the actual allocation
@@ -167,17 +163,13 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
mplace: MPlaceTy<'tcx>,
fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>,
) -> InterpResult<'tcx> {
// ZSTs cannot contain pointers, so we can skip them.
if mplace.layout.is_zst() {
return Ok(());
}

if let Some(def) = mplace.layout.ty.ty_adt_def() {
if Some(def.did) == self.ecx.tcx.lang_items().unsafe_cell_type() {
if self.mode == InternMode::ConstInner && !self.ignore_interior_mut_in_const {
// We do not actually make this memory mutable. But in case the user
// *expected* it to be mutable, make sure we error. This is just a
// sanity check to prevent users from accidentally exploiting the UB
// they caused. It also helps us to find cases where const-checking
// failed to prevent an `UnsafeCell` (but as `ignore_interior_mut_in_const`
// shows that part is not airtight).
mutable_memory_in_const(self.ecx.tcx, "`UnsafeCell`");
}
// We are crossing over an `UnsafeCell`, we can mutate again. This means that
// References we encounter inside here are interned as pointing to mutable
// allocations.
@@ -189,11 +181,6 @@
}
}

// ZSTs cannot contain pointers, so we can skip them.
if mplace.layout.is_zst() {
return Ok(());
}

self.walk_aggregate(mplace, fields)
}

@@ -213,7 +200,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
if let Scalar::Ptr(vtable) = mplace.meta.unwrap_meta() {
// Explicitly choose const mode here, since vtables are immutable, even
// if the reference of the fat pointer is mutable.
self.intern_shallow(vtable.alloc_id, InternMode::ConstInner, None);
self.intern_shallow(vtable.alloc_id, InternMode::Const, None);
} else {
// Validation will error (with a better message) on an invalid vtable pointer.
// Let validation show the error message, but make sure it *does* error.
Expand All @@ -225,7 +212,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
// Only recurse for allocation-backed pointers.
if let Scalar::Ptr(ptr) = mplace.ptr {
// Compute the mode with which we intern this. Our goal here is to make as many
// statics as we can immutable so they can be placed in const memory by LLVM.
// statics as we can immutable so they can be placed in read-only memory by LLVM.
let ref_mode = match self.mode {
InternMode::Static(mutbl) => {
// In statics, merge outer mutability with reference mutability and
@@ -259,27 +246,11 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir
}
}
}
InternMode::ConstBase | InternMode::ConstInner => {
// Ignore `UnsafeCell`, everything is immutable. Do some sanity checking
// for mutable references that we encounter -- they must all be ZST.
// This helps to prevent users from accidentally exploiting UB that they
// caused (by somehow getting a mutable reference in a `const`).
if ref_mutability == Mutability::Mut {
match referenced_ty.kind() {
ty::Array(_, n) if n.eval_usize(*tcx, self.ecx.param_env) == 0 => {}
ty::Slice(_)
if mplace.meta.unwrap_meta().to_machine_usize(self.ecx)?
== 0 => {}
_ => mutable_memory_in_const(tcx, "`&mut`"),
}
} else {
// A shared reference. We cannot check `freeze` here due to references
// like `&dyn Trait` that are actually immutable. We do check for
// concrete `UnsafeCell` when traversing the pointee though (if it is
// a new allocation, not yet interned).
}
// Go on with the "inner" rules.
InternMode::ConstInner
InternMode::Const => {
// Ignore `UnsafeCell`, everything is immutable. Validity does some sanity
// checking for mutable references that we encounter -- they must all be
// ZST.
InternMode::Const
}
};
match self.intern_shallow(ptr.alloc_id, ref_mode, Some(referenced_ty)) {
@@ -318,7 +289,6 @@ pub fn intern_const_alloc_recursive<M: CompileTimeMachine<'mir, 'tcx>>(
ecx: &mut InterpCx<'mir, 'tcx, M>,
intern_kind: InternKind,
ret: MPlaceTy<'tcx>,
ignore_interior_mut_in_const: bool,
) where
'tcx: 'mir,
{
@@ -327,7 +297,7 @@
InternKind::Static(mutbl) => InternMode::Static(mutbl),
// `Constant` includes array lengths.
// `Promoted` includes non-`Copy` array initializers and `rustc_args_required_const` arguments.
InternKind::Constant | InternKind::Promoted => InternMode::ConstBase,
InternKind::Constant | InternKind::Promoted => InternMode::Const,
};

// Type based interning.
@@ -357,7 +327,6 @@ pub fn intern_const_alloc_recursive<M: CompileTimeMachine<'mir, 'tcx>>(
ecx,
mode,
leftover_allocations,
ignore_interior_mut_in_const,
inside_unsafe_cell: false,
}
.visit_value(mplace);
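The `InternMode::Const` choice for vtables above can be illustrated at the user level (again a made-up sketch): both the promoted `0u8` and the vtable behind the trait-object constant end up as immutable global allocations, matching the comment that vtables are interned immutably even when the fat pointer carrying them is a mutable reference.

```rust
use std::fmt::Debug;

// The constant stores a fat pointer: a data pointer to the promoted `0u8`
// allocation plus a vtable pointer. The vtable allocation is interned as
// immutable in all cases.
const DEBUGGABLE: &dyn Debug = &0u8;

fn main() {
    println!("{:?}", DEBUGGABLE); // prints `0`
}
```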
2 changes: 1 addition & 1 deletion compiler/rustc_mir/src/interpret/mod.rs
@@ -24,7 +24,7 @@ pub use self::machine::{compile_time_machine, AllocMap, Machine, MayLeak, StackP
pub use self::memory::{AllocCheck, FnVal, Memory, MemoryKind};
pub use self::operand::{ImmTy, Immediate, OpTy, Operand};
pub use self::place::{MPlaceTy, MemPlace, MemPlaceMeta, Place, PlaceTy};
pub use self::validity::RefTracking;
pub use self::validity::{CtfeValidationMode, RefTracking};
pub use self::visitor::{MutValueVisitor, ValueVisitor};

crate use self::intrinsics::eval_nullary_intrinsic;