Automatically prefer integer addresses for zst MPlace
oli-obk committed Jan 7, 2020
1 parent ee84c30 commit cc0fbdf
Showing 7 changed files with 26 additions and 53 deletions.
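The change in a nutshell: `OpTy::try_as_mplace` and `assert_mem_place` now take a `HasDataLayout` context and, for zero-sized types, return a dangling `MPlaceTy` at a plain integer address instead of making every caller special-case ZSTs. That is what lets the hand-rolled ZST branches in operand.rs and visitor.rs be deleted; the remaining callers only need to thread the interpreter context (`self`, `ecx`, or `self.ecx`) through. The standalone sketch below is not part of the commit; it only illustrates, in plain Rust with a hypothetical `Zst` type, why a non-null, well-aligned integer address is a valid "place" for a zero-sized value:

    // Illustration only (not from this commit): a ZST read through a dangling,
    // non-null, aligned pointer touches no bytes, so no allocation needs to back it.
    struct Zst; // hypothetical zero-sized type: size 0, align 1

    fn main() {
        let addr = std::mem::align_of::<Zst>(); // 1 is the smallest aligned, non-null address
        let ptr = addr as *const Zst;
        // Zero-sized reads are valid for any non-null, properly aligned pointer.
        let _value: Zst = unsafe { std::ptr::read(ptr) };
        println!("materialized a Zst at integer address {:#x}", addr);
    }

This mirrors what `MPlaceTy::dangling` does inside the interpreter: it builds the place from the type's alignment, an integer address, rather than from an allocation, which is why the commit title speaks of preferring integer addresses for ZST `MPlace`s.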
4 changes: 2 additions & 2 deletions src/librustc_mir/const_eval/eval_queries.rs
@@ -115,7 +115,7 @@ pub(super) fn op_to_const<'tcx>(
         // by-val is if we are in const_field, i.e., if this is (a field of) something that we
         // "tried to make immediate" before. We wouldn't do that for non-slice scalar pairs or
         // structs containing such.
-        op.try_as_mplace()
+        op.try_as_mplace(ecx)
     };
     let val = match immediate {
         Ok(mplace) => {
@@ -132,7 +132,7 @@ pub(super) fn op_to_const<'tcx>(
                 // `Immediate` is when we are called from `const_field`, and that `Immediate`
                 // comes from a constant so it can happen have `Undef`, because the indirect
                 // memory that was read had undefined bytes.
-                let mplace = op.assert_mem_place();
+                let mplace = op.assert_mem_place(ecx);
                 let ptr = mplace.ptr.assert_ptr();
                 let alloc = ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id);
                 ConstValue::ByRef { alloc, offset: ptr.offset }
30 changes: 5 additions & 25 deletions src/librustc_mir/interpret/operand.rs
@@ -267,7 +267,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         &self,
         op: OpTy<'tcx, M::PointerTag>,
     ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
-        match op.try_as_mplace() {
+        match op.try_as_mplace(self) {
             Ok(mplace) => Ok(self.force_mplace_ptr(mplace)?.into()),
             Err(imm) => Ok(imm.into()), // Nothing to cast/force
         }
@@ -335,7 +335,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         &self,
         src: OpTy<'tcx, M::PointerTag>,
     ) -> InterpResult<'tcx, Result<ImmTy<'tcx, M::PointerTag>, MPlaceTy<'tcx, M::PointerTag>>> {
-        Ok(match src.try_as_mplace() {
+        Ok(match src.try_as_mplace(self) {
             Ok(mplace) => {
                 if let Some(val) = self.try_read_immediate_from_mplace(mplace)? {
                     Ok(val)
@@ -383,7 +383,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         op: OpTy<'tcx, M::PointerTag>,
         field: u64,
     ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
-        let base = match op.try_as_mplace() {
+        let base = match op.try_as_mplace(self) {
             Ok(mplace) => {
                 // The easy case
                 let field = self.mplace_field(mplace, field)?;
@@ -420,7 +420,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         variant: VariantIdx,
     ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
         // Downcasts only change the layout
-        Ok(match op.try_as_mplace() {
+        Ok(match op.try_as_mplace(self) {
             Ok(mplace) => self.mplace_downcast(mplace, variant)?.into(),
             Err(..) => {
                 let layout = op.layout.for_variant(self, variant);
@@ -439,30 +439,10 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             Field(field, _) => self.operand_field(base, field.index() as u64)?,
             Downcast(_, variant) => self.operand_downcast(base, variant)?,
             Deref => self.deref_operand(base)?.into(),
-            ConstantIndex { .. } | Index(_) if base.layout.is_zst() => {
-                OpTy {
-                    op: Operand::Immediate(Scalar::zst().into()),
-                    // the actual index doesn't matter, so we just pick a convenient one like 0
-                    layout: base.layout.field(self, 0)?,
-                }
-            }
-            Subslice { from, to, from_end } if base.layout.is_zst() => {
-                let elem_ty = if let ty::Array(elem_ty, _) = base.layout.ty.kind {
-                    elem_ty
-                } else {
-                    bug!("slices shouldn't be zero-sized");
-                };
-                assert!(!from_end, "arrays shouldn't be subsliced from the end");
-
-                OpTy {
-                    op: Operand::Immediate(Scalar::zst().into()),
-                    layout: self.layout_of(self.tcx.mk_array(elem_ty, (to - from) as u64))?,
-                }
-            }
             Subslice { .. } | ConstantIndex { .. } | Index(_) => {
                 // The rest should only occur as mplace, we do not use Immediates for types
                 // allowing such operations. This matches place_projection forcing an allocation.
-                let mplace = base.assert_mem_place();
+                let mplace = base.assert_mem_place(self);
                 self.mplace_projection(mplace, proj_elem)?.into()
             }
         })
19 changes: 10 additions & 9 deletions src/librustc_mir/interpret/place.rs
@@ -200,16 +200,17 @@ impl<'tcx, Tag> MPlaceTy<'tcx, Tag> {
 // These are defined here because they produce a place.
 impl<'tcx, Tag: ::std::fmt::Debug + Copy> OpTy<'tcx, Tag> {
     #[inline(always)]
-    pub fn try_as_mplace(self) -> Result<MPlaceTy<'tcx, Tag>, ImmTy<'tcx, Tag>> {
+    pub fn try_as_mplace(self, cx: &impl HasDataLayout) -> Result<MPlaceTy<'tcx, Tag>, ImmTy<'tcx, Tag>> {
         match *self {
             Operand::Indirect(mplace) => Ok(MPlaceTy { mplace, layout: self.layout }),
+            Operand::Immediate(_) if self.layout.is_zst() => Ok(MPlaceTy::dangling(self.layout, cx)),
             Operand::Immediate(imm) => Err(ImmTy { imm, layout: self.layout }),
         }
     }

     #[inline(always)]
-    pub fn assert_mem_place(self) -> MPlaceTy<'tcx, Tag> {
-        self.try_as_mplace().unwrap()
+    pub fn assert_mem_place(self, cx: &impl HasDataLayout) -> MPlaceTy<'tcx, Tag> {
+        self.try_as_mplace(cx).unwrap()
     }
 }

@@ -305,7 +306,7 @@ where
     /// On success, returns `None` for zero-sized accesses (where nothing else is
     /// left to do) and a `Pointer` to use for the actual access otherwise.
     #[inline]
-    pub fn check_mplace_access(
+    pub(super) fn check_mplace_access(
         &self,
         place: MPlaceTy<'tcx, M::PointerTag>,
         size: Option<Size>,
@@ -338,7 +339,7 @@ where

     /// Force `place.ptr` to a `Pointer`.
     /// Can be helpful to avoid lots of `force_ptr` calls later, if this place is used a lot.
-    pub fn force_mplace_ptr(
+    pub(super) fn force_mplace_ptr(
         &self,
         mut place: MPlaceTy<'tcx, M::PointerTag>,
     ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
@@ -415,7 +416,7 @@ where

     // Iterates over all fields of an array. Much more efficient than doing the
     // same by repeatedly calling `mplace_array`.
-    pub fn mplace_array_fields(
+    pub(super) fn mplace_array_fields(
         &self,
         base: MPlaceTy<'tcx, Tag>,
     ) -> InterpResult<'tcx, impl Iterator<Item = InterpResult<'tcx, MPlaceTy<'tcx, Tag>>> + 'tcx>
@@ -430,7 +431,7 @@ where
         Ok((0..len).map(move |i| base.offset(i * stride, None, layout, dl)))
     }

-    pub fn mplace_subslice(
+    fn mplace_subslice(
         &self,
         base: MPlaceTy<'tcx, M::PointerTag>,
         from: u64,
@@ -471,7 +472,7 @@ where
         base.offset(from_offset, meta, layout, self)
     }

-    pub fn mplace_downcast(
+    pub(super) fn mplace_downcast(
         &self,
         base: MPlaceTy<'tcx, M::PointerTag>,
         variant: VariantIdx,
@@ -482,7 +483,7 @@ where
     }

     /// Project into an mplace
-    pub fn mplace_projection(
+    pub(super) fn mplace_projection(
         &self,
         base: MPlaceTy<'tcx, M::PointerTag>,
         proj_elem: &mir::PlaceElem<'tcx>,
2 changes: 1 addition & 1 deletion src/librustc_mir/interpret/terminator.rs
@@ -378,7 +378,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             }
             None => {
                 // Unsized self.
-                args[0].assert_mem_place()
+                args[0].assert_mem_place(self)
             }
         };
         // Find and consult vtable
11 changes: 4 additions & 7 deletions src/librustc_mir/interpret/validity.rs
@@ -571,12 +571,9 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
     ) -> InterpResult<'tcx> {
         match op.layout.ty.kind {
             ty::Str => {
-                let mplace = op.assert_mem_place(); // strings are never immediate
-                try_validation!(
-                    self.ecx.read_str(mplace),
-                    "uninitialized or non-UTF-8 data in str",
-                    self.path
-                );
+                let mplace = op.assert_mem_place(self.ecx); // strings are never immediate
+                try_validation!(self.ecx.read_str(mplace),
+                    "uninitialized or non-UTF-8 data in str", self.path);
             }
             ty::Array(tys, ..) | ty::Slice(tys)
                 if {
@@ -604,7 +601,7 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
                     return Ok(());
                 }
                 // non-ZST array cannot be immediate, slices are never immediate
-                let mplace = op.assert_mem_place();
+                let mplace = op.assert_mem_place(self.ecx);
                 // This is the length of the array/slice.
                 let len = mplace.len(self.ecx)?;
                 // zero length slices have nothing to be checked
10 changes: 2 additions & 8 deletions src/librustc_mir/interpret/visitor.rs
@@ -223,7 +223,7 @@ macro_rules! make_value_visitor {
             match v.layout().ty.kind {
                 ty::Dynamic(..) => {
                     // immediate trait objects are not a thing
-                    let dest = v.to_op(self.ecx())?.assert_mem_place();
+                    let dest = v.to_op(self.ecx())?.assert_mem_place(self.ecx());
                     let inner = self.ecx().unpack_dyn_trait(dest)?.1;
                     trace!("walk_value: dyn object layout: {:#?}", inner.layout);
                     // recurse with the inner type
@@ -292,13 +292,7 @@ macro_rules! make_value_visitor {
                 },
                 layout::FieldPlacement::Array { .. } => {
                     // Let's get an mplace first.
-                    let mplace = if v.layout().is_zst() {
-                        // it's a ZST, the memory content cannot matter
-                        MPlaceTy::dangling(v.layout(), self.ecx())
-                    } else {
-                        // non-ZST array/slice/str cannot be immediate
-                        v.to_op(self.ecx())?.assert_mem_place()
-                    };
+                    let mplace = v.to_op(self.ecx())?.assert_mem_place(self.ecx());
                     // Now we can go over all the fields.
                     let iter = self.ecx().mplace_array_fields(mplace)?
                         .map(|f| f.and_then(|f| {
3 changes: 2 additions & 1 deletion src/librustc_mir/transform/const_prop.rs
@@ -707,7 +707,8 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
                 ScalarMaybeUndef::Scalar(r),
             )) => l.is_bits() && r.is_bits(),
             interpret::Operand::Indirect(_) if mir_opt_level >= 2 => {
-                intern_const_alloc_recursive(&mut self.ecx, None, op.assert_mem_place())
+                let mplace = op.assert_mem_place(&self.ecx);
+                intern_const_alloc_recursive(&mut self.ecx, None, mplace)
                     .expect("failed to intern alloc");
                 true
             }
