Skip to content

Commit

Permalink
Align pointers to DST fields properly
Browse files Browse the repository at this point in the history
DST fields, being of an unknown type, are not automatically aligned
properly, so a pointer to the field needs to be aligned using the
information in the vtable.

Fixes #26403 and a number of other DST-related bugs discovered while
implementing this.
  • Loading branch information
Aatch committed Dec 7, 2015
1 parent 7b77f67 commit a2557d4
Show file tree
Hide file tree
Showing 11 changed files with 337 additions and 61 deletions.
72 changes: 57 additions & 15 deletions src/librustc_trans/trans/_match.rs
Expand Up @@ -709,8 +709,10 @@ fn extract_variant_args<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
val: MatchInput)
-> ExtractedBlock<'blk, 'tcx> {
let _icx = push_ctxt("match::extract_variant_args");
// Assume enums are always sized for now.
let val = adt::MaybeSizedValue::sized(val.val);
let args = (0..adt::num_args(repr, disr_val)).map(|i| {
adt::trans_field_ptr(bcx, repr, val.val, disr_val, i)
adt::trans_field_ptr(bcx, repr, val, disr_val, i)
}).collect();

ExtractedBlock { vals: args, bcx: bcx }
Expand Down Expand Up @@ -1199,7 +1201,8 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
(arg_count - 1, Load(bcx, expr::get_dataptr(bcx, val.val)))
};
let mut field_vals: Vec<ValueRef> = (0..arg_count).map(|ix|
adt::trans_field_ptr(bcx, &*repr, struct_val, 0, ix)
// By definition, these are all sized
adt::trans_field_ptr(bcx, &*repr, adt::MaybeSizedValue::sized(struct_val), 0, ix)
).collect();

match left_ty.sty {
Expand All @@ -1211,10 +1214,13 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
monomorphize::field_ty(bcx.tcx(), substs, field)
}).unwrap();
let scratch = alloc_ty(bcx, unsized_ty, "__struct_field_fat_ptr");

let meta = Load(bcx, expr::get_meta(bcx, val.val));
let struct_val = adt::MaybeSizedValue::unsized_(struct_val, meta);

let data = adt::trans_field_ptr(bcx, &*repr, struct_val, 0, arg_count);
let len = Load(bcx, expr::get_meta(bcx, val.val));
Store(bcx, data, expr::get_dataptr(bcx, scratch));
Store(bcx, len, expr::get_meta(bcx, scratch));
Store(bcx, meta, expr::get_meta(bcx, scratch));
field_vals.push(scratch);
}
_ => {}
Expand Down Expand Up @@ -1785,9 +1791,10 @@ pub fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
val: MatchInput,
cleanup_scope: cleanup::ScopeId)
-> Block<'blk, 'tcx> {
debug!("bind_irrefutable_pat(bcx={}, pat={:?})",
debug!("bind_irrefutable_pat(bcx={}, pat={:?}, val={})",
bcx.to_str(),
pat);
pat,
bcx.val_to_string(val.val));

if bcx.sess().asm_comments() {
add_comment(bcx, &format!("bind_irrefutable_pat(pat={:?})",
Expand Down Expand Up @@ -1866,9 +1873,10 @@ pub fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
Some(ref elems) => {
// This is the tuple struct case.
let repr = adt::represent_node(bcx, pat.id);
let val = adt::MaybeSizedValue::sized(val.val);
for (i, elem) in elems.iter().enumerate() {
let fldptr = adt::trans_field_ptr(bcx, &*repr,
val.val, 0, i);
val, 0, i);
bcx = bind_irrefutable_pat(
bcx,
&**elem,
Expand All @@ -1888,14 +1896,35 @@ pub fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let pat_ty = node_id_type(bcx, pat.id);
let pat_repr = adt::represent_type(bcx.ccx(), pat_ty);
let pat_v = VariantInfo::of_node(tcx, pat_ty, pat.id);

let val = if type_is_sized(tcx, pat_ty) {
adt::MaybeSizedValue::sized(val.val)
} else {
let data = Load(bcx, expr::get_dataptr(bcx, val.val));
let meta = Load(bcx, expr::get_meta(bcx, val.val));
adt::MaybeSizedValue::unsized_(data, meta)
};

for f in fields {
let name = f.node.name;
let fldptr = adt::trans_field_ptr(
let field_idx = pat_v.field_index(name);
let mut fldptr = adt::trans_field_ptr(
bcx,
&*pat_repr,
val.val,
val,
pat_v.discr,
pat_v.field_index(name));
field_idx);

let fty = pat_v.fields[field_idx].1;
// If it's not sized, then construct a fat pointer instead of
// a regular one
if !type_is_sized(tcx, fty) {
let scratch = alloc_ty(bcx, fty, "__struct_field_fat_ptr");
debug!("Creating fat pointer {}", bcx.val_to_string(scratch));
Store(bcx, fldptr, expr::get_dataptr(bcx, scratch));
Store(bcx, val.meta, expr::get_meta(bcx, scratch));
fldptr = scratch;
}
bcx = bind_irrefutable_pat(bcx,
&*f.node.pat,
MatchInput::from_val(fldptr),
Expand All @@ -1904,8 +1933,9 @@ pub fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
}
hir::PatTup(ref elems) => {
let repr = adt::represent_node(bcx, pat.id);
let val = adt::MaybeSizedValue::sized(val.val);
for (i, elem) in elems.iter().enumerate() {
let fldptr = adt::trans_field_ptr(bcx, &*repr, val.val, 0, i);
let fldptr = adt::trans_field_ptr(bcx, &*repr, val, 0, i);
bcx = bind_irrefutable_pat(
bcx,
&**elem,
Expand All @@ -1914,16 +1944,28 @@ pub fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
}
}
hir::PatBox(ref inner) => {
let llbox = Load(bcx, val.val);
let pat_ty = node_id_type(bcx, inner.id);
// Don't load DSTs, instead pass along a fat ptr
let val = if type_is_sized(tcx, pat_ty) {
Load(bcx, val.val)
} else {
val.val
};
bcx = bind_irrefutable_pat(
bcx, &**inner, MatchInput::from_val(llbox), cleanup_scope);
bcx, &**inner, MatchInput::from_val(val), cleanup_scope);
}
hir::PatRegion(ref inner, _) => {
let loaded_val = Load(bcx, val.val);
let pat_ty = node_id_type(bcx, inner.id);
// Don't load DSTs, instead pass along a fat ptr
let val = if type_is_sized(tcx, pat_ty) {
Load(bcx, val.val)
} else {
val.val
};
bcx = bind_irrefutable_pat(
bcx,
&**inner,
MatchInput::from_val(loaded_val),
MatchInput::from_val(val),
cleanup_scope);
}
hir::PatVec(ref before, ref slice, ref after) => {
Expand Down
127 changes: 116 additions & 11 deletions src/librustc_trans/trans/adt.rs
Expand Up @@ -43,6 +43,7 @@

pub use self::Repr::*;

use std;
use std::rc::Rc;

use llvm::{ValueRef, True, IntEQ, IntNE};
Expand All @@ -60,6 +61,7 @@ use trans::cleanup::CleanupMethods;
use trans::common::*;
use trans::datum;
use trans::debuginfo::DebugLoc;
use trans::glue;
use trans::machine;
use trans::monomorphize;
use trans::type_::Type;
Expand Down Expand Up @@ -153,6 +155,32 @@ pub struct Struct<'tcx> {
pub fields: Vec<Ty<'tcx>>,
}

/// An LLVM value paired with optional pointee metadata (per the commit
/// intent: a length for slices/str or a vtable pointer for trait objects).
/// A sized value carries a null metadata slot; `has_meta` tells the two
/// cases apart.
#[derive(Copy, Clone)]
pub struct MaybeSizedValue {
    pub value: ValueRef,
    pub meta: ValueRef,
}

impl MaybeSizedValue {
    /// Wraps a value of statically-known size; the metadata slot stays null.
    pub fn sized(value: ValueRef) -> MaybeSizedValue {
        MaybeSizedValue {
            value: value,
            meta: std::ptr::null_mut(),
        }
    }

    /// Pairs an unsized value with its metadata (length or vtable).
    pub fn unsized_(value: ValueRef, meta: ValueRef) -> MaybeSizedValue {
        MaybeSizedValue {
            value: value,
            meta: meta,
        }
    }

    /// Whether metadata is attached, i.e. the value is unsized.
    pub fn has_meta(&self) -> bool {
        !self.meta.is_null()
    }
}

/// Convenience for `represent_type`. There should probably be more or
/// these, for places in trans where the `Ty` isn't directly
/// available.
Expand Down Expand Up @@ -976,7 +1004,7 @@ pub fn trans_set_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>,
}
General(ity, ref cases, dtor) => {
if dtor_active(dtor) {
let ptr = trans_field_ptr(bcx, r, val, discr,
let ptr = trans_field_ptr(bcx, r, MaybeSizedValue::sized(val), discr,
cases[discr as usize].fields.len() - 2);
Store(bcx, C_u8(bcx.ccx(), DTOR_NEEDED), ptr);
}
Expand Down Expand Up @@ -1037,7 +1065,7 @@ pub fn num_args(r: &Repr, discr: Disr) -> usize {

/// Access a field, at a point when the value's case is known.
pub fn trans_field_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>,
val: ValueRef, discr: Disr, ix: usize) -> ValueRef {
val: MaybeSizedValue, discr: Disr, ix: usize) -> ValueRef {
// Note: if this ever needs to generate conditionals (e.g., if we
// decide to do some kind of cdr-coding-like non-unique repr
// someday), it will need to return a possibly-new bcx as well.
Expand All @@ -1060,13 +1088,13 @@ pub fn trans_field_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>,
assert_eq!(machine::llsize_of_alloc(bcx.ccx(), ty), 0);
// The contents of memory at this pointer can't matter, but use
// the value that's "reasonable" in case of pointer comparison.
PointerCast(bcx, val, ty.ptr_to())
PointerCast(bcx, val.value, ty.ptr_to())
}
RawNullablePointer { nndiscr, nnty, .. } => {
assert_eq!(ix, 0);
assert_eq!(discr, nndiscr);
let ty = type_of::type_of(bcx.ccx(), nnty);
PointerCast(bcx, val, ty.ptr_to())
PointerCast(bcx, val.value, ty.ptr_to())
}
StructWrappedNullablePointer { ref nonnull, nndiscr, .. } => {
assert_eq!(discr, nndiscr);
Expand All @@ -1075,18 +1103,94 @@ pub fn trans_field_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>,
}
}

pub fn struct_field_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, st: &Struct<'tcx>, val: ValueRef,
pub fn struct_field_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, st: &Struct<'tcx>, val: MaybeSizedValue,
ix: usize, needs_cast: bool) -> ValueRef {
let val = if needs_cast {
let ptr_val = if needs_cast {
let ccx = bcx.ccx();
let fields = st.fields.iter().map(|&ty| type_of::type_of(ccx, ty)).collect::<Vec<_>>();
let fields = st.fields.iter().map(|&ty| {
type_of::in_memory_type_of(ccx, ty)
}).collect::<Vec<_>>();
let real_ty = Type::struct_(ccx, &fields[..], st.packed);
PointerCast(bcx, val, real_ty.ptr_to())
PointerCast(bcx, val.value, real_ty.ptr_to())
} else {
val
val.value
};

StructGEP(bcx, val, ix)
let fty = st.fields[ix];
// Simple case - we can just GEP the field
// * First field - Always aligned properly
// * Packed struct - There is no alignment padding
// * Field is sized - pointer is properly aligned already
if ix == 0 || st.packed || type_is_sized(bcx.tcx(), fty) {
return StructGEP(bcx, ptr_val, ix);
}

// If the type of the last field is [T] or str, then we don't need to do
// any adjustments
match fty.sty {
ty::TySlice(..) | ty::TyStr => {
return StructGEP(bcx, ptr_val, ix);
}
_ => ()
}

// There's no metadata available, log the case and just do the GEP.
if !val.has_meta() {
debug!("Unsized field `{}`, of `{}` has no metadata for adjustment",
ix,
bcx.val_to_string(ptr_val));
return StructGEP(bcx, ptr_val, ix);
}

let dbloc = DebugLoc::None;

// We need to get the pointer manually now.
// We do this by casting to a *i8, then offsetting it by the appropriate amount.
// We do this instead of, say, simply adjusting the pointer from the result of a GEP
// because the field may have an arbitrary alignment in the LLVM representation
// anyway.
//
// To demonstrate:
// struct Foo<T: ?Sized> {
// x: u16,
// y: T
// }
//
// The type Foo<Foo<Trait>> is represented in LLVM as { u16, { u16, u8 }}, meaning that
// the `y` field has 16-bit alignment.

let meta = val.meta;

// st.size is the size of the sized portion of the struct. So the position
// exactly after it is the offset for unaligned data.
let unaligned_offset = C_uint(bcx.ccx(), st.size);

// Get the alignment of the field
let (_, align) = glue::size_and_align_of_dst(bcx, fty, meta);

// Bump the unaligned offset up to the appropriate alignment using the
// following expression:
//
// (unaligned offset + (align - 1)) & -align

// Calculate offset
let align_sub_1 = Sub(bcx, align, C_uint(bcx.ccx(), 1u64), dbloc);
let offset = And(bcx,
Add(bcx, unaligned_offset, align_sub_1, dbloc),
Neg(bcx, align, dbloc),
dbloc);

debug!("struct_field_ptr: DST field offset: {}",
bcx.val_to_string(offset));

// Cast and adjust pointer
let byte_ptr = PointerCast(bcx, ptr_val, Type::i8p(bcx.ccx()));
let byte_ptr = GEP(bcx, byte_ptr, &[offset]);

// Finally, cast back to the type expected
let ll_fty = type_of::in_memory_type_of(bcx.ccx(), fty);
debug!("struct_field_ptr: Field type is {}", ll_fty.to_string());
PointerCast(bcx, byte_ptr, ll_fty.ptr_to())
}

pub fn fold_variants<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
Expand Down Expand Up @@ -1168,7 +1272,8 @@ pub fn trans_drop_flag_ptr<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
cleanup::CustomScope(custom_cleanup_scope), (), |_, bcx, _| bcx
));
bcx = fold_variants(bcx, r, val, |variant_cx, st, value| {
let ptr = struct_field_ptr(variant_cx, st, value, (st.fields.len() - 1), false);
let ptr = struct_field_ptr(variant_cx, st, MaybeSizedValue::sized(value),
(st.fields.len() - 1), false);
datum::Datum::new(ptr, ptr_ty, datum::Lvalue::new("adt::trans_drop_flag_ptr"))
.store_to(variant_cx, scratch.val)
});
Expand Down

0 comments on commit a2557d4

Please sign in to comment.