Skip to content

Commit

Permalink
Auto merge of #50612 - Zoxc:thin-slice, r=michaelwoerister
Browse files Browse the repository at this point in the history
Make &Slice a thin pointer

Split out from #50395

r? @michaelwoerister
  • Loading branch information
bors committed May 28, 2018
2 parents d0456c6 + fb4e3b6 commit 68e0e58
Show file tree
Hide file tree
Showing 5 changed files with 133 additions and 49 deletions.
59 changes: 34 additions & 25 deletions src/libarena/lib.rs
Expand Up @@ -314,17 +314,15 @@ impl DroplessArena {
false
}

fn align_for<T>(&self) {
let align = mem::align_of::<T>();
fn align(&self, align: usize) {
let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
self.ptr.set(final_address as *mut u8);
assert!(self.ptr <= self.end);
}

#[inline(never)]
#[cold]
fn grow<T>(&self, n: usize) {
let needed_bytes = n * mem::size_of::<T>();
fn grow(&self, needed_bytes: usize) {
unsafe {
let mut chunks = self.chunks.borrow_mut();
let (chunk, mut new_capacity);
Expand Down Expand Up @@ -356,25 +354,38 @@ impl DroplessArena {
}

#[inline]
pub fn alloc<T>(&self, object: T) -> &mut T {
pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
unsafe {
assert!(!mem::needs_drop::<T>());
assert!(mem::size_of::<T>() != 0);
assert!(bytes != 0);

self.align(align);

self.align_for::<T>();
let future_end = intrinsics::arith_offset(self.ptr.get(), mem::size_of::<T>() as isize);
let future_end = intrinsics::arith_offset(self.ptr.get(), bytes as isize);
if (future_end as *mut u8) >= self.end.get() {
self.grow::<T>(1)
self.grow(bytes);
}

let ptr = self.ptr.get();
// Set the pointer past ourselves
self.ptr.set(
intrinsics::arith_offset(self.ptr.get(), mem::size_of::<T>() as isize) as *mut u8,
intrinsics::arith_offset(self.ptr.get(), bytes as isize) as *mut u8,
);
slice::from_raw_parts_mut(ptr, bytes)
}
}

#[inline]
pub fn alloc<T>(&self, object: T) -> &mut T {
assert!(!mem::needs_drop::<T>());

let mem = self.alloc_raw(
mem::size_of::<T>(),
mem::align_of::<T>()) as *mut _ as *mut T;

unsafe {
// Write into uninitialized memory.
ptr::write(ptr as *mut T, object);
&mut *(ptr as *mut T)
ptr::write(mem, object);
&mut *mem
}
}

Expand All @@ -393,21 +404,13 @@ impl DroplessArena {
assert!(!mem::needs_drop::<T>());
assert!(mem::size_of::<T>() != 0);
assert!(slice.len() != 0);
self.align_for::<T>();

let future_end = unsafe {
intrinsics::arith_offset(self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize)
};
if (future_end as *mut u8) >= self.end.get() {
self.grow::<T>(slice.len());
}
let mem = self.alloc_raw(
slice.len() * mem::size_of::<T>(),
mem::align_of::<T>()) as *mut _ as *mut T;

unsafe {
let arena_slice = slice::from_raw_parts_mut(self.ptr.get() as *mut T, slice.len());
self.ptr.set(intrinsics::arith_offset(
self.ptr.get(),
(slice.len() * mem::size_of::<T>()) as isize,
) as *mut u8);
let arena_slice = slice::from_raw_parts_mut(mem, slice.len());
arena_slice.copy_from_slice(slice);
arena_slice
}
Expand Down Expand Up @@ -464,6 +467,12 @@ impl SyncDroplessArena {
self.lock.lock().in_arena(ptr)
}

/// Allocates `bytes` bytes with alignment `align` from the locked
/// inner arena and hands the storage back as a byte slice.
#[inline(always)]
pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
    // The reference returned by the inner arena is tied to the lock
    // guard's lifetime; going through a raw pointer widens it to the
    // arena's own lifetime. SAFETY presumed: the arena never frees or
    // moves its chunks while it is alive — confirm against DroplessArena.
    let raw: *mut [u8] = self.lock.lock().alloc_raw(bytes, align);
    unsafe { &mut *raw }
}

#[inline(always)]
pub fn alloc<T>(&self, object: T) -> &mut T {
// Extend the lifetime of the result since it's limited to the lock guard
Expand Down
1 change: 1 addition & 0 deletions src/librustc/lib.rs
Expand Up @@ -54,6 +54,7 @@
#![feature(macro_vis_matcher)]
#![feature(never_type)]
#![feature(exhaustive_patterns)]
#![feature(extern_types)]
#![feature(non_exhaustive)]
#![feature(proc_macro_internals)]
#![feature(quote)]
Expand Down
33 changes: 21 additions & 12 deletions src/librustc/ty/context.rs
Expand Up @@ -794,6 +794,12 @@ impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for TypeckTables<'gcx> {

impl<'tcx> CommonTypes<'tcx> {
fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> {
// Ensure our type representation does not grow
#[cfg(target_pointer_width = "64")]
assert!(mem::size_of::<ty::TypeVariants>() <= 24);
#[cfg(target_pointer_width = "64")]
assert!(mem::size_of::<ty::TyS>() <= 32);

let mk = |sty| CtxtInterners::intern_ty(interners, interners, sty);
let mk_region = |r| {
if let Some(r) = interners.region.borrow().get(&r) {
Expand Down Expand Up @@ -2056,9 +2062,8 @@ for Interned<'tcx, Slice<Goal<'tcx>>> {

macro_rules! intern_method {
($lt_tcx:tt, $name:ident: $method:ident($alloc:ty,
$alloc_method:ident,
$alloc_method:expr,
$alloc_to_key:expr,
$alloc_to_ret:expr,
$keep_in_local_tcx:expr) -> $ty:ty) => {
impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> {
pub fn $method(self, v: $alloc) -> &$lt_tcx $ty {
Expand All @@ -2081,7 +2086,7 @@ macro_rules! intern_method {
v);
}

let i = ($alloc_to_ret)(self.interners.arena.$alloc_method(v));
let i = $alloc_method(&self.interners.arena, v);
interner.insert(Interned(i));
i
} else {
Expand All @@ -2094,7 +2099,9 @@ macro_rules! intern_method {
let v = unsafe {
mem::transmute(v)
};
let i = ($alloc_to_ret)(self.global_interners.arena.$alloc_method(v));
let i: &$lt_tcx $ty = $alloc_method(&self.global_interners.arena, v);
// Cast to 'gcx
let i = unsafe { mem::transmute(i) };
interner.insert(Interned(i));
i
}
Expand All @@ -2121,8 +2128,10 @@ macro_rules! direct_interners {

intern_method!(
$lt_tcx,
$name: $method($ty, alloc, |x| x, |x| x, $keep_in_local_tcx) -> $ty
);)+
$name: $method($ty,
|a: &$lt_tcx SyncDroplessArena, v| -> &$lt_tcx $ty { a.alloc(v) },
|x| x,
$keep_in_local_tcx) -> $ty);)+
}
}

Expand All @@ -2137,10 +2146,11 @@ direct_interners!('tcx,

macro_rules! slice_interners {
($($field:ident: $method:ident($ty:ident)),+) => (
$(intern_method!('tcx, $field: $method(&[$ty<'tcx>], alloc_slice, Deref::deref,
|xs: &[$ty]| -> &Slice<$ty> {
unsafe { mem::transmute(xs) }
}, |xs: &[$ty]| xs.iter().any(keep_local)) -> Slice<$ty<'tcx>>);)+
$(intern_method!( 'tcx, $field: $method(
&[$ty<'tcx>],
|a, v| Slice::from_arena(a, v),
Deref::deref,
|xs: &[$ty]| xs.iter().any(keep_local)) -> Slice<$ty<'tcx>>);)+
)
}

Expand All @@ -2162,9 +2172,8 @@ intern_method! {
'tcx,
canonical_var_infos: _intern_canonical_var_infos(
&[CanonicalVarInfo],
alloc_slice,
|a, v| Slice::from_arena(a, v),
Deref::deref,
|xs: &[CanonicalVarInfo]| -> &Slice<CanonicalVarInfo> { unsafe { mem::transmute(xs) } },
|_xs: &[CanonicalVarInfo]| -> bool { false }
) -> Slice<CanonicalVarInfo>
}
Expand Down
87 changes: 76 additions & 11 deletions src/librustc/ty/mod.rs
Expand Up @@ -36,6 +36,7 @@ use ty::util::{IntTypeExt, Discr};
use ty::walk::TypeWalker;
use util::captures::Captures;
use util::nodemap::{NodeSet, DefIdMap, FxHashMap};
use arena::SyncDroplessArena;

use serialize::{self, Encodable, Encoder};
use std::cell::RefCell;
Expand Down Expand Up @@ -582,54 +583,113 @@ impl <'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for Ty<'tcx> {
}
}

extern {
/// A dummy type used to force Slice to by unsized without requiring fat pointers
type OpaqueSliceContents;
}

/// A wrapper for slices with the additional invariant
/// that the slice is interned and no other slice with
/// the same contents can exist in the same context.
/// This means we can use pointer + length for both
/// This means we can use pointer for both
/// equality comparisons and hashing.
#[derive(Debug, RustcEncodable)]
pub struct Slice<T>([T]);
#[repr(C)]
pub struct Slice<T> {
len: usize,
data: [T; 0],
opaque: OpaqueSliceContents,
}

impl<T: Copy> Slice<T> {
    /// Copies `slice` into `arena`, laying it out as a `Slice<T>`:
    /// a `usize` length header, padded up to `T`'s alignment, followed
    /// by the elements.
    ///
    /// `T` must be `Copy` (the arena never runs destructors) and
    /// zero-sized types and empty slices are rejected by the asserts;
    /// callers handle the empty case separately.
    #[inline]
    fn from_arena<'tcx>(arena: &'tcx SyncDroplessArena, slice: &[T]) -> &'tcx Slice<T> {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);
        assert!(slice.len() != 0);

        // Align up the size of the len (usize) field
        let align = mem::align_of::<T>();
        let align_mask = align - 1;
        let offset = mem::size_of::<usize>();
        let offset = (offset + align_mask) & !align_mask;

        // Total allocation: padded header plus the element payload.
        let size = offset + slice.len() * mem::size_of::<T>();

        // The block must satisfy both the header's (`usize`) and the
        // elements' (`T`) alignment requirements.
        let mem = arena.alloc_raw(
            size,
            cmp::max(mem::align_of::<T>(), mem::align_of::<usize>()));
        unsafe {
            let result = &mut *(mem.as_mut_ptr() as *mut Slice<T>);
            // Write the length
            result.len = slice.len();

            // Write the elements
            let arena_slice = slice::from_raw_parts_mut(result.data.as_mut_ptr(), result.len);
            arena_slice.copy_from_slice(slice);

            result
        }
    }
}

impl<T: fmt::Debug> fmt::Debug for Slice<T> {
    /// Formats the interned slice exactly like the underlying `[T]`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // `Deref` gives us the `&[T]` view; delegate to its Debug impl.
        fmt::Debug::fmt(&**self, f)
    }
}

impl<T: Encodable> Encodable for Slice<T> {
    /// Encodes the interned slice as a plain `[T]`; interning is a
    /// runtime-only property and is not serialized.
    #[inline]
    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
        let contents: &[T] = self;
        contents.encode(s)
    }
}

impl<T> Ord for Slice<T> where T: Ord {
    /// Compares element-wise, with a fast path: interned slices are
    /// unique, so `self == other` (pointer identity) implies equality
    /// without touching the elements.
    fn cmp(&self, other: &Slice<T>) -> Ordering {
        if self == other { Ordering::Equal } else {
            <[T] as Ord>::cmp(&**self, &**other)
        }
    }
}

impl<T> PartialOrd for Slice<T> where T: PartialOrd {
    /// Compares element-wise, short-circuiting to `Equal` when the two
    /// references are pointer-identical (the interning invariant).
    fn partial_cmp(&self, other: &Slice<T>) -> Option<Ordering> {
        if self == other { Some(Ordering::Equal) } else {
            <[T] as PartialOrd>::partial_cmp(&**self, &**other)
        }
    }
}

impl<T> PartialEq for Slice<T> {
impl<T: PartialEq> PartialEq for Slice<T> {
#[inline]
fn eq(&self, other: &Slice<T>) -> bool {
(&self.0 as *const [T]) == (&other.0 as *const [T])
(self as *const _) == (other as *const _)
}
}
impl<T> Eq for Slice<T> {}
impl<T: Eq> Eq for Slice<T> {}

impl<T> Hash for Slice<T> {
    /// Hashes only the address: interning guarantees one allocation per
    /// distinct contents, so the address determines the value.
    #[inline]
    fn hash<H: Hasher>(&self, s: &mut H) {
        (self as *const Slice<T>).hash(s)
    }
}

impl<T> Deref for Slice<T> {
type Target = [T];
#[inline(always)]
fn deref(&self) -> &[T] {
&self.0
unsafe {
slice::from_raw_parts(self.data.as_ptr(), self.len)
}
}
}

impl<'a, T> IntoIterator for &'a Slice<T> {
type Item = &'a T;
type IntoIter = <&'a [T] as IntoIterator>::IntoIter;
#[inline(always)]
fn into_iter(self) -> Self::IntoIter {
self[..].iter()
}
Expand All @@ -638,9 +698,14 @@ impl<'a, T> IntoIterator for &'a Slice<T> {
impl<'tcx> serialize::UseSpecializedDecodable for &'tcx Slice<Ty<'tcx>> {}

impl<T> Slice<T> {
#[inline(always)]
pub fn empty<'a>() -> &'a Slice<T> {
#[repr(align(64), C)]
struct EmptySlice([u8; 64]);
static EMPTY_SLICE: EmptySlice = EmptySlice([0; 64]);
assert!(mem::align_of::<T>() <= 64);
unsafe {
mem::transmute(slice::from_raw_parts(0x1 as *const T, 0))
&*(&EMPTY_SLICE as *const _ as *const Slice<T>)
}
}
}
Expand Down
2 changes: 1 addition & 1 deletion src/test/mir-opt/basic_assignment.rs
Expand Up @@ -48,7 +48,7 @@ fn main() {
// _2 = move _3;
// StorageDead(_3);
// StorageLive(_4);
// UserAssertTy(Canonical { variables: Slice([]), value: std::option::Option<std::boxed::Box<u32>> }, _4);
// UserAssertTy(Canonical { variables: [], value: std::option::Option<std::boxed::Box<u32>> }, _4);
// _4 = std::option::Option<std::boxed::Box<u32>>::None;
// StorageLive(_5);
// StorageLive(_6);
Expand Down

0 comments on commit 68e0e58

Please sign in to comment.