Auto merge of #41773 - frewsxcv:rollup, r=frewsxcv
Rollup of 9 pull requests

- Successful merges: #41064, #41307, #41512, #41582, #41678, #41722, #41734, #41761, #41763
- Failed merges:
bors committed May 5, 2017
2 parents f420965 + 5bed9dc commit 42a4f37
Showing 75 changed files with 1,490 additions and 950 deletions.
2 changes: 1 addition & 1 deletion src/doc/nomicon
31 changes: 13 additions & 18 deletions src/liballoc/arc.rs
@@ -277,8 +277,7 @@ impl<T> Arc<T> {
atomic::fence(Acquire);

unsafe {
- let ptr = *this.ptr;
- let elem = ptr::read(&(*ptr).data);
+ let elem = ptr::read(&this.ptr.as_ref().data);

// Make a weak pointer to clean up the implicit strong-weak reference
let _weak = Weak { ptr: this.ptr };
@@ -306,7 +305,7 @@ impl<T> Arc<T> {
/// ```
#[stable(feature = "rc_raw", since = "1.17.0")]
pub fn into_raw(this: Self) -> *const T {
- let ptr = unsafe { &(**this.ptr).data as *const _ };
+ let ptr: *const T = &*this;
mem::forget(this);
ptr
}
@@ -345,7 +344,7 @@ impl<T> Arc<T> {
// `data` field from the pointer.
let ptr = (ptr as *const u8).offset(-offset_of!(ArcInner<T>, data));
Arc {
- ptr: Shared::new(ptr as *const _),
+ ptr: Shared::new(ptr as *mut u8 as *mut _),
}
}
}
@@ -452,17 +451,17 @@ impl<T: ?Sized> Arc<T> {
// `ArcInner` structure itself is `Sync` because the inner data is
// `Sync` as well, so we're ok loaning out an immutable pointer to these
// contents.
- unsafe { &**self.ptr }
+ unsafe { self.ptr.as_ref() }
}

// Non-inlined part of `drop`.
#[inline(never)]
unsafe fn drop_slow(&mut self) {
- let ptr = self.ptr.as_mut_ptr();
+ let ptr = self.ptr.as_ptr();

// Destroy the data at this time, even though we may not free the box
// allocation itself (there may still be weak pointers lying around).
- ptr::drop_in_place(&mut (*ptr).data);
+ ptr::drop_in_place(&mut self.ptr.as_mut().data);

if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
@@ -488,9 +487,7 @@ impl<T: ?Sized> Arc<T> {
/// assert!(!Arc::ptr_eq(&five, &other_five));
/// ```
pub fn ptr_eq(this: &Self, other: &Self) -> bool {
- let this_ptr: *const ArcInner<T> = *this.ptr;
- let other_ptr: *const ArcInner<T> = *other.ptr;
- this_ptr == other_ptr
+ this.ptr.as_ptr() == other.ptr.as_ptr()
}
}

@@ -621,7 +618,7 @@ impl<T: Clone> Arc<T> {
// here (due to zeroing) because data is no longer accessed by
// other threads (due to there being no more strong refs at this
// point).
- let mut swap = Arc::new(ptr::read(&(**weak.ptr).data));
+ let mut swap = Arc::new(ptr::read(&weak.ptr.as_ref().data));
mem::swap(this, &mut swap);
mem::forget(swap);
}
@@ -634,8 +631,7 @@ impl<T: Clone> Arc<T> {
// As with `get_mut()`, the unsafety is ok because our reference was
// either unique to begin with, or became one upon cloning the contents.
unsafe {
- let inner = &mut *this.ptr.as_mut_ptr();
- &mut inner.data
+ &mut this.ptr.as_mut().data
}
}
}
@@ -677,8 +673,7 @@ impl<T: ?Sized> Arc<T> {
// the Arc itself to be `mut`, so we're returning the only possible
// reference to the inner data.
unsafe {
- let inner = &mut *this.ptr.as_mut_ptr();
- Some(&mut inner.data)
+ Some(&mut this.ptr.as_mut().data)
}
} else {
None
@@ -878,7 +873,7 @@ impl<T: ?Sized> Weak<T> {
#[inline]
fn inner(&self) -> &ArcInner<T> {
// See comments above for why this is "safe"
- unsafe { &**self.ptr }
+ unsafe { self.ptr.as_ref() }
}
}

@@ -962,7 +957,7 @@ impl<T: ?Sized> Drop for Weak<T> {
/// assert!(other_weak_foo.upgrade().is_none());
/// ```
fn drop(&mut self) {
- let ptr = *self.ptr;
+ let ptr = self.ptr.as_ptr();

// If we find out that we were the last weak pointer, then its time to
// deallocate the data entirely. See the discussion in Arc::drop() about
@@ -1143,7 +1138,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> fmt::Pointer for Arc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- fmt::Pointer::fmt(&*self.ptr, f)
+ fmt::Pointer::fmt(&self.ptr, f)
}
}

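Across arc.rs, the change replaces raw derefs of the unstable `Shared` pointer (`*self.ptr`, `&**self.ptr`, `as_mut_ptr()`) with its `as_ptr`/`as_ref`/`as_mut` accessors, so the non-null invariant lives in the pointer type rather than at every call site. A minimal sketch of the same access pattern, using the stable `NonNull` type (which later replaced `Shared` and exposes the analogous accessors) and invented `Handle`/`Inner` names rather than the real `Arc`/`ArcInner` internals:

```rust
use std::ptr::NonNull;

// Hypothetical stand-ins for Arc/ArcInner, for illustration only.
struct Inner<T> {
    data: T,
}

struct Handle<T> {
    ptr: NonNull<Inner<T>>,
}

impl<T> Handle<T> {
    fn new(value: T) -> Self {
        // Allocate the inner block and keep a guaranteed non-null pointer to it.
        let boxed = Box::new(Inner { data: value });
        Handle { ptr: NonNull::from(Box::leak(boxed)) }
    }

    fn data(&self) -> &T {
        // `as_ref` replaces the old `&**self.ptr` double-deref pattern.
        unsafe { &self.ptr.as_ref().data }
    }

    fn raw(&self) -> *const Inner<T> {
        // `as_ptr` replaces `*self.ptr` for pointer comparisons, as in Arc::ptr_eq.
        self.ptr.as_ptr()
    }
}

impl<T> Drop for Handle<T> {
    fn drop(&mut self) {
        // Rebuild the Box so the allocation is freed exactly once.
        unsafe { drop(Box::from_raw(self.ptr.as_ptr())) }
    }
}

fn main() {
    let a = Handle::new(5u32);
    let b = Handle::new(5u32);
    assert_eq!(*a.data(), *b.data());
    // Distinct allocations compare unequal by pointer identity.
    assert!(a.raw() != b.raw());
}
```
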
2 changes: 1 addition & 1 deletion src/liballoc/boxed.rs
@@ -156,7 +156,7 @@ fn make_place<T>() -> IntermediateBox<T> {
let align = mem::align_of::<T>();

let p = if size == 0 {
- heap::EMPTY as *mut u8
+ mem::align_of::<T>() as *mut u8
} else {
let p = unsafe { heap::allocate(size, align) };
if p.is_null() {
6 changes: 4 additions & 2 deletions src/liballoc/heap.rs
@@ -138,7 +138,9 @@ pub fn usable_size(size: usize, align: usize) -> usize {
///
/// This preserves the non-null invariant for types like `Box<T>`. The address
/// may overlap with non-zero-size memory allocations.
- pub const EMPTY: *mut () = 0x1 as *mut ();
+ #[rustc_deprecated(since = "1.19", reason = "Use Unique/Shared::empty() instead")]
+ #[unstable(feature = "heap_api", issue = "27700")]
+ pub const EMPTY: *mut () = 1 as *mut ();

/// The allocator for unique pointers.
// This function must not unwind. If it does, MIR trans will fail.
@@ -147,7 +149,7 @@ pub const EMPTY: *mut () = 0x1 as *mut ();
#[inline]
unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
if size == 0 {
- EMPTY as *mut u8
+ align as *mut u8
} else {
let ptr = allocate(size, align);
if ptr.is_null() {
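
Both boxed.rs and heap.rs stop returning the fixed `heap::EMPTY` address for zero-size requests and instead hand back the alignment cast to a pointer. Any non-null, well-aligned address works here, because a zero-size allocation is never read or written through. A small sketch of the idea under that assumption (`dangling_for` is an invented name, not a liballoc function):

```rust
use std::mem;

// Illustrative only: produce a dangling but non-null, well-aligned pointer
// for a zero-sized "allocation". The caller must never dereference it.
fn dangling_for<T>() -> *mut T {
    // align_of::<T>() is a non-zero power of two, so it doubles as a
    // non-null address that is correctly aligned for T.
    mem::align_of::<T>() as *mut T
}

fn main() {
    let p = dangling_for::<u64>();
    assert!(!p.is_null());
    assert_eq!(p as usize % mem::align_of::<u64>(), 0);
}
```

On current Rust the same idea is spelled `NonNull::dangling()`, which is essentially what the `Unique::empty()`/`Shared::empty()` named in the deprecation note provide.
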
28 changes: 13 additions & 15 deletions src/liballoc/raw_vec.rs
@@ -22,13 +22,13 @@ use core::cmp;
/// involved. This type is excellent for building your own data structures like Vec and VecDeque.
/// In particular:
///
- /// * Produces heap::EMPTY on zero-sized types
- /// * Produces heap::EMPTY on zero-length allocations
+ /// * Produces Unique::empty() on zero-sized types
+ /// * Produces Unique::empty() on zero-length allocations
/// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics)
/// * Guards against 32-bit systems allocating more than isize::MAX bytes
/// * Guards against overflowing your length
/// * Aborts on OOM
- /// * Avoids freeing heap::EMPTY
+ /// * Avoids freeing Unique::empty()
/// * Contains a ptr::Unique and thus endows the user with all related benefits
///
/// This type does not in anyway inspect the memory that it manages. When dropped it *will*
@@ -55,15 +55,13 @@ impl<T> RawVec<T> {
/// it makes a RawVec with capacity `usize::MAX`. Useful for implementing
/// delayed allocation.
pub fn new() -> Self {
- unsafe {
- // !0 is usize::MAX. This branch should be stripped at compile time.
- let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
+ // !0 is usize::MAX. This branch should be stripped at compile time.
+ let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };

- // heap::EMPTY doubles as "unallocated" and "zero-sized allocation"
- RawVec {
- ptr: Unique::new(heap::EMPTY as *mut T),
- cap: cap,
- }
- }
+ // Unique::empty() doubles as "unallocated" and "zero-sized allocation"
+ RawVec {
+ ptr: Unique::empty(),
+ cap: cap,
+ }
}

@@ -101,7 +99,7 @@ impl<T> RawVec<T> {

// handles ZSTs and `cap = 0` alike
let ptr = if alloc_size == 0 {
- heap::EMPTY as *mut u8
+ mem::align_of::<T>() as *mut u8
} else {
let align = mem::align_of::<T>();
let ptr = if zeroed {
@@ -148,10 +146,10 @@ impl<T> RawVec<T> {

impl<T> RawVec<T> {
/// Gets a raw pointer to the start of the allocation. Note that this is
- /// heap::EMPTY if `cap = 0` or T is zero-sized. In the former case, you must
+ /// Unique::empty() if `cap = 0` or T is zero-sized. In the former case, you must
/// be careful.
pub fn ptr(&self) -> *mut T {
- *self.ptr
+ self.ptr.as_ptr()
}

/// Gets the capacity of the allocation.
@@ -563,7 +561,7 @@ unsafe impl<#[may_dangle] T> Drop for RawVec<T> {

let num_bytes = elem_size * self.cap;
unsafe {
- heap::deallocate(*self.ptr as *mut _, num_bytes, align);
+ heap::deallocate(self.ptr() as *mut u8, num_bytes, align);
}
}
}
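
In raw_vec.rs, `Unique::empty()` becomes the single sentinel for both "not allocated yet" and "zero-sized element type", replacing `heap::EMPTY`. A rough sketch of that shape using the stable `NonNull::dangling()`; the `RawBuf` name is invented, deallocation is elided, and the real `RawVec` additionally guards against `isize::MAX` overflow and aborts on OOM, as its doc comment says:

```rust
use std::alloc::{alloc, Layout};
use std::mem;
use std::ptr::NonNull;

// Illustrative RawVec-like holder: a dangling pointer doubles as
// "unallocated" and "zero-sized allocation", just as in the diff.
struct RawBuf<T> {
    ptr: NonNull<T>,
    cap: usize,
}

impl<T> RawBuf<T> {
    fn new() -> Self {
        // For zero-sized T the capacity is effectively infinite; usize::MAX
        // mirrors the `!0` trick in the diff.
        let cap = if mem::size_of::<T>() == 0 { usize::MAX } else { 0 };
        RawBuf { ptr: NonNull::dangling(), cap }
    }

    fn with_capacity(cap: usize) -> Self {
        if cap == 0 || mem::size_of::<T>() == 0 {
            // No real allocation is needed; keep the dangling sentinel.
            return Self::new();
        }
        let layout = Layout::array::<T>(cap).expect("capacity overflow");
        let raw = unsafe { alloc(layout) } as *mut T;
        let ptr = NonNull::new(raw).expect("allocation failed");
        RawBuf { ptr, cap }
    }

    fn ptr(&self) -> *mut T {
        // `as_ptr` replaces the old `*self.ptr` deref of `Unique`.
        self.ptr.as_ptr()
    }
}

fn main() {
    let empty: RawBuf<u32> = RawBuf::new();
    let real: RawBuf<u32> = RawBuf::with_capacity(8);
    assert!(!empty.ptr().is_null());
    assert!(!real.ptr().is_null());
}
```
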
50 changes: 22 additions & 28 deletions src/liballoc/rc.rs
@@ -230,7 +230,7 @@ use core::cell::Cell;
use core::cmp::Ordering;
use core::fmt;
use core::hash::{Hash, Hasher};
- use core::intrinsics::{abort, assume};
+ use core::intrinsics::abort;
use core::marker;
use core::marker::Unsize;
use core::mem::{self, align_of_val, forget, size_of, size_of_val, uninitialized};
@@ -358,7 +358,7 @@ impl<T> Rc<T> {
/// ```
#[stable(feature = "rc_raw", since = "1.17.0")]
pub fn into_raw(this: Self) -> *const T {
- let ptr = unsafe { &mut (*this.ptr.as_mut_ptr()).value as *const _ };
+ let ptr: *const T = &*this;
mem::forget(this);
ptr
}
@@ -395,7 +395,7 @@ impl<T> Rc<T> {
pub unsafe fn from_raw(ptr: *const T) -> Self {
// To find the corresponding pointer to the `RcBox` we need to subtract the offset of the
// `value` field from the pointer.
- Rc { ptr: Shared::new((ptr as *const u8).offset(-offset_of!(RcBox<T>, value)) as *const _) }
+
+ let ptr = (ptr as *const u8).offset(-offset_of!(RcBox<T>, value));
+ Rc {
+ ptr: Shared::new(ptr as *mut u8 as *mut _)
+ }
}
}

@@ -451,7 +455,7 @@ impl<T> Rc<[T]> {
// Free the original allocation without freeing its (moved) contents.
box_free(Box::into_raw(value));

- Rc { ptr: Shared::new(ptr as *const _) }
+ Rc { ptr: Shared::new(ptr as *mut _) }
}
}
}
@@ -553,8 +557,9 @@ impl<T: ?Sized> Rc<T> {
#[stable(feature = "rc_unique", since = "1.4.0")]
pub fn get_mut(this: &mut Self) -> Option<&mut T> {
if Rc::is_unique(this) {
- let inner = unsafe { &mut *this.ptr.as_mut_ptr() };
- Some(&mut inner.value)
+ unsafe {
+ Some(&mut this.ptr.as_mut().value)
+ }
} else {
None
}
@@ -578,9 +583,7 @@ impl<T: ?Sized> Rc<T> {
/// assert!(!Rc::ptr_eq(&five, &other_five));
/// ```
pub fn ptr_eq(this: &Self, other: &Self) -> bool {
- let this_ptr: *const RcBox<T> = *this.ptr;
- let other_ptr: *const RcBox<T> = *other.ptr;
- this_ptr == other_ptr
+ this.ptr.as_ptr() == other.ptr.as_ptr()
}
}

@@ -623,7 +626,7 @@ impl<T: Clone> Rc<T> {
} else if Rc::weak_count(this) != 0 {
// Can just steal the data, all that's left is Weaks
unsafe {
- let mut swap = Rc::new(ptr::read(&(**this.ptr).value));
+ let mut swap = Rc::new(ptr::read(&this.ptr.as_ref().value));
mem::swap(this, &mut swap);
swap.dec_strong();
// Remove implicit strong-weak ref (no need to craft a fake
@@ -637,8 +640,9 @@ impl<T: Clone> Rc<T> {
// reference count is guaranteed to be 1 at this point, and we required
// the `Rc<T>` itself to be `mut`, so we're returning the only possible
// reference to the inner value.
- let inner = unsafe { &mut *this.ptr.as_mut_ptr() };
- &mut inner.value
+ unsafe {
+ &mut this.ptr.as_mut().value
+ }
}
}

@@ -683,12 +687,12 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
/// ```
fn drop(&mut self) {
unsafe {
- let ptr = self.ptr.as_mut_ptr();
+ let ptr = self.ptr.as_ptr();

self.dec_strong();
if self.strong() == 0 {
// destroy the contained object
- ptr::drop_in_place(&mut (*ptr).value);
+ ptr::drop_in_place(self.ptr.as_mut());

// remove the implicit "strong weak" pointer now that we've
// destroyed the contents.
@@ -925,7 +929,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> fmt::Pointer for Rc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- fmt::Pointer::fmt(&*self.ptr, f)
+ fmt::Pointer::fmt(&self.ptr, f)
}
}

@@ -1067,7 +1071,7 @@ impl<T: ?Sized> Drop for Weak<T> {
/// ```
fn drop(&mut self) {
unsafe {
- let ptr = *self.ptr;
+ let ptr = self.ptr.as_ptr();

self.dec_weak();
// the weak count starts at 1, and will only go to zero if all
@@ -1175,12 +1179,7 @@ impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
#[inline(always)]
fn inner(&self) -> &RcBox<T> {
unsafe {
- // Safe to assume this here, as if it weren't true, we'd be breaking
- // the contract anyway.
- // This allows the null check to be elided in the destructor if we
- // manipulated the reference count in the same function.
- assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
- &(**self.ptr)
+ self.ptr.as_ref()
}
}
}
@@ -1189,12 +1188,7 @@ impl<T: ?Sized> RcBoxPtr<T> for Weak<T> {
#[inline(always)]
fn inner(&self) -> &RcBox<T> {
unsafe {
- // Safe to assume this here, as if it weren't true, we'd be breaking
- // the contract anyway.
- // This allows the null check to be elided in the destructor if we
- // manipulated the reference count in the same function.
- assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
- &(**self.ptr)
+ self.ptr.as_ref()
}
}
}
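
Two patterns recur in the rc.rs hunks: the same `Shared` accessor migration shown for arc.rs, and the `into_raw`/`from_raw` pair, which hands out a pointer to the `value` field and later recovers the enclosing `RcBox` by subtracting that field's offset (the diff uses an `offset_of!` macro for this). A sketch of the round trip with a hypothetical `#[repr(C)]` container, where the offset is computed by hand in place of `offset_of!`:

```rust
use std::mem;

// Hypothetical counted box; #[repr(C)] gives a predictable field layout so
// the offset of `value` can be computed by hand for the example.
#[repr(C)]
struct CountedBox<T> {
    strong: usize,
    weak: usize,
    value: T,
}

fn into_raw<T>(this: Box<CountedBox<T>>) -> *const T {
    // Hand out a pointer to the field, not to the whole allocation.
    let ptr: *const T = &this.value;
    mem::forget(this);
    ptr
}

unsafe fn from_raw<T>(ptr: *const T) -> Box<CountedBox<T>> {
    // Offset of `value`: the two usize counters, rounded up to T's alignment.
    let header = 2 * mem::size_of::<usize>();
    let align = mem::align_of::<T>();
    let offset = (header + align - 1) / align * align;
    let base = (ptr as *const u8).sub(offset) as *mut CountedBox<T>;
    Box::from_raw(base)
}

fn main() {
    let raw = into_raw(Box::new(CountedBox { strong: 1, weak: 1, value: 42u64 }));
    let back = unsafe { from_raw(raw) };
    assert_eq!(back.value, 42);
}
```

The new `into_raw` body (`let ptr: *const T = &*this;`) leans on `Deref` to reach the same `value` address without naming the `RcBox` fields at all.
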
4 changes: 1 addition & 3 deletions src/libarena/lib.rs
@@ -31,7 +31,6 @@
#![feature(alloc)]
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
- #![feature(heap_api)]
#![feature(generic_param_attrs)]
#![feature(staged_api)]
#![cfg_attr(test, feature(test))]
@@ -48,7 +47,6 @@ use std::mem;
use std::ptr;
use std::slice;

- use alloc::heap;
use alloc::raw_vec::RawVec;

/// An arena that can hold objects of only one type.
@@ -140,7 +138,7 @@ impl<T> TypedArena<T> {
unsafe {
if mem::size_of::<T>() == 0 {
self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
- let ptr = heap::EMPTY as *mut T;
+ let ptr = mem::align_of::<T>() as *mut T;
// Don't drop the object. This `write` is equivalent to `forget`.
ptr::write(ptr, object);
&mut *ptr