fallout from NonZero/Unique/Shared changes
Gankra committed May 5, 2017
1 parent 6e2efe3 commit 4ff583b
Showing 16 changed files with 123 additions and 120 deletions.
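
The churn below is mechanical: `NonZero`, `Unique`, and `Shared` no longer dereference like raw pointers, so every `*ptr` / `&**ptr` spelling becomes an explicit `get()`, `as_ptr()`, `as_ref()`, or `as_mut()` call. A rough sketch of the wrapper shape the new call sites assume — the names follow the diff, but the bodies are inferred from usage here, not the actual libcore definitions:

```rust
use std::marker::PhantomData;

// Stand-ins inferred from the call sites in this diff; the real
// core::nonzero::NonZero and core::ptr::Shared differ in detail
// (e.g. NonZero is generic over any "zeroable" type, not just pointers).
pub struct NonZero<T>(T); // invariant: the wrapped value is never zero/null

impl<T: Copy> NonZero<T> {
    pub unsafe fn new(inner: T) -> Self { NonZero(inner) }
    pub fn get(&self) -> T { self.0 } // replaces the old `*nz` deref
}

pub struct Shared<T: ?Sized> {
    pointer: *const T, // never null, by construction
    _marker: PhantomData<*const T>,
}

impl<T: ?Sized> Shared<T> {
    pub unsafe fn new(ptr: *mut T) -> Self {
        Shared { pointer: ptr, _marker: PhantomData }
    }
    // Replaces both the old `*self.ptr` deref and `as_mut_ptr()`.
    pub fn as_ptr(&self) -> *mut T {
        self.pointer as *mut T
    }
    // Replaces `&**self.ptr`.
    pub unsafe fn as_ref(&self) -> &T {
        unsafe { &*self.pointer }
    }
    // Replaces `&mut *self.ptr.as_mut_ptr()`.
    pub unsafe fn as_mut(&mut self) -> &mut T {
        unsafe { &mut *(self.pointer as *mut T) }
    }
}
```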
31 changes: 13 additions & 18 deletions src/liballoc/arc.rs
@@ -277,8 +277,7 @@ impl<T> Arc<T> {
         atomic::fence(Acquire);
 
         unsafe {
-            let ptr = *this.ptr;
-            let elem = ptr::read(&(*ptr).data);
+            let elem = ptr::read(&this.ptr.as_ref().data);
 
             // Make a weak pointer to clean up the implicit strong-weak reference
             let _weak = Weak { ptr: this.ptr };
@@ -306,7 +305,7 @@ impl<T> Arc<T> {
     /// ```
     #[stable(feature = "rc_raw", since = "1.17.0")]
     pub fn into_raw(this: Self) -> *const T {
-        let ptr = unsafe { &(**this.ptr).data as *const _ };
+        let ptr: *const T = &*this;
         mem::forget(this);
         ptr
     }
@@ -345,7 +344,7 @@ impl<T> Arc<T> {
         // `data` field from the pointer.
         let ptr = (ptr as *const u8).offset(-offset_of!(ArcInner<T>, data));
         Arc {
-            ptr: Shared::new(ptr as *const _),
+            ptr: Shared::new(ptr as *mut u8 as *mut _),
         }
     }
 }
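
The `from_raw` hunk above recovers the `ArcInner` allocation from a bare data pointer by subtracting the field's byte offset. A self-contained sketch of the same arithmetic, using a hypothetical `Inner` type in place of `ArcInner` and the modern `std::mem::offset_of!` in place of liballoc's local `offset_of!` macro:

```rust
use std::mem::offset_of; // stable since Rust 1.77; the 2017 code used a local macro

struct Inner {
    strong: usize,
    weak: usize,
    data: u32,
}

fn main() {
    let inner: *mut Inner = Box::into_raw(Box::new(Inner { strong: 1, weak: 1, data: 42 }));
    unsafe {
        // `into_raw` hands out a pointer to the payload field only...
        let data_ptr: *const u32 = &(*inner).data;
        // ...and `from_raw` steps back to the start of the allocation
        // by subtracting the payload's byte offset within the struct.
        let recovered = (data_ptr as *const u8)
            .offset(-(offset_of!(Inner, data) as isize)) as *mut Inner;
        assert_eq!(recovered, inner);
        drop(Box::from_raw(recovered)); // free the allocation
    }
}
```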
@@ -452,17 +451,17 @@ impl<T: ?Sized> Arc<T> {
         // `ArcInner` structure itself is `Sync` because the inner data is
         // `Sync` as well, so we're ok loaning out an immutable pointer to these
         // contents.
-        unsafe { &**self.ptr }
+        unsafe { self.ptr.as_ref() }
     }
 
     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
-        let ptr = self.ptr.as_mut_ptr();
+        let ptr = self.ptr.as_ptr();
 
         // Destroy the data at this time, even though we may not free the box
         // allocation itself (there may still be weak pointers lying around).
-        ptr::drop_in_place(&mut (*ptr).data);
+        ptr::drop_in_place(&mut self.ptr.as_mut().data);
 
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
@@ -488,9 +487,7 @@ impl<T: ?Sized> Arc<T> {
     /// assert!(!Arc::ptr_eq(&five, &other_five));
     /// ```
     pub fn ptr_eq(this: &Self, other: &Self) -> bool {
-        let this_ptr: *const ArcInner<T> = *this.ptr;
-        let other_ptr: *const ArcInner<T> = *other.ptr;
-        this_ptr == other_ptr
+        this.ptr.as_ptr() == other.ptr.as_ptr()
     }
 }

@@ -621,7 +618,7 @@ impl<T: Clone> Arc<T> {
             // here (due to zeroing) because data is no longer accessed by
             // other threads (due to there being no more strong refs at this
             // point).
-            let mut swap = Arc::new(ptr::read(&(**weak.ptr).data));
+            let mut swap = Arc::new(ptr::read(&weak.ptr.as_ref().data));
             mem::swap(this, &mut swap);
             mem::forget(swap);
         }
@@ -634,8 +631,7 @@ impl<T: Clone> Arc<T> {
         // As with `get_mut()`, the unsafety is ok because our reference was
         // either unique to begin with, or became one upon cloning the contents.
         unsafe {
-            let inner = &mut *this.ptr.as_mut_ptr();
-            &mut inner.data
+            &mut this.ptr.as_mut().data
         }
     }
 }
@@ -677,8 +673,7 @@ impl<T: ?Sized> Arc<T> {
             // the Arc itself to be `mut`, so we're returning the only possible
             // reference to the inner data.
             unsafe {
-                let inner = &mut *this.ptr.as_mut_ptr();
-                Some(&mut inner.data)
+                Some(&mut this.ptr.as_mut().data)
             }
         } else {
             None
@@ -867,7 +862,7 @@ impl<T: ?Sized> Weak<T> {
     #[inline]
     fn inner(&self) -> &ArcInner<T> {
         // See comments above for why this is "safe"
-        unsafe { &**self.ptr }
+        unsafe { self.ptr.as_ref() }
     }
 }
 
@@ -951,7 +946,7 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// assert!(other_weak_foo.upgrade().is_none());
     /// ```
     fn drop(&mut self) {
-        let ptr = *self.ptr;
+        let ptr = self.ptr.as_ptr();
 
         // If we find out that we were the last weak pointer, then it's time to
        // deallocate the data entirely. See the discussion in Arc::drop() about
@@ -1132,7 +1127,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> fmt::Pointer for Arc<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Pointer::fmt(&*self.ptr, f)
+        fmt::Pointer::fmt(&self.ptr, f)
     }
 }
 
4 changes: 2 additions & 2 deletions src/liballoc/raw_vec.rs
@@ -151,7 +151,7 @@ impl<T> RawVec<T> {
     /// heap::EMPTY if `cap = 0` or T is zero-sized. In the former case, you must
     /// be careful.
     pub fn ptr(&self) -> *mut T {
-        *self.ptr
+        self.ptr.ptr()
     }
 
     /// Gets the capacity of the allocation.
@@ -563,7 +563,7 @@ unsafe impl<#[may_dangle] T> Drop for RawVec<T> {
 
         let num_bytes = elem_size * self.cap;
         unsafe {
-            heap::deallocate(*self.ptr as *mut _, num_bytes, align);
+            heap::deallocate(self.ptr() as *mut u8, num_bytes, align);
         }
     }
 }
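
The `Drop` hunk above recomputes the allocation size as `elem_size * self.cap` and frees with the element alignment. The same size/align pairing looks like this under today's `std::alloc` API (a sketch for illustration; the 2017 code used the unstable `heap` module):

```rust
use std::alloc::{alloc, dealloc, Layout};

fn main() {
    let cap = 16;
    // num_bytes = elem_size * cap, align = align_of::<u64>() -- the same
    // figures RawVec's Drop hands to heap::deallocate above.
    let layout = Layout::array::<u64>(cap).unwrap();
    unsafe {
        let ptr = alloc(layout) as *mut u64;
        assert!(!ptr.is_null());
        ptr.write(42); // use the buffer
        assert_eq!(ptr.read(), 42);
        // The layout passed to dealloc must match the one used to allocate.
        dealloc(ptr as *mut u8, layout);
    }
}
```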
50 changes: 22 additions & 28 deletions src/liballoc/rc.rs
@@ -230,7 +230,7 @@ use core::cell::Cell;
 use core::cmp::Ordering;
 use core::fmt;
 use core::hash::{Hash, Hasher};
-use core::intrinsics::{abort, assume};
+use core::intrinsics::abort;
 use core::marker;
 use core::marker::Unsize;
 use core::mem::{self, align_of_val, forget, size_of, size_of_val, uninitialized};
@@ -358,7 +358,7 @@ impl<T> Rc<T> {
     /// ```
     #[stable(feature = "rc_raw", since = "1.17.0")]
     pub fn into_raw(this: Self) -> *const T {
-        let ptr = unsafe { &mut (*this.ptr.as_mut_ptr()).value as *const _ };
+        let ptr: *const T = &*this;
         mem::forget(this);
         ptr
     }
@@ -395,7 +395,11 @@ impl<T> Rc<T> {
     pub unsafe fn from_raw(ptr: *const T) -> Self {
         // To find the corresponding pointer to the `RcBox` we need to subtract the offset of the
         // `value` field from the pointer.
-        Rc { ptr: Shared::new((ptr as *const u8).offset(-offset_of!(RcBox<T>, value)) as *const _) }
+
+        let ptr = (ptr as *const u8).offset(-offset_of!(RcBox<T>, value));
+        Rc {
+            ptr: Shared::new(ptr as *mut u8 as *mut _)
+        }
     }
 }
 
@@ -451,7 +455,7 @@ impl<T> Rc<[T]> {
             // Free the original allocation without freeing its (moved) contents.
             box_free(Box::into_raw(value));
 
-            Rc { ptr: Shared::new(ptr as *const _) }
+            Rc { ptr: Shared::new(ptr as *mut _) }
         }
     }
 }
@@ -553,8 +557,9 @@ impl<T: ?Sized> Rc<T> {
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn get_mut(this: &mut Self) -> Option<&mut T> {
         if Rc::is_unique(this) {
-            let inner = unsafe { &mut *this.ptr.as_mut_ptr() };
-            Some(&mut inner.value)
+            unsafe {
+                Some(&mut this.ptr.as_mut().value)
+            }
         } else {
             None
         }
@@ -578,9 +583,7 @@ impl<T: ?Sized> Rc<T> {
     /// assert!(!Rc::ptr_eq(&five, &other_five));
     /// ```
     pub fn ptr_eq(this: &Self, other: &Self) -> bool {
-        let this_ptr: *const RcBox<T> = *this.ptr;
-        let other_ptr: *const RcBox<T> = *other.ptr;
-        this_ptr == other_ptr
+        this.ptr.as_ptr() == other.ptr.as_ptr()
     }
 }
 
@@ -623,7 +626,7 @@ impl<T: Clone> Rc<T> {
         } else if Rc::weak_count(this) != 0 {
             // Can just steal the data, all that's left is Weaks
             unsafe {
-                let mut swap = Rc::new(ptr::read(&(**this.ptr).value));
+                let mut swap = Rc::new(ptr::read(&this.ptr.as_ref().value));
                 mem::swap(this, &mut swap);
                 swap.dec_strong();
                 // Remove implicit strong-weak ref (no need to craft a fake
@@ -637,8 +640,9 @@ impl<T: Clone> Rc<T> {
         // reference count is guaranteed to be 1 at this point, and we required
         // the `Rc<T>` itself to be `mut`, so we're returning the only possible
         // reference to the inner value.
-        let inner = unsafe { &mut *this.ptr.as_mut_ptr() };
-        &mut inner.value
+        unsafe {
+            &mut this.ptr.as_mut().value
+        }
     }
 }
 
@@ -683,12 +687,12 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let ptr = self.ptr.as_mut_ptr();
+            let ptr = self.ptr.as_ptr();
 
             self.dec_strong();
             if self.strong() == 0 {
                 // destroy the contained object
-                ptr::drop_in_place(&mut (*ptr).value);
+                ptr::drop_in_place(self.ptr.as_mut());
 
                 // remove the implicit "strong weak" pointer now that we've
                 // destroyed the contents.
@@ -925,7 +929,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> fmt::Pointer for Rc<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Pointer::fmt(&*self.ptr, f)
+        fmt::Pointer::fmt(&self.ptr, f)
     }
 }
 
@@ -1067,7 +1071,7 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let ptr = *self.ptr;
+            let ptr = self.ptr.as_ptr();
 
             self.dec_weak();
             // the weak count starts at 1, and will only go to zero if all
@@ -1175,12 +1179,7 @@ impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
     #[inline(always)]
     fn inner(&self) -> &RcBox<T> {
         unsafe {
-            // Safe to assume this here, as if it weren't true, we'd be breaking
-            // the contract anyway.
-            // This allows the null check to be elided in the destructor if we
-            // manipulated the reference count in the same function.
-            assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
-            &(**self.ptr)
+            self.ptr.as_ref()
         }
     }
 }
@@ -1189,12 +1188,7 @@ impl<T: ?Sized> RcBoxPtr<T> for Weak<T> {
     #[inline(always)]
     fn inner(&self) -> &RcBox<T> {
         unsafe {
-            // Safe to assume this here, as if it weren't true, we'd be breaking
-            // the contract anyway.
-            // This allows the null check to be elided in the destructor if we
-            // manipulated the reference count in the same function.
-            assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
-            &(**self.ptr)
+            self.ptr.as_ref()
         }
     }
 }
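
The comment block deleted in the two hunks above justified a manual `assume(!ptr.is_null())` so LLVM could elide null checks around refcount manipulation. With `Shared` built on `NonZero`, that invariant lives in the type instead, and `as_ref()` carries it without the intrinsic. A minimal sketch of the pattern using `std::ptr::NonNull`, the name `Shared` later stabilized under:

```rust
use std::ptr::NonNull;

struct RcBox<T> {
    strong: usize,
    value: T,
}

struct MiniRc<T> {
    ptr: NonNull<RcBox<T>>, // non-null by construction, like Shared
}

impl<T> MiniRc<T> {
    fn new(value: T) -> Self {
        let raw = Box::into_raw(Box::new(RcBox { strong: 1, value }));
        // The null check happens exactly once, here; Box::into_raw never
        // returns null, so the unwrap cannot fail.
        MiniRc { ptr: NonNull::new(raw).unwrap() }
    }

    fn inner(&self) -> &RcBox<T> {
        // No `assume(!ptr.is_null())` needed: non-nullness is part of
        // NonNull's type, so the optimizer already knows it.
        unsafe { self.ptr.as_ref() }
    }
}

impl<T> Drop for MiniRc<T> {
    fn drop(&mut self) {
        unsafe { drop(Box::from_raw(self.ptr.as_ptr())) }
    }
}

fn main() {
    let rc = MiniRc::new(7u32);
    assert_eq!(rc.inner().value, 7);
    assert_eq!(rc.inner().strong, 1);
}
```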
18 changes: 9 additions & 9 deletions src/libcollections/btree/node.rs
@@ -152,12 +152,12 @@ impl<K, V> BoxedNode<K, V> {
     }
 
     unsafe fn from_ptr(ptr: NonZero<*const LeafNode<K, V>>) -> Self {
-        BoxedNode { ptr: Unique::new(*ptr as *mut LeafNode<K, V>) }
+        BoxedNode { ptr: Unique::new(ptr.get() as *mut LeafNode<K, V>) }
     }
 
     fn as_ptr(&self) -> NonZero<*const LeafNode<K, V>> {
         unsafe {
-            NonZero::new(*self.ptr as *const LeafNode<K, V>)
+            NonZero::new(self.ptr.as_ptr())
         }
     }
 }
@@ -241,7 +241,7 @@ impl<K, V> Root<K, V> {
     pub fn pop_level(&mut self) {
         debug_assert!(self.height > 0);
 
-        let top = *self.node.ptr as *mut u8;
+        let top = self.node.ptr.as_ptr() as *mut u8;
 
         self.node = unsafe {
             BoxedNode::from_ptr(self.as_mut()
@@ -308,15 +308,15 @@ unsafe impl<K: Send, V: Send, Type> Send
 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
     fn as_internal(&self) -> &InternalNode<K, V> {
         unsafe {
-            &*(*self.node as *const InternalNode<K, V>)
+            &*(self.node.get() as *const InternalNode<K, V>)
         }
     }
 }
 
 impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
     fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
         unsafe {
-            &mut *(*self.node as *mut InternalNode<K, V>)
+            &mut *(self.node.get() as *mut InternalNode<K, V>)
         }
     }
 }
@@ -358,7 +358,7 @@ impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
 
     fn as_leaf(&self) -> &LeafNode<K, V> {
         unsafe {
-            &**self.node
+            &*self.node.get()
         }
     }
 
@@ -510,7 +510,7 @@ impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
 
     fn as_leaf_mut(&mut self) -> &mut LeafNode<K, V> {
         unsafe {
-            &mut *(*self.node as *mut LeafNode<K, V>)
+            &mut *(self.node.get() as *mut LeafNode<K, V>)
         }
     }
 
@@ -1253,13 +1253,13 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
             }
 
             heap::deallocate(
-                *right_node.node as *mut u8,
+                right_node.node.get() as *mut u8,
                 mem::size_of::<InternalNode<K, V>>(),
                 mem::align_of::<InternalNode<K, V>>()
             );
         } else {
             heap::deallocate(
-                *right_node.node as *mut u8,
+                right_node.node.get() as *mut u8,
                 mem::size_of::<LeafNode<K, V>>(),
                 mem::align_of::<LeafNode<K, V>>()
             );
