libcore: Make it unsafe to create NonZero and impl Deref.
luqmana committed Dec 29, 2014
1 parent 4af5054 commit 466135b
Showing 4 changed files with 55 additions and 59 deletions.
15 changes: 6 additions & 9 deletions src/liballoc/arc.rs
@@ -164,7 +164,7 @@ impl<T> Arc<T> {
weak: atomic::AtomicUint::new(1),
data: data,
};
-Arc { _ptr: NonZero(unsafe { mem::transmute(x) }) }
+Arc { _ptr: unsafe { NonZero::new(mem::transmute(x)) } }
}

/// Downgrades the `Arc<T>` to a `Weak<T>` reference.
@@ -193,8 +193,7 @@ impl<T> Arc<T> {
// pointer is valid. Furthermore, we know that the `ArcInner` structure itself is `Sync`
// because the inner data is `Sync` as well, so we're ok loaning out an immutable pointer
// to these contents.
-let NonZero(ptr) = self._ptr;
-unsafe { &*ptr }
+unsafe { &**self._ptr }
}
}

@@ -281,8 +280,7 @@ impl<T: Send + Sync + Clone> Arc<T> {
// pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at
// this point, and we required the Arc itself to be `mut`, so we're returning the only
// possible reference to the inner data.
-let NonZero(ptr) = self._ptr;
-let inner = unsafe { &mut *ptr };
+let inner = unsafe { &mut **self._ptr };
&mut inner.data
}
}
@@ -317,7 +315,7 @@ impl<T: Sync + Send> Drop for Arc<T> {
fn drop(&mut self) {
// This structure has #[unsafe_no_drop_flag], so this drop glue may run more than once (but
// it is guaranteed to be zeroed after the first if it's run more than once)
-let NonZero(ptr) = self._ptr;
+let ptr = *self._ptr;
if ptr.is_null() { return }

// Because `fetch_sub` is already atomic, we do not need to synchronize with other threads
@@ -388,8 +386,7 @@ impl<T: Sync + Send> Weak<T> {
#[inline]
fn inner(&self) -> &ArcInner<T> {
// See comments above for why this is "safe"
-let NonZero(ptr) = self._ptr;
-unsafe { &*ptr }
+unsafe { &**self._ptr }
}
}

@@ -445,7 +442,7 @@ impl<T: Sync + Send> Drop for Weak<T> {
/// } // implicit drop
/// ```
fn drop(&mut self) {
-let NonZero(ptr) = self._ptr;
+let ptr = *self._ptr;

// see comments above for why this check is here
if ptr.is_null() { return }
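The recurring rewrite in this file replaces `let NonZero(ptr) = self._ptr;` destructuring with a double dereference: the first `*` goes through the `Deref` impl added to `NonZero` (see `libcore/ptr.rs` below) to reach the raw pointer, and the second `*` dereferences the raw pointer itself, which is why the expression stays inside `unsafe`. A small self-contained sketch with a stand-in wrapper type (hypothetical names, current Rust syntax) makes the two steps explicit:

```rust
use std::ops::Deref;

// Stand-in for `NonZero<*mut T>` with the `Deref` behaviour this commit adds.
struct Wrapped<T>(T);

impl<T> Deref for Wrapped<T> {
    type Target = T;
    fn deref(&self) -> &T { &self.0 }
}

struct Inner { data: u32 }

// Plays the role of `Arc<T>` / `Rc<T>` holding a non-zero raw pointer.
struct Handle { _ptr: Wrapped<*mut Inner> }

impl Handle {
    fn inner(&self) -> &Inner {
        // First `*`: Wrapped -> *mut Inner (safe, via Deref).
        // Second `*`: *mut Inner -> Inner (raw-pointer deref, hence `unsafe`).
        // The leading `&` reborrows the result, mirroring `&**self._ptr` above.
        unsafe { &**self._ptr }
    }
}

fn main() {
    let mut value = Inner { data: 7 };
    let handle = Handle { _ptr: Wrapped(&mut value as *mut Inner) };
    assert_eq!(handle.inner().data, 7);
}
```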
29 changes: 8 additions & 21 deletions src/liballoc/rc.rs
@@ -195,7 +195,7 @@ impl<T> Rc<T> {
// there is an implicit weak pointer owned by all the strong pointers, which
// ensures that the weak destructor never frees the allocation while the strong
// destructor is running, even if the weak pointer is stored inside the strong one.
-_ptr: NonZero(transmute(box RcBox {
+_ptr: NonZero::new(transmute(box RcBox {
value: value,
strong: Cell::new(1),
weak: Cell::new(1)
@@ -280,8 +280,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
let val = ptr::read(&*rc); // copy the contained object
// destruct the box and skip our Drop
// we can ignore the refcounts because we know we're unique
-let NonZero(ptr) = rc._ptr;
-deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
+deallocate(*rc._ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>());
forget(rc);
Ok(val)
@@ -311,10 +310,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
#[experimental]
pub fn get_mut<'a, T>(rc: &'a mut Rc<T>) -> Option<&'a mut T> {
if is_unique(rc) {
-let inner = unsafe {
-let NonZero(ptr) = rc._ptr;
-&mut *ptr
-};
+let inner = unsafe { &mut **rc._ptr };
Some(&mut inner.value)
} else {
None
@@ -346,10 +342,7 @@ impl<T: Clone> Rc<T> {
// pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at
// this point, and we required the `Rc<T>` itself to be `mut`, so we're returning the only
// possible reference to the inner value.
-let inner = unsafe {
-let NonZero(ptr) = self._ptr;
-&mut *ptr
-};
+let inner = unsafe { &mut **self._ptr };
&mut inner.value
}
}
@@ -397,7 +390,7 @@ impl<T> Drop for Rc<T> {
/// ```
fn drop(&mut self) {
unsafe {
-let NonZero(ptr) = self._ptr;
+let ptr = *self._ptr;
if !ptr.is_null() {
self.dec_strong();
if self.strong() == 0 {
@@ -689,7 +682,7 @@ impl<T> Drop for Weak<T> {
/// ```
fn drop(&mut self) {
unsafe {
-let NonZero(ptr) = self._ptr;
+let ptr = *self._ptr;
if !ptr.is_null() {
self.dec_weak();
// the weak count starts at 1, and will only go to zero if all the strong pointers
@@ -750,18 +743,12 @@ trait RcBoxPtr<T> {

impl<T> RcBoxPtr<T> for Rc<T> {
#[inline(always)]
-fn inner(&self) -> &RcBox<T> {
-let NonZero(ptr) = self._ptr;
-unsafe { &(*ptr) }
-}
+fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } }
}

impl<T> RcBoxPtr<T> for Weak<T> {
#[inline(always)]
-fn inner(&self) -> &RcBox<T> {
-let NonZero(ptr) = self._ptr;
-unsafe { &(*ptr) }
-}
+fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } }
}

#[cfg(test)]
46 changes: 20 additions & 26 deletions src/libcollections/vec.rs
@@ -176,7 +176,7 @@ impl<T> Vec<T> {
// non-null value which is fine since we never call deallocate on the ptr
// if cap is 0. The reason for this is because the pointer of a slice
// being NULL would break the null pointer optimization for enums.
-Vec { ptr: NonZero(EMPTY as *mut T), len: 0, cap: 0 }
+Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: 0 }
}

/// Constructs a new, empty `Vec<T>` with the specified capacity.
@@ -209,15 +209,15 @@ impl<T> Vec<T> {
#[stable]
pub fn with_capacity(capacity: uint) -> Vec<T> {
if mem::size_of::<T>() == 0 {
-Vec { ptr: NonZero(EMPTY as *mut T), len: 0, cap: uint::MAX }
+Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: uint::MAX }
} else if capacity == 0 {
Vec::new()
} else {
let size = capacity.checked_mul(mem::size_of::<T>())
.expect("capacity overflow");
let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
if ptr.is_null() { ::alloc::oom() }
-Vec { ptr: NonZero(ptr as *mut T), len: 0, cap: capacity }
+Vec { ptr: unsafe { NonZero::new(ptr as *mut T) }, len: 0, cap: capacity }
}
}

@@ -284,7 +284,7 @@ impl<T> Vec<T> {
#[unstable = "needs finalization"]
pub unsafe fn from_raw_parts(ptr: *mut T, length: uint,
capacity: uint) -> Vec<T> {
-Vec { ptr: NonZero(ptr), len: length, cap: capacity }
+Vec { ptr: NonZero::new(ptr), len: length, cap: capacity }
}

/// Creates a vector by copying the elements from a raw pointer.
@@ -792,24 +792,23 @@ impl<T> Vec<T> {
pub fn shrink_to_fit(&mut self) {
if mem::size_of::<T>() == 0 { return }

-let NonZero(ptr) = self.ptr;
if self.len == 0 {
if self.cap != 0 {
unsafe {
-dealloc(ptr, self.cap)
+dealloc(*self.ptr, self.cap)
}
self.cap = 0;
}
} else {
unsafe {
// Overflow check is unnecessary as the vector is already at
// least this large.
-let ptr = reallocate(ptr as *mut u8,
+let ptr = reallocate(*self.ptr as *mut u8,
self.cap * mem::size_of::<T>(),
self.len * mem::size_of::<T>(),
mem::min_align_of::<T>()) as *mut T;
if ptr.is_null() { ::alloc::oom() }
-self.ptr = NonZero(ptr);
+self.ptr = NonZero::new(ptr);
}
self.cap = self.len;
}
@@ -867,10 +866,9 @@ impl<T> Vec<T> {
#[inline]
#[stable]
pub fn as_mut_slice<'a>(&'a mut self) -> &'a mut [T] {
-let NonZero(ptr) = self.ptr;
unsafe {
mem::transmute(RawSlice {
-data: ptr as *const T,
+data: *self.ptr as *const T,
len: self.len,
})
}
@@ -893,7 +891,7 @@ impl<T> Vec<T> {
#[unstable = "matches collection reform specification, waiting for dust to settle"]
pub fn into_iter(self) -> IntoIter<T> {
unsafe {
-let NonZero(ptr) = self.ptr;
+let ptr = *self.ptr;
let cap = self.cap;
let begin = ptr as *const T;
let end = if mem::size_of::<T>() == 0 {
@@ -1113,16 +1111,15 @@ impl<T> Vec<T> {
let size = max(old_size, 2 * mem::size_of::<T>()) * 2;
if old_size > size { panic!("capacity overflow") }
unsafe {
-let NonZero(ptr) = self.ptr;
-let ptr = alloc_or_realloc(ptr, old_size, size);
+let ptr = alloc_or_realloc(*self.ptr, old_size, size);
if ptr.is_null() { ::alloc::oom() }
-self.ptr = NonZero(ptr);
+self.ptr = NonZero::new(ptr);
}
self.cap = max(self.cap, 2) * 2;
}

unsafe {
-let NonZero(end) = self.ptr.offset(self.len as int);
+let end = *self.ptr.offset(self.len as int);
ptr::write(&mut *end, value);
self.len += 1;
}
@@ -1167,11 +1164,11 @@ impl<T> Vec<T> {
#[unstable = "matches collection reform specification, waiting for dust to settle"]
pub fn drain<'a>(&'a mut self) -> Drain<'a, T> {
unsafe {
-let begin = self.ptr.0 as *const T;
+let begin = *self.ptr as *const T;
let end = if mem::size_of::<T>() == 0 {
-(self.ptr.0 as uint + self.len()) as *const T
+(*self.ptr as uint + self.len()) as *const T
} else {
-self.ptr.0.offset(self.len() as int) as *const T
+(*self.ptr).offset(self.len() as int) as *const T
};
self.set_len(0);
Drain {
@@ -1236,10 +1233,9 @@ impl<T> Vec<T> {
let size = capacity.checked_mul(mem::size_of::<T>())
.expect("capacity overflow");
unsafe {
-let NonZero(ptr) = self.ptr;
-let ptr = alloc_or_realloc(ptr, self.cap * mem::size_of::<T>(), size);
+let ptr = alloc_or_realloc(*self.ptr, self.cap * mem::size_of::<T>(), size);
if ptr.is_null() { ::alloc::oom() }
-self.ptr = NonZero(ptr);
+self.ptr = NonZero::new(ptr);
}
self.cap = capacity;
}
@@ -1360,10 +1356,9 @@ impl<T> AsSlice<T> for Vec<T> {
#[inline]
#[stable]
fn as_slice<'a>(&'a self) -> &'a [T] {
-let NonZero(ptr) = self.ptr;
unsafe {
mem::transmute(RawSlice {
-data: ptr as *const T,
+data: *self.ptr as *const T,
len: self.len
})
}
@@ -1388,8 +1383,7 @@ impl<T> Drop for Vec<T> {
for x in self.iter() {
ptr::read(x);
}
-let NonZero(ptr) = self.ptr;
-dealloc(ptr, self.cap)
+dealloc(*self.ptr, self.cap)
}
}
}
@@ -1427,7 +1421,7 @@ impl<T> IntoIter<T> {
for _x in self { }
let IntoIter { allocation, cap, ptr: _ptr, end: _end } = self;
mem::forget(self);
-Vec { ptr: NonZero(allocation), cap: cap, len: 0 }
+Vec { ptr: NonZero::new(allocation), cap: cap, len: 0 }
}
}

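As the comment in the `Vec::new` hunk above notes, the buffer pointer is kept non-null even for the empty vector so that the null-pointer enum optimization still applies. A quick check of that layout effect, using today's `std::ptr::NonNull` as a stand-in for the role `NonZero<*mut T>` plays here:

```rust
use std::mem::size_of;
use std::ptr::NonNull;

fn main() {
    // A nullable raw pointer needs a separate discriminant inside Option...
    assert!(size_of::<Option<*mut u8>>() > size_of::<*mut u8>());
    // ...whereas a pointer the compiler knows is non-null does not:
    // `None` can be encoded as the all-zero bit pattern.
    assert_eq!(size_of::<Option<NonNull<u8>>>(), size_of::<NonNull<u8>>());
    // The same layout optimization is what keeps `Option<Vec<u8>>` the same
    // size as `Vec<u8>` on current rustc, which is the point of storing the
    // buffer pointer as a non-zero type.
    assert_eq!(size_of::<Option<Vec<u8>>>(), size_of::<Vec<u8>>());
}
```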
24 changes: 21 additions & 3 deletions src/libcore/ptr.rs
@@ -90,10 +90,10 @@
use mem;
use clone::Clone;
use intrinsics;
-use kinds::Copy;
+use kinds::{Copy, Send, Sync};
+use ops::Deref;
use option::Option;
use option::Option::{Some, None};
-use kinds::{Send, Sync};

use cmp::{PartialEq, Eq, Ord, PartialOrd, Equiv};
use cmp::Ordering;
@@ -115,7 +115,25 @@ pub use intrinsics::set_memory;
/// NULL or 0 that might allow certain optimizations.
#[lang="non_zero"]
#[deriving(Clone, PartialEq, Eq, PartialOrd)]
-pub struct NonZero<T>(pub T);
+#[experimental]
+pub struct NonZero<T>(T);
+
+impl<T> NonZero<T> {
+/// Create an instance of NonZero with the provided value.
+/// You must indeed ensure that the value is actually "non-zero".
+#[inline(always)]
+pub unsafe fn new(inner: T) -> NonZero<T> {
+NonZero(inner)
+}
+}
+
+impl<T> Deref<T> for NonZero<T> {
+#[inline]
+fn deref<'a>(&'a self) -> &'a T {
+let NonZero(ref inner) = *self;
+inner
+}
+}

impl<T: Copy> Copy for NonZero<T> {}

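With this change, the only way to obtain a `NonZero<T>` is the `unsafe` constructor, and the only way to read the value back is through `Deref`, since the field is no longer `pub`. A minimal self-contained sketch of that contract, using a local stand-in type and current `Deref` syntax rather than the real `core::ptr::NonZero` defined above:

```rust
use std::ops::Deref;

// Local stand-in mirroring the post-commit `NonZero` surface:
// private field, `unsafe` constructor, `Deref` for reads.
struct NonZero<T>(T);

impl<T> NonZero<T> {
    /// Unsafe because the caller must guarantee the wrapped value is
    /// never zero/null; layout optimizations rely on that invariant.
    unsafe fn new(inner: T) -> NonZero<T> {
        NonZero(inner)
    }
}

impl<T> Deref for NonZero<T> {
    type Target = T;
    fn deref(&self) -> &T {
        let NonZero(ref inner) = *self;
        inner
    }
}

fn main() {
    // Fine: 4096 is statically known to be non-zero.
    let cap = unsafe { NonZero::new(4096u32) };
    // `let NonZero(n) = cap;` no longer works outside the defining module;
    // reads go through `Deref` instead.
    assert_eq!(*cap, 4096);
}
```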
