Remove drop flags from structs and enums implementing Drop.
eddyb committed Aug 24, 2016
1 parent d0654ae commit 119508c
Showing 39 changed files with 304 additions and 934 deletions.
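A note on the recurring pattern in the hunks below: every `#[unsafe_no_drop_flag]` becomes `#[cfg_attr(stage0, unsafe_no_drop_flag)]`, which keeps the attribute only while rustc is being built by the previous snapshot compiler (the `stage0` predicate) and drops it everywhere else. A minimal sketch of how `cfg_attr` gates an attribute on a predicate; `test` stands in for `stage0` here so the example compiles with a stock toolchain:

```rust
// `cfg_attr(predicate, attribute)` applies `attribute` only when
// `predicate` is active. During rustc's own build, `stage0` is the
// predicate set while compiling with the previous-release compiler.
#[cfg_attr(test, derive(Debug))] // `test` is a stand-in predicate for this sketch
struct Example {
    value: u32,
}

fn main() {
    // In a non-test build, `Example` has no `Debug` impl; the field
    // access below works either way.
    println!("{}", Example { value: 1 }.value);
}
```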
19 changes: 2 additions & 17 deletions src/liballoc/arc.rs
@@ -121,7 +121,7 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize;
 /// }
 /// ```
 
-#[unsafe_no_drop_flag]
+#[cfg_attr(stage0, unsafe_no_drop_flag)]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct Arc<T: ?Sized> {
     ptr: Shared<ArcInner<T>>,
@@ -147,7 +147,7 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
 /// nodes behind strong `Arc<T>` pointers, and then storing the parent pointers
 /// as `Weak<T>` pointers.
 
-#[unsafe_no_drop_flag]
+#[cfg_attr(stage0, unsafe_no_drop_flag)]
 #[stable(feature = "arc_weak", since = "1.4.0")]
 pub struct Weak<T: ?Sized> {
     ptr: Shared<ArcInner<T>>,
@@ -559,15 +559,6 @@ impl<T: ?Sized> Drop for Arc<T> {
     #[unsafe_destructor_blind_to_params]
     #[inline]
     fn drop(&mut self) {
-        // This structure has #[unsafe_no_drop_flag], so this drop glue may run
-        // more than once (but it is guaranteed to be zeroed after the first if
-        // it's run more than once)
-        let thin = *self.ptr as *const ();
-
-        if thin as usize == mem::POST_DROP_USIZE {
-            return;
-        }
-
         // Because `fetch_sub` is already atomic, we do not need to synchronize
         // with other threads unless we are going to delete the object. This
        // same logic applies to the below `fetch_sub` to the `weak` count.
@@ -755,12 +746,6 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// ```
     fn drop(&mut self) {
         let ptr = *self.ptr;
-        let thin = ptr as *const ();
-
-        // see comments above for why this check is here
-        if thin as usize == mem::POST_DROP_USIZE {
-            return;
-        }
 
         // If we find out that we were the last weak pointer, then its time to
         // deallocate the data entirely. See the discussion in Arc::drop() about

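For context on the guard being deleted above, here is a toy model (not the real `Arc`): under `#[unsafe_no_drop_flag]` the drop glue could run a second time on a value whose bytes had been overwritten with the `POST_DROP` filling pattern, so the destructor had to recognize the marker and return early. `FakeArc` and the 32-bit marker value are illustration-only assumptions:

```rust
// Toy model of the removed early-return: a marker-filled value means
// "this was already dropped", so the destructor must do nothing.
const POST_DROP_USIZE: usize = 0x1d1d_1d1d; // 0x1d repeated; wider on 64-bit targets

struct FakeArc {
    ptr: usize, // stands in for the pointer field checked by the real code
}

impl Drop for FakeArc {
    fn drop(&mut self) {
        if self.ptr == POST_DROP_USIZE {
            // Second run on a zapped value: bail out.
            return;
        }
        println!("releasing one strong reference");
    }
}

fn main() {
    drop(FakeArc { ptr: 0x1000 }); // prints; a marker-filled value would not
}
```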
2 changes: 1 addition & 1 deletion src/liballoc/lib.rs
@@ -88,7 +88,7 @@
 #![feature(staged_api)]
 #![feature(unboxed_closures)]
 #![feature(unique)]
-#![feature(unsafe_no_drop_flag, filling_drop)]
+#![cfg_attr(stage0, feature(unsafe_no_drop_flag))]
 #![feature(unsize)]
 
 #![cfg_attr(not(test), feature(fused, raw, fn_traits, placement_new_protocol))]

11 changes: 2 additions & 9 deletions src/liballoc/raw_vec.rs
@@ -44,7 +44,7 @@ use core::cmp
 /// `shrink_to_fit`, and `from_box` will actually set RawVec's private capacity
 /// field. This allows zero-sized types to not be special-cased by consumers of
 /// this type.
-#[unsafe_no_drop_flag]
+#[cfg_attr(stage0, unsafe_no_drop_flag)]
 pub struct RawVec<T> {
     ptr: Unique<T>,
     cap: usize,
@@ -546,21 +546,14 @@ impl<T> RawVec<T> {
         mem::forget(self);
         output
     }
-
-    /// This is a stupid name in the hopes that someone will find this in the
-    /// not too distant future and remove it with the rest of
-    /// #[unsafe_no_drop_flag]
-    pub fn unsafe_no_drop_flag_needs_drop(&self) -> bool {
-        self.cap != mem::POST_DROP_USIZE
-    }
 }
 
 impl<T> Drop for RawVec<T> {
     #[unsafe_destructor_blind_to_params]
     /// Frees the memory owned by the RawVec *without* trying to Drop its contents.
     fn drop(&mut self) {
         let elem_size = mem::size_of::<T>();
-        if elem_size != 0 && self.cap != 0 && self.unsafe_no_drop_flag_needs_drop() {
+        if elem_size != 0 && self.cap != 0 {
             let align = mem::align_of::<T>();
 
             let num_bytes = elem_size * self.cap;

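The deleted helper shows how the scheme leaked into library code: `RawVec` reused its `cap` field as an implicit drop flag, comparing it against the filling pattern. A toy illustration (`ToyRawVec` and the 32-bit marker value are assumptions for the sketch):

```rust
// With filling drop, a destructed value's bytes were overwritten with
// the POST_DROP pattern, so `cap` doubled as an "already dropped?" flag.
const POST_DROP_USIZE: usize = 0x1d1d_1d1d;

struct ToyRawVec {
    cap: usize,
}

impl ToyRawVec {
    // Counterpart of the removed `unsafe_no_drop_flag_needs_drop`.
    fn needs_drop(&self) -> bool {
        self.cap != POST_DROP_USIZE
    }
}

fn main() {
    assert!(ToyRawVec { cap: 8 }.needs_drop());
    assert!(!ToyRawVec { cap: POST_DROP_USIZE }.needs_drop());
}
```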
38 changes: 16 additions & 22 deletions src/liballoc/rc.rs
@@ -182,7 +182,7 @@ struct RcBox<T: ?Sized> {
 /// A reference-counted pointer type over an immutable value.
 ///
 /// See the [module level documentation](./index.html) for more details.
-#[unsafe_no_drop_flag]
+#[cfg_attr(stage0, unsafe_no_drop_flag)]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct Rc<T: ?Sized> {
     ptr: Shared<RcBox<T>>,
@@ -466,21 +466,18 @@ impl<T: ?Sized> Drop for Rc<T> {
     fn drop(&mut self) {
         unsafe {
             let ptr = *self.ptr;
-            let thin = ptr as *const ();
 
-            if thin as usize != mem::POST_DROP_USIZE {
-                self.dec_strong();
-                if self.strong() == 0 {
-                    // destroy the contained object
-                    ptr::drop_in_place(&mut (*ptr).value);
+            self.dec_strong();
+            if self.strong() == 0 {
+                // destroy the contained object
+                ptr::drop_in_place(&mut (*ptr).value);
 
-                    // remove the implicit "strong weak" pointer now that we've
-                    // destroyed the contents.
-                    self.dec_weak();
+                // remove the implicit "strong weak" pointer now that we've
+                // destroyed the contents.
+                self.dec_weak();
 
-                    if self.weak() == 0 {
-                        deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
-                    }
+                if self.weak() == 0 {
+                    deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
                 }
             }
         }
@@ -724,7 +721,7 @@ impl<T> From<T> for Rc<T> {
 /// dropped.
 ///
 /// See the [module level documentation](./index.html) for more.
-#[unsafe_no_drop_flag]
+#[cfg_attr(stage0, unsafe_no_drop_flag)]
 #[stable(feature = "rc_weak", since = "1.4.0")]
 pub struct Weak<T: ?Sized> {
     ptr: Shared<RcBox<T>>,
@@ -825,15 +822,12 @@ impl<T: ?Sized> Drop for Weak<T> {
     fn drop(&mut self) {
         unsafe {
             let ptr = *self.ptr;
-            let thin = ptr as *const ();
 
-            if thin as usize != mem::POST_DROP_USIZE {
-                self.dec_weak();
-                // the weak count starts at 1, and will only go to zero if all
-                // the strong pointers have disappeared.
-                if self.weak() == 0 {
-                    deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
-                }
+            self.dec_weak();
+            // the weak count starts at 1, and will only go to zero if all
+            // the strong pointers have disappeared.
+            if self.weak() == 0 {
+                deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
             }
         }
     }

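The surviving logic in both destructors is easier to see without the pointer plumbing. A sketch of the reference-count bookkeeping that `Rc` and `Weak` keep after this change, with plain integers standing in for the heap-stored counts (`Counts`, `drop_strong`, and `drop_weak` are names invented for the sketch):

```rust
// The weak count starts at 1: all strong references collectively hold
// one implicit "strong weak" reference, released when the value dies.
struct Counts {
    strong: usize,
    weak: usize,
}

fn drop_strong(c: &mut Counts) {
    c.strong -= 1;
    if c.strong == 0 {
        println!("drop_in_place(value)");
        c.weak -= 1; // release the implicit strong-weak reference
        if c.weak == 0 {
            println!("deallocate(ptr)");
        }
    }
}

fn drop_weak(c: &mut Counts) {
    c.weak -= 1;
    if c.weak == 0 {
        println!("deallocate(ptr)");
    }
}

fn main() {
    // One Rc plus one Weak: weak = 1 (implicit) + 1 (real) = 2.
    let mut c = Counts { strong: 1, weak: 2 };
    drop_strong(&mut c); // value dropped; allocation kept alive for the Weak
    drop_weak(&mut c);   // last weak reference frees the allocation
}
```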
2 changes: 1 addition & 1 deletion src/libcollections/lib.rs
@@ -52,7 +52,7 @@
 #![feature(step_by)]
 #![feature(unicode)]
 #![feature(unique)]
-#![feature(unsafe_no_drop_flag)]
+#![cfg_attr(stage0, feature(unsafe_no_drop_flag))]
 #![cfg_attr(test, feature(rand, test))]
 
 #![no_std]

10 changes: 4 additions & 6 deletions src/libcollections/vec.rs
@@ -268,7 +268,7 @@ use super::range::RangeArgument;
 /// Vec does not currently guarantee the order in which elements are dropped
 /// (the order has changed in the past, and may change again).
 ///
-#[unsafe_no_drop_flag]
+#[cfg_attr(stage0, unsafe_no_drop_flag)]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct Vec<T> {
     buf: RawVec<T>,
@@ -1600,11 +1600,9 @@ impl<T: Ord> Ord for Vec<T> {
 impl<T> Drop for Vec<T> {
     #[unsafe_destructor_blind_to_params]
     fn drop(&mut self) {
-        if self.buf.unsafe_no_drop_flag_needs_drop() {
-            unsafe {
-                // use drop for [T]
-                ptr::drop_in_place(&mut self[..]);
-            }
+        unsafe {
+            // use drop for [T]
+            ptr::drop_in_place(&mut self[..]);
         }
         // RawVec handles deallocation
     }

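`Vec::drop` now runs unconditionally: element destructors via the slice's drop glue, then `RawVec` frees the buffer. The same split can be exercised from user code; a small sketch of the pattern:

```rust
use std::ptr;

fn main() {
    let mut data = vec![String::from("a"), String::from("b")];
    unsafe {
        // Run the destructor of every initialized element in place,
        // without touching the allocation (this is "drop for [T]").
        ptr::drop_in_place(&mut data[..]);
        // Prevent Vec's own Drop from dropping the elements again;
        // it will still deallocate the buffer.
        data.set_len(0);
    }
}
```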
13 changes: 0 additions & 13 deletions src/libcore/intrinsics.rs
@@ -244,19 +244,6 @@ extern "rust-intrinsic" {
     /// crate it is invoked in.
     pub fn type_id<T: ?Sized + 'static>() -> u64;
 
-    /// Creates a value initialized to so that its drop flag,
-    /// if any, says that it has been dropped.
-    ///
-    /// `init_dropped` is unsafe because it returns a datum with all
-    /// of its bytes set to the drop flag, which generally does not
-    /// correspond to a valid value.
-    ///
-    /// This intrinsic is likely to be deprecated in the future when
-    /// Rust moves to non-zeroing dynamic drop (and thus removes the
-    /// embedded drop flags that are being established by this
-    /// intrinsic).
-    pub fn init_dropped<T>() -> T;
-
     /// Creates a value initialized to zero.
     ///
     /// `init` is unsafe because it returns a zeroed-out datum,

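What the removed intrinsic computed can be approximated in library code: a value of `T` whose every byte is the filling-drop marker, i.e. one that drop glue would treat as already dead. A sketch using present-day `MaybeUninit` (the real intrinsic was compiler-implemented; `init_dropped_sketch` is a made-up name):

```rust
use std::mem::{size_of, MaybeUninit};
use std::ptr;

const POST_DROP_U8: u8 = 0x1d; // the filling-drop marker byte

// Produce a T with all bytes set to the marker. Only sound for types
// like plain integers where any bit pattern is a valid value.
unsafe fn init_dropped_sketch<T>() -> T {
    let mut v = MaybeUninit::<T>::uninit();
    ptr::write_bytes(v.as_mut_ptr() as *mut u8, POST_DROP_U8, size_of::<T>());
    v.assume_init()
}

fn main() {
    let x: u32 = unsafe { init_dropped_sketch() };
    assert_eq!(x, 0x1d1d_1d1d);
}
```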
71 changes: 0 additions & 71 deletions src/libcore/mem.rs
@@ -241,27 +241,6 @@ pub unsafe fn zeroed<T>() -> T {
     intrinsics::init()
 }
 
-/// Creates a value initialized to an unspecified series of bytes.
-///
-/// The byte sequence usually indicates that the value at the memory
-/// in question has been dropped. Thus, *if* T carries a drop flag,
-/// any associated destructor will not be run when the value falls out
-/// of scope.
-///
-/// Some code at one time used the `zeroed` function above to
-/// accomplish this goal.
-///
-/// This function is expected to be deprecated with the transition
-/// to non-zeroing drop.
-#[inline]
-#[unstable(feature = "filling_drop", issue = "5016")]
-pub unsafe fn dropped<T>() -> T {
-    #[inline(always)]
-    unsafe fn dropped_impl<T>() -> T { intrinsics::init_dropped() }
-
-    dropped_impl()
-}
-
 /// Bypasses Rust's normal memory-initialization checks by pretending to
 /// produce a value of type T, while doing nothing at all.
 ///
@@ -518,56 +497,6 @@ pub fn replace<T>(dest: &mut T, mut src: T) -> T {
 #[stable(feature = "rust1", since = "1.0.0")]
 pub fn drop<T>(_x: T) { }
 
-macro_rules! repeat_u8_as_u16 {
-    ($name:expr) => { (($name as u16) << 8 |
-                       ($name as u16)) }
-}
-macro_rules! repeat_u8_as_u32 {
-    ($name:expr) => { (($name as u32) << 24 |
-                       ($name as u32) << 16 |
-                       ($name as u32) << 8 |
-                       ($name as u32)) }
-}
-macro_rules! repeat_u8_as_u64 {
-    ($name:expr) => { ((repeat_u8_as_u32!($name) as u64) << 32 |
-                       (repeat_u8_as_u32!($name) as u64)) }
-}
-
-// NOTE: Keep synchronized with values used in librustc_trans::trans::adt.
-//
-// In particular, the POST_DROP_U8 marker must never equal the
-// DTOR_NEEDED_U8 marker.
-//
-// For a while pnkfelix was using 0xc1 here.
-// But having the sign bit set is a pain, so 0x1d is probably better.
-//
-// And of course, 0x00 brings back the old world of zero'ing on drop.
-#[unstable(feature = "filling_drop", issue = "5016")]
-#[allow(missing_docs)]
-pub const POST_DROP_U8: u8 = 0x1d;
-#[unstable(feature = "filling_drop", issue = "5016")]
-#[allow(missing_docs)]
-pub const POST_DROP_U16: u16 = repeat_u8_as_u16!(POST_DROP_U8);
-#[unstable(feature = "filling_drop", issue = "5016")]
-#[allow(missing_docs)]
-pub const POST_DROP_U32: u32 = repeat_u8_as_u32!(POST_DROP_U8);
-#[unstable(feature = "filling_drop", issue = "5016")]
-#[allow(missing_docs)]
-pub const POST_DROP_U64: u64 = repeat_u8_as_u64!(POST_DROP_U8);
-
-#[cfg(target_pointer_width = "16")]
-#[unstable(feature = "filling_drop", issue = "5016")]
-#[allow(missing_docs)]
-pub const POST_DROP_USIZE: usize = POST_DROP_U16 as usize;
-#[cfg(target_pointer_width = "32")]
-#[unstable(feature = "filling_drop", issue = "5016")]
-#[allow(missing_docs)]
-pub const POST_DROP_USIZE: usize = POST_DROP_U32 as usize;
-#[cfg(target_pointer_width = "64")]
-#[unstable(feature = "filling_drop", issue = "5016")]
-#[allow(missing_docs)]
-pub const POST_DROP_USIZE: usize = POST_DROP_U64 as usize;
-
 /// Interprets `src` as `&U`, and then reads `src` without moving the contained
 /// value.
 ///

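The deleted macros just replicate the marker byte `0x1d` across every byte of a wider integer: `0x1d` becomes `0x1d1d`, then `0x1d1d_1d1d`, then `0x1d1d_1d1d_1d1d_1d1d`. The same construction as plain functions (an equivalent composition for illustration, not the macros themselves):

```rust
fn repeat_u8_as_u16(b: u8) -> u16 {
    (b as u16) << 8 | (b as u16)
}

fn repeat_u8_as_u32(b: u8) -> u32 {
    (repeat_u8_as_u16(b) as u32) << 16 | (repeat_u8_as_u16(b) as u32)
}

fn repeat_u8_as_u64(b: u8) -> u64 {
    (repeat_u8_as_u32(b) as u64) << 32 | (repeat_u8_as_u32(b) as u64)
}

fn main() {
    assert_eq!(repeat_u8_as_u16(0x1d), 0x1d1d);
    assert_eq!(repeat_u8_as_u32(0x1d), 0x1d1d_1d1d);
    assert_eq!(repeat_u8_as_u64(0x1d), 0x1d1d_1d1d_1d1d_1d1d);
}
```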
15 changes: 0 additions & 15 deletions src/libcore/ptr.rs
@@ -140,21 +140,6 @@ pub unsafe fn read<T>(src: *const T) -> T {
     tmp
 }
 
-#[allow(missing_docs)]
-#[inline(always)]
-#[unstable(feature = "filling_drop",
-           reason = "may play a larger role in std::ptr future extensions",
-           issue = "5016")]
-pub unsafe fn read_and_drop<T>(dest: *mut T) -> T {
-    // Copy the data out from `dest`:
-    let tmp = read(&*dest);
-
-    // Now mark `dest` as dropped:
-    write_bytes(dest, mem::POST_DROP_U8, 1);
-
-    tmp
-}
-
 /// Overwrites a memory location with the given value without reading or
 /// dropping the old value.
 ///

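The removed `read_and_drop` is small enough to restate outside the standard library. A sketch of its behavior under the filling-drop scheme (a standalone reimplementation for illustration, not the original source):

```rust
use std::{mem, ptr};

const POST_DROP_U8: u8 = 0x1d;

// Copy the value out of `dest`, then fill `dest`'s bytes with the
// marker so later drop glue would treat it as already dropped.
unsafe fn read_and_drop<T>(dest: *mut T) -> T {
    let tmp = ptr::read(dest);
    ptr::write_bytes(dest as *mut u8, POST_DROP_U8, mem::size_of::<T>());
    tmp
}

fn main() {
    let mut x: u32 = 42;
    let y = unsafe { read_and_drop(&mut x) };
    assert_eq!(y, 42);
    assert_eq!(x, 0x1d1d_1d1d); // every byte of the source now carries the marker
}
```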
6 changes: 0 additions & 6 deletions src/librustc/session/config.rs
@@ -889,8 +889,6 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
         "adds unstable command line options to rustc interface"),
     force_overflow_checks: Option<bool> = (None, parse_opt_bool, [TRACKED],
         "force overflow checks on or off"),
-    force_dropflag_checks: Option<bool> = (None, parse_opt_bool, [TRACKED],
-        "force drop flag checks on or off"),
     trace_macros: bool = (false, parse_bool, [UNTRACKED],
         "for every macro invocation, print its name and arguments"),
     enable_nonzeroing_move_hints: bool = (false, parse_bool, [TRACKED],
@@ -2427,10 +2425,6 @@ mod tests {
         opts.debugging_opts.force_overflow_checks = Some(true);
         assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
 
-        opts = reference.clone();
-        opts.debugging_opts.force_dropflag_checks = Some(true);
-        assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
         opts = reference.clone();
         opts.debugging_opts.enable_nonzeroing_move_hints = true;
         assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
