Actually deprecate the Heap type
SimonSapin committed Apr 12, 2018
1 parent 88ebd2d commit e521b8b
Showing 9 changed files with 47 additions and 46 deletions.
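The `Heap` → `Global` rename itself landed earlier; this commit makes the deprecation actually take effect by replacing the re-export `pub use self::Global as Heap;` with a deprecated `type` alias plus a deprecated `const` of the same name. Types and values occupy separate namespaces in Rust, so the alias keeps `Heap` working in type position while the const keeps it working in expression position (e.g. `Heap.alloc(...)`), and both now emit deprecation warnings. A standalone sketch of the pattern, using the stable `#[deprecated]` attribute in place of the internal `#[rustc_deprecated]` used in the diff:

```rust
// Sketch of the alias-plus-const deprecation pattern; names are illustrative.
pub struct Global;

impl Global {
    pub fn describe(&self) -> &'static str { "the global allocator handle" }
}

#[deprecated(since = "1.27.0", note = "type renamed to `Global`")]
pub type Heap = Global;

// Types and values live in separate namespaces, so a const of the same name
// covers uses of `Heap` in expression position.
#[deprecated(since = "1.27.0", note = "type renamed to `Global`")]
#[allow(non_upper_case_globals)]
pub const Heap: Global = Global;

fn main() {
    let a: Heap = Global; // warns: use of deprecated type alias `Heap`
    let b = Heap;         // warns: use of deprecated constant `Heap`
    assert_eq!(a.describe(), b.describe());
}
```

Note that in the diff each of the two items carries its own copy of the `#[unstable]` and `#[rustc_deprecated]` attributes.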
8 changes: 6 additions & 2 deletions src/liballoc/alloc.rs
@@ -81,8 +81,12 @@ pub struct Global;

#[unstable(feature = "allocator_api", issue = "32838")]
#[rustc_deprecated(since = "1.27.0", reason = "type renamed to `Global`")]
-pub use self::Global as Heap;
+pub type Heap = Global;

+#[unstable(feature = "allocator_api", issue = "32838")]
+#[rustc_deprecated(since = "1.27.0", reason = "type renamed to `Global`")]
+#[allow(non_upper_case_globals)]
+pub const Heap: Global = Global;

unsafe impl Alloc for Global {
#[inline]
@@ -268,7 +272,7 @@ mod tests {
extern crate test;
use self::test::Bencher;
use boxed::Box;
-use heap::{Global, Alloc, Layout};
+use alloc::{Global, Alloc, Layout};

#[test]
fn allocate_zeroed() {
13 changes: 6 additions & 7 deletions src/liballoc/arc.rs
@@ -21,7 +21,6 @@ use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
use core::borrow;
use core::fmt;
use core::cmp::Ordering;
-use core::heap::{Alloc, Layout};
use core::intrinsics::abort;
use core::mem::{self, align_of_val, size_of_val, uninitialized};
use core::ops::Deref;
@@ -32,7 +31,7 @@ use core::hash::{Hash, Hasher};
use core::{isize, usize};
use core::convert::From;

-use heap::{Heap, box_free};
+use alloc::{Global, Alloc, Layout, box_free};
use boxed::Box;
use string::String;
use vec::Vec;
@@ -521,7 +520,7 @@ impl<T: ?Sized> Arc<T> {

if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
-Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
}
}

@@ -555,8 +554,8 @@ impl<T: ?Sized> Arc<T> {

let layout = Layout::for_value(&*fake_ptr);

-let mem = Heap.alloc(layout)
-    .unwrap_or_else(|e| Heap.oom(e));
+let mem = Global.alloc(layout)
+    .unwrap_or_else(|e| Global.oom(e));

// Initialize the real ArcInner
let inner = set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>;
@@ -640,7 +639,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
let slice = from_raw_parts_mut(self.elems, self.n_elems);
ptr::drop_in_place(slice);

-Heap.dealloc(self.mem, self.layout.clone());
+Global.dealloc(self.mem, self.layout.clone());
}
}
}
@@ -1161,7 +1160,7 @@ impl<T: ?Sized> Drop for Weak<T> {
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
unsafe {
-Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
}
}
}
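Within `arc.rs` the changes are mechanical: each `Heap.alloc`/`Heap.dealloc` call becomes the equivalent call on `Global`. The underlying pattern — allocating a reference-counted box through a layout, then freeing it with a layout recomputed via `Layout::for_value` — can be sketched with today's stable `std::alloc` free functions (the unstable `Alloc` trait methods in this diff have different signatures and error handling):

```rust
use std::alloc::{alloc, dealloc, Layout};
use std::ptr;

// Toy stand-in for ArcInner<T>: two refcounts followed by the data.
struct Inner<T> {
    strong: usize,
    weak: usize,
    data: T,
}

fn main() {
    unsafe {
        // Analogue of `Global.alloc(layout)` in the diff.
        let layout = Layout::new::<Inner<String>>();
        let p = alloc(layout) as *mut Inner<String>;
        assert!(!p.is_null(), "allocation failure");
        ptr::write(p, Inner { strong: 1, weak: 1, data: String::from("hello") });

        assert_eq!((*p).strong, 1);
        assert_eq!((*p).weak, 1);
        assert_eq!((*p).data, "hello");

        // Analogue of `Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))`:
        // recompute the layout from the value, drop it, then free the memory.
        let layout = Layout::for_value(&*p);
        ptr::drop_in_place(p);
        dealloc(p as *mut u8, layout);
    }
}
```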
13 changes: 6 additions & 7 deletions src/liballoc/btree/node.rs
@@ -41,14 +41,13 @@
// - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges.
// This implies that even an empty internal node has at least one edge.

-use core::heap::{Alloc, Layout};
use core::marker::PhantomData;
use core::mem;
use core::ptr::{self, Unique, NonNull};
use core::slice;

+use alloc::{Global, Alloc, Layout};
use boxed::Box;
-use heap::Heap;

const B: usize = 6;
pub const MIN_LEN: usize = B - 1;
@@ -250,7 +249,7 @@ impl<K, V> Root<K, V> {
self.as_mut().as_leaf_mut().parent = ptr::null();

unsafe {
-Heap.dealloc(top, Layout::new::<InternalNode<K, V>>());
+Global.dealloc(top, Layout::new::<InternalNode<K, V>>());
}
}
}
@@ -436,7 +435,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
> {
let ptr = self.as_leaf() as *const LeafNode<K, V> as *const u8 as *mut u8;
let ret = self.ascend().ok();
-Heap.dealloc(ptr, Layout::new::<LeafNode<K, V>>());
+Global.dealloc(ptr, Layout::new::<LeafNode<K, V>>());
ret
}
}
@@ -457,7 +456,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
> {
let ptr = self.as_internal() as *const InternalNode<K, V> as *const u8 as *mut u8;
let ret = self.ascend().ok();
-Heap.dealloc(ptr, Layout::new::<InternalNode<K, V>>());
+Global.dealloc(ptr, Layout::new::<InternalNode<K, V>>());
ret
}
}
@@ -1239,12 +1238,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
).correct_parent_link();
}

-Heap.dealloc(
+Global.dealloc(
right_node.node.as_ptr() as *mut u8,
Layout::new::<InternalNode<K, V>>(),
);
} else {
-Heap.dealloc(
+Global.dealloc(
right_node.node.as_ptr() as *mut u8,
Layout::new::<LeafNode<K, V>>(),
);
23 changes: 11 additions & 12 deletions src/liballoc/raw_vec.rs
@@ -8,13 +8,12 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

+use alloc::{Alloc, Layout, Global};
use core::cmp;
-use core::heap::{Alloc, Layout};
use core::mem;
use core::ops::Drop;
use core::ptr::{self, Unique};
use core::slice;
-use heap::Heap;
use super::boxed::Box;
use super::allocator::CollectionAllocErr;
use super::allocator::CollectionAllocErr::*;
@@ -47,7 +46,7 @@ use super::allocator::CollectionAllocErr::*;
/// field. This allows zero-sized types to not be special-cased by consumers of
/// this type.
#[allow(missing_debug_implementations)]
-pub struct RawVec<T, A: Alloc = Heap> {
+pub struct RawVec<T, A: Alloc = Global> {
ptr: Unique<T>,
cap: usize,
a: A,
@@ -114,14 +113,14 @@ impl<T, A: Alloc> RawVec<T, A> {
}
}

-impl<T> RawVec<T, Heap> {
+impl<T> RawVec<T, Global> {
/// Creates the biggest possible RawVec (on the system heap)
/// without allocating. If T has positive size, then this makes a
/// RawVec with capacity 0. If T has 0 size, then it makes a
/// RawVec with capacity `usize::MAX`. Useful for implementing
/// delayed allocation.
pub fn new() -> Self {
-Self::new_in(Heap)
+Self::new_in(Global)
}

/// Creates a RawVec (on the system heap) with exactly the
@@ -141,13 +140,13 @@ impl<T> RawVec<T, Heap> {
/// Aborts on OOM
#[inline]
pub fn with_capacity(cap: usize) -> Self {
-RawVec::allocate_in(cap, false, Heap)
+RawVec::allocate_in(cap, false, Global)
}

/// Like `with_capacity` but guarantees the buffer is zeroed.
#[inline]
pub fn with_capacity_zeroed(cap: usize) -> Self {
-RawVec::allocate_in(cap, true, Heap)
+RawVec::allocate_in(cap, true, Global)
}
}

Expand All @@ -168,7 +167,7 @@ impl<T, A: Alloc> RawVec<T, A> {
}
}

-impl<T> RawVec<T, Heap> {
+impl<T> RawVec<T, Global> {
/// Reconstitutes a RawVec from a pointer, capacity.
///
/// # Undefined Behavior
Expand All @@ -180,7 +179,7 @@ impl<T> RawVec<T, Heap> {
RawVec {
ptr: Unique::new_unchecked(ptr),
cap,
-a: Heap,
+a: Global,
}
}

@@ -678,7 +677,7 @@ impl<T, A: Alloc> RawVec<T, A> {
}
}

-impl<T> RawVec<T, Heap> {
+impl<T> RawVec<T, Global> {
/// Converts the entire buffer into `Box<[T]>`.
///
/// While it is not *strictly* Undefined Behavior to call
@@ -763,13 +762,13 @@ mod tests {
if size > self.fuel {
return Err(AllocErr::Unsupported { details: "fuel exhausted" });
}
-match Heap.alloc(layout) {
+match Global.alloc(layout) {
ok @ Ok(_) => { self.fuel -= size; ok }
err @ Err(_) => err,
}
}
unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-Heap.dealloc(ptr, layout)
+Global.dealloc(ptr, layout)
}
}

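The `raw_vec.rs` change swaps the default allocator parameter from `Heap` to `Global`, so `RawVec<T>` keeps meaning "backed by the global allocator" while `RawVec<T, A>` stays generic. A toy illustration of that default-type-parameter pattern (not the liballoc source; `AllocLike` is a made-up stand-in for the unstable `Alloc` trait):

```rust
use std::marker::PhantomData;

// Made-up stand-in for the unstable `Alloc` trait of this era.
trait AllocLike {
    fn name(&self) -> &'static str;
}

struct Global;
impl AllocLike for Global {
    fn name(&self) -> &'static str { "global" }
}

// Default type parameter: `RawVec<T>` means `RawVec<T, Global>`, mirroring
// `pub struct RawVec<T, A: Alloc = Global>` in the diff.
struct RawVec<T, A: AllocLike = Global> {
    cap: usize,
    a: A,
    _marker: PhantomData<T>,
}

impl<T> RawVec<T, Global> {
    // Like `RawVec::new`: callers never have to name the allocator.
    fn new() -> Self {
        Self::new_in(Global)
    }
}

impl<T, A: AllocLike> RawVec<T, A> {
    fn new_in(a: A) -> Self {
        RawVec { cap: 0, a, _marker: PhantomData }
    }
}

fn main() {
    let v: RawVec<u32> = RawVec::new(); // allocator parameter defaulted
    println!("cap = {}, allocator = {}", v.cap, v.a.name());
}
```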
13 changes: 6 additions & 7 deletions src/liballoc/rc.rs
@@ -250,7 +250,6 @@ use core::cell::Cell;
use core::cmp::Ordering;
use core::fmt;
use core::hash::{Hash, Hasher};
-use core::heap::{Alloc, Layout};
use core::intrinsics::abort;
use core::marker;
use core::marker::{Unsize, PhantomData};
@@ -260,7 +259,7 @@ use core::ops::CoerceUnsized;
use core::ptr::{self, NonNull};
use core::convert::From;

-use heap::{Heap, box_free};
+use alloc::{Global, Alloc, Layout, box_free};
use string::String;
use vec::Vec;

@@ -668,8 +667,8 @@ impl<T: ?Sized> Rc<T> {

let layout = Layout::for_value(&*fake_ptr);

-let mem = Heap.alloc(layout)
-    .unwrap_or_else(|e| Heap.oom(e));
+let mem = Global.alloc(layout)
+    .unwrap_or_else(|e| Global.oom(e));

// Initialize the real RcBox
let inner = set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>;
@@ -752,7 +751,7 @@ impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
let slice = from_raw_parts_mut(self.elems, self.n_elems);
ptr::drop_in_place(slice);

-Heap.dealloc(self.mem, self.layout.clone());
+Global.dealloc(self.mem, self.layout.clone());
}
}
}
@@ -847,7 +846,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
self.dec_weak();

if self.weak() == 0 {
-Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
+Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
}
}
}
@@ -1273,7 +1272,7 @@ impl<T: ?Sized> Drop for Weak<T> {
// the weak count starts at 1, and will only go to zero if all
// the strong pointers have disappeared.
if self.weak() == 0 {
-Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
+Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
}
}
}
4 changes: 2 additions & 2 deletions src/liballoc/tests/heap.rs
@@ -9,7 +9,7 @@
// except according to those terms.

use alloc_system::System;
-use std::heap::{Heap, Alloc, Layout};
+use std::alloc::{Global, Alloc, Layout};

/// https://github.com/rust-lang/rust/issues/45955
///
@@ -22,7 +22,7 @@ fn alloc_system_overaligned_request() {

#[test]
fn std_heap_overaligned_request() {
-check_overalign_requests(Heap)
+check_overalign_requests(Global)
}

fn check_overalign_requests<T: Alloc>(mut allocator: T) {
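The test passes `Global` by value into `check_overalign_requests<T: Alloc>`, exercising allocation requests whose alignment exceeds what the platform allocator guarantees by default (issue #45955). A minimal stable-Rust analogue of such an over-aligned request:

```rust
use std::alloc::{alloc, dealloc, Layout};

fn main() {
    // 8 bytes aligned to 256 — a larger alignment than malloc usually guarantees.
    let layout = Layout::from_size_align(8, 256).unwrap();
    unsafe {
        let p = alloc(layout);
        assert!(!p.is_null(), "allocation failure");
        // The allocator must honour the over-aligned request.
        assert_eq!(p as usize % layout.align(), 0);
        dealloc(p, layout);
    }
}
```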
3 changes: 2 additions & 1 deletion src/libstd/alloc.rs
@@ -12,7 +12,8 @@

#![unstable(issue = "32838", feature = "allocator_api")]

-#[doc(inline)] pub use alloc_crate::alloc::Heap;
+#[doc(inline)] #[allow(deprecated)] pub use alloc_crate::alloc::Heap;
+#[doc(inline)] pub use alloc_crate::alloc::Global;
#[doc(inline)] pub use alloc_system::System;
#[doc(inline)] pub use core::alloc::*;

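Note the new `#[allow(deprecated)]` on the `Heap` re-export: a re-export is itself a use of the deprecated item, so without the allow the `pub use` line would presumably trip the deprecation lint inside libstd, while downstream users of `Heap` still get their warning. A small sketch of that behavior (the `alloc_crate` module name is chosen to mirror the diff and is otherwise arbitrary):

```rust
mod alloc_crate {
    pub struct Global;

    #[deprecated(note = "type renamed to `Global`")]
    pub type Heap = Global;
}

// Without this allow, the `pub use` itself would trip the deprecation lint;
// downstream uses of `Heap` still warn regardless.
#[allow(deprecated)]
pub use alloc_crate::Heap;
pub use alloc_crate::Global;

fn main() {
    #[allow(deprecated)]
    let _h: Heap = Global; // warns here without the local allow
}
```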
4 changes: 2 additions & 2 deletions src/libstd/collections/hash/map.rs
@@ -11,13 +11,13 @@
use self::Entry::*;
use self::VacantEntryState::*;

+use alloc::{Global, Alloc, CollectionAllocErr};
use cell::Cell;
use borrow::Borrow;
use cmp::max;
use fmt::{self, Debug};
#[allow(deprecated)]
use hash::{Hash, Hasher, BuildHasher, SipHasher13};
-use heap::{Heap, Alloc, CollectionAllocErr};
use iter::{FromIterator, FusedIterator};
use mem::{self, replace};
use ops::{Deref, Index};
@@ -784,7 +784,7 @@ impl<K, V, S> HashMap<K, V, S>
pub fn reserve(&mut self, additional: usize) {
match self.try_reserve(additional) {
Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
-Err(CollectionAllocErr::AllocErr(e)) => Heap.oom(e),
+Err(CollectionAllocErr::AllocErr(e)) => Global.oom(e),
Ok(()) => { /* yay */ }
}
}
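`HashMap::reserve` is layered over `try_reserve`, panicking on `CapacityOverflow` and aborting via `Global.oom` on a genuine allocation failure. Today's stable `Vec::try_reserve` folds both cases into one `TryReserveError`; a sketch of the same infallible-over-fallible layering against that API:

```rust
fn reserve_or_panic<T>(v: &mut Vec<T>, additional: usize) {
    match v.try_reserve(additional) {
        // The diff distinguishes CapacityOverflow (panic) from AllocErr
        // (`Global.oom`); stable TryReserveError covers both cases.
        Err(e) => panic!("cannot reserve: {e}"),
        Ok(()) => { /* yay */ }
    }
}

fn main() {
    let mut v: Vec<u8> = Vec::new();
    reserve_or_panic(&mut v, 1024);
    assert!(v.capacity() >= 1024);
}
```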
12 changes: 6 additions & 6 deletions src/libstd/collections/hash/table.rs
@@ -8,9 +8,9 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

+use alloc::{Global, Alloc, Layout, CollectionAllocErr};
use cmp;
use hash::{BuildHasher, Hash, Hasher};
-use heap::{Heap, Alloc, Layout, CollectionAllocErr};
use marker;
use mem::{align_of, size_of, needs_drop};
use mem;
@@ -754,7 +754,7 @@ impl<K, V> RawTable<K, V> {
return Err(CollectionAllocErr::CapacityOverflow);
}

-let buffer = Heap.alloc(Layout::from_size_align(size, alignment)
+let buffer = Global.alloc(Layout::from_size_align(size, alignment)
.ok_or(CollectionAllocErr::CapacityOverflow)?)?;

let hashes = buffer as *mut HashUint;
@@ -772,7 +772,7 @@ impl<K, V> RawTable<K, V> {
unsafe fn new_uninitialized(capacity: usize) -> RawTable<K, V> {
match Self::try_new_uninitialized(capacity) {
Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
-Err(CollectionAllocErr::AllocErr(e)) => Heap.oom(e),
+Err(CollectionAllocErr::AllocErr(e)) => Global.oom(e),
Ok(table) => { table }
}
}
@@ -811,7 +811,7 @@ impl<K, V> RawTable<K, V> {
pub fn new(capacity: usize) -> RawTable<K, V> {
match Self::try_new(capacity) {
Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
-Err(CollectionAllocErr::AllocErr(e)) => Heap.oom(e),
+Err(CollectionAllocErr::AllocErr(e)) => Global.oom(e),
Ok(table) => { table }
}
}
@@ -1185,8 +1185,8 @@ unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for RawTable<K, V> {
debug_assert!(!oflo, "should be impossible");

unsafe {
-Heap.dealloc(self.hashes.ptr() as *mut u8,
-    Layout::from_size_align(size, align).unwrap());
+Global.dealloc(self.hashes.ptr() as *mut u8,
+    Layout::from_size_align(size, align).unwrap());
// Remember how everything was allocated out of one buffer
// during initialization? We only need one call to free here.
}
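`RawTable` places the hash array and the key-value pairs in a single buffer whose size and alignment are computed up front, which is why `Drop` recomputes the combined layout and issues exactly one `dealloc`. A sketch of the one-buffer-two-arrays idea using stable `Layout::extend` (a hypothetical pair of arrays, not the real table's layout math):

```rust
use std::alloc::{alloc, dealloc, Layout};

fn main() {
    let n = 16;
    // One allocation holding `n` u64 hashes followed by `n` (u32, u32) pairs.
    let hashes = Layout::array::<u64>(n).unwrap();
    let pairs = Layout::array::<(u32, u32)>(n).unwrap();
    let (combined, pairs_offset) = hashes.extend(pairs).unwrap();

    unsafe {
        let buf = alloc(combined);
        assert!(!buf.is_null(), "allocation failure");
        let hash_ptr = buf as *mut u64;
        let pair_ptr = buf.add(pairs_offset) as *mut (u32, u32);
        hash_ptr.write(0xdead_beef);
        pair_ptr.write((1, 2));
        assert_eq!((*hash_ptr, pair_ptr.read()), (0xdead_beef, (1, 2)));
        // One call frees both arrays, as in RawTable's Drop impl.
        dealloc(buf, combined);
    }
}
```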
