diff --git a/library/alloc/src/alloc.rs b/library/alloc/src/alloc.rs
index 39450f69ce30a..e852f309c8cf5 100644
--- a/library/alloc/src/alloc.rs
+++ b/library/alloc/src/alloc.rs
@@ -5,8 +5,8 @@
 #[stable(feature = "alloc_module", since = "1.28.0")]
 #[doc(inline)]
 pub use core::alloc::*;
-use core::hint;
 use core::ptr::{self, NonNull};
+use core::{cmp, hint};
 
 unsafe extern "Rust" {
     // These are the magic symbols to call the global allocator. rustc generates
@@ -182,7 +182,7 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
 impl Global {
     #[inline]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
-    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
+    fn alloc_impl_runtime(layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
         match layout.size() {
             0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
             // SAFETY: `layout` is non-zero in size,
@@ -194,10 +194,26 @@ impl Global {
         }
     }
 
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    fn deallocate_impl_runtime(ptr: NonNull<u8>, layout: Layout) {
+        if layout.size() != 0 {
+            // SAFETY:
+            // * We have checked that `layout` is non-zero in size.
+            // * The caller is obligated to provide a layout that "fits", and in this case,
+            //   "fit" always means a layout that is equal to the original, because our
+            //   `allocate()`, `grow()`, and `shrink()` implementations never return a larger
+            //   allocation than requested.
+            // * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
+            //   safety documentation.
+            unsafe { dealloc(ptr.as_ptr(), layout) }
+        }
+    }
+
     // SAFETY: Same as `Allocator::grow`
     #[inline]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
-    unsafe fn grow_impl(
+    fn grow_impl_runtime(
         &self,
         ptr: NonNull<u8>,
         old_layout: Layout,
@@ -241,10 +257,176 @@ impl Global {
             },
         }
     }
+
+    // SAFETY: Same as `Allocator::shrink`
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    fn shrink_impl_runtime(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+        _zeroed: bool,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        debug_assert!(
+            new_layout.size() <= old_layout.size(),
+            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
+        );
+
+        match new_layout.size() {
+            // SAFETY: conditions must be upheld by the caller
+            0 => unsafe {
+                self.deallocate(ptr, old_layout);
+                Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
+            },
+
+            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
+            new_size if old_layout.align() == new_layout.align() => unsafe {
+                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
+                hint::assert_unchecked(new_size <= old_layout.size());
+
+                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
+                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
+                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
+            },
+
+            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
+            // both the old and new memory allocation are valid for reads and writes for `new_size`
+            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
+            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
+            // for `dealloc` must be upheld by the caller.
+            new_size => unsafe {
+                let new_ptr = self.allocate(new_layout)?;
+                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
+                self.deallocate(ptr, old_layout);
+                Ok(new_ptr)
+            },
+        }
+    }
+
+    // SAFETY: Same as `Allocator::allocate`
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
+        core::intrinsics::const_eval_select(
+            (layout, zeroed),
+            Global::alloc_impl_const,
+            Global::alloc_impl_runtime,
+        )
+    }
+
+    // SAFETY: Same as `Allocator::deallocate`
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const unsafe fn deallocate_impl(&self, ptr: NonNull<u8>, layout: Layout) {
+        core::intrinsics::const_eval_select(
+            (ptr, layout),
+            Global::deallocate_impl_const,
+            Global::deallocate_impl_runtime,
+        )
+    }
+
+    // SAFETY: Same as `Allocator::grow`
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const unsafe fn grow_impl(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+        zeroed: bool,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        core::intrinsics::const_eval_select(
+            (self, ptr, old_layout, new_layout, zeroed),
+            Global::grow_shrink_impl_const,
+            Global::grow_impl_runtime,
+        )
+    }
+
+    // SAFETY: Same as `Allocator::shrink`
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const unsafe fn shrink_impl(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        core::intrinsics::const_eval_select(
+            (self, ptr, old_layout, new_layout, false),
+            Global::grow_shrink_impl_const,
+            Global::shrink_impl_runtime,
+        )
+    }
+
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const fn alloc_impl_const(layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
+        match layout.size() {
+            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
+            // SAFETY: `layout` is non-zero in size,
+            size => unsafe {
+                let raw_ptr = core::intrinsics::const_allocate(layout.size(), layout.align());
+                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
+                if zeroed {
+                    let mut offset = 0;
+                    while offset < size {
+                        // SAFETY: the pointer returned by `const_allocate` is valid to write to.
+                        ptr.add(offset).write(0);
+                        offset += 1;
+                    }
+                }
+                Ok(NonNull::slice_from_raw_parts(ptr, size))
+            },
+        }
+    }
+
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const fn deallocate_impl_const(ptr: NonNull<u8>, layout: Layout) {
+        if layout.size() != 0 {
+            // SAFETY: We checked for nonzero size; other preconditions must be upheld by caller.
+            unsafe {
+                core::intrinsics::const_deallocate(ptr.as_ptr(), layout.size(), layout.align());
+            }
+        }
+    }
+
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const fn grow_shrink_impl_const(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+        zeroed: bool,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        let new_ptr = self.alloc_impl(new_layout, zeroed)?;
+        // SAFETY: both pointers are valid and this operation is in bounds.
+        unsafe {
+            ptr::copy_nonoverlapping(
+                ptr.as_ptr(),
+                new_ptr.as_mut_ptr(),
+                cmp::min(old_layout.size(), new_layout.size()),
+            );
+        }
+        unsafe {
+            self.deallocate_impl(ptr, old_layout);
+        }
+        Ok(new_ptr)
+    }
 }
 
 #[unstable(feature = "allocator_api", issue = "32838")]
-unsafe impl Allocator for Global {
+#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+unsafe impl const Allocator for Global {
     #[inline]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
@@ -260,17 +442,8 @@ unsafe impl Allocator for Global {
     #[inline]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
-        if layout.size() != 0 {
-            // SAFETY:
-            // * We have checked that `layout` is non-zero in size.
-            // * The caller is obligated to provide a layout that "fits", and in this case,
-            //   "fit" always means a layout that is equal to the original, because our
-            //   `allocate()`, `grow()`, and `shrink()` implementations never returns a larger
-            //   allocation than requested.
-            // * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
-            //   safety documentation.
-            unsafe { dealloc(ptr.as_ptr(), layout) }
-        }
+        // SAFETY: all conditions must be upheld by the caller
+        unsafe { self.deallocate_impl(ptr, layout) }
     }
 
     #[inline]
@@ -305,40 +478,8 @@ unsafe impl Allocator for Global {
         old_layout: Layout,
         new_layout: Layout,
     ) -> Result<NonNull<[u8]>, AllocError> {
-        debug_assert!(
-            new_layout.size() <= old_layout.size(),
-            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
-        );
-
-        match new_layout.size() {
-            // SAFETY: conditions must be upheld by the caller
-            0 => unsafe {
-                self.deallocate(ptr, old_layout);
-                Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
-            },
-
-            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
-            new_size if old_layout.align() == new_layout.align() => unsafe {
-                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
-                hint::assert_unchecked(new_size <= old_layout.size());
-
-                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
-                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
-                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
-            },
-
-            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
-            // both the old and new memory allocation are valid for reads and writes for `new_size`
-            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
-            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
-            // for `dealloc` must be upheld by the caller.
-            new_size => unsafe {
-                let new_ptr = self.allocate(new_layout)?;
-                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
-                self.deallocate(ptr, old_layout);
-                Ok(new_ptr)
-            },
-        }
+        // SAFETY: all conditions must be upheld by the caller
+        unsafe { self.shrink_impl(ptr, old_layout, new_layout) }
     }
 }
 
diff --git a/library/alloc/src/collections/mod.rs b/library/alloc/src/collections/mod.rs
index 212d7c8465b6e..e09326759fd1e 100644
--- a/library/alloc/src/collections/mod.rs
+++ b/library/alloc/src/collections/mod.rs
@@ -84,13 +84,14 @@ impl TryReserveError {
         reason = "Uncertain how much info should be exposed",
         issue = "48043"
     )]
-    pub fn kind(&self) -> TryReserveErrorKind {
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    pub const fn kind(&self) -> TryReserveErrorKind {
        self.kind.clone()
     }
 }
 
 /// Details of the allocation that caused a `TryReserveError`
-#[derive(Clone, PartialEq, Eq, Debug)]
+#[derive(PartialEq, Eq, Debug)]
 #[unstable(
     feature = "try_reserve_kind",
     reason = "Uncertain how much info should be exposed",
@@ -120,6 +121,24 @@ pub enum TryReserveErrorKind {
     },
 }
 
+#[unstable(
+    feature = "try_reserve_kind",
+    reason = "Uncertain how much info should be exposed",
+    issue = "48043"
+)]
+#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+#[cfg(not(test))]
+impl const Clone for TryReserveErrorKind {
+    fn clone(&self) -> Self {
+        match self {
+            TryReserveErrorKind::CapacityOverflow => TryReserveErrorKind::CapacityOverflow,
+            TryReserveErrorKind::AllocError { layout, non_exhaustive: () } => {
+                TryReserveErrorKind::AllocError { layout: *layout, non_exhaustive: () }
+            }
+        }
+    }
+}
+
 #[cfg(test)]
 pub use realalloc::collections::TryReserveErrorKind;
 
diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
index 87ad5b0ce30e6..b840b4436b794 100644
--- a/library/alloc/src/lib.rs
+++ b/library/alloc/src/lib.rs
@@ -101,10 +101,15 @@
 #![feature(char_max_len)]
 #![feature(clone_to_uninit)]
 #![feature(coerce_unsized)]
+#![feature(const_clone)]
+#![feature(const_cmp)]
 #![feature(const_convert)]
 #![feature(const_default)]
+#![feature(const_destruct)]
 #![feature(const_eval_select)]
 #![feature(const_heap)]
+#![feature(const_option_ops)]
+#![feature(const_try)]
 #![feature(core_intrinsics)]
 #![feature(deprecated_suggestion)]
 #![feature(deref_pure_trait)]
@@ -165,6 +170,7 @@
 #![feature(const_trait_impl)]
 #![feature(coroutine_trait)]
 #![feature(decl_macro)]
+#![feature(derive_const)]
 #![feature(dropck_eyepatch)]
 #![feature(fundamental)]
 #![feature(hashmap_internals)]
diff --git a/library/alloc/src/raw_vec/mod.rs b/library/alloc/src/raw_vec/mod.rs
index 236e33e2f450e..8c080c6fc17b0 100644
--- a/library/alloc/src/raw_vec/mod.rs
+++ b/library/alloc/src/raw_vec/mod.rs
@@ -4,7 +4,7 @@
 // Note: This module is also included in the alloctests crate using #[path] to
 // run the tests. See the comment there for an explanation why this is the case.
 
-use core::marker::PhantomData;
+use core::marker::{Destruct, PhantomData};
 use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
 use core::ptr::{self, Alignment, NonNull, Unique};
 use core::{cmp, hint};
@@ -24,7 +24,7 @@ mod tests;
 // only one location which panics rather than a bunch throughout the module.
 #[cfg(not(no_global_oom_handling))]
 #[cfg_attr(not(panic = "immediate-abort"), inline(never))]
-fn capacity_overflow() -> ! {
+const fn capacity_overflow() -> ! {
     panic!("capacity overflow");
 }
 
@@ -182,7 +182,11 @@ impl<T, A: Allocator> RawVec<T, A> {
     /// allocator for the returned `RawVec`.
     #[cfg(not(no_global_oom_handling))]
     #[inline]
-    pub(crate) fn with_capacity_in(capacity: usize, alloc: A) -> Self {
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    pub(crate) const fn with_capacity_in(capacity: usize, alloc: A) -> Self
+    where
+        A: [const] Allocator + [const] Destruct,
+    {
         Self {
             inner: RawVecInner::with_capacity_in(capacity, alloc, T::LAYOUT),
             _marker: PhantomData,
@@ -331,7 +335,11 @@ impl<T, A: Allocator> RawVec<T, A> {
     /// caller to ensure `len == self.capacity()`.
     #[cfg(not(no_global_oom_handling))]
     #[inline(never)]
-    pub(crate) fn grow_one(&mut self) {
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    pub(crate) const fn grow_one(&mut self)
+    where
+        A: [const] Allocator,
+    {
         // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
         unsafe { self.inner.grow_one(T::LAYOUT) }
     }
@@ -415,7 +423,11 @@ impl<A: Allocator> RawVecInner<A> {
 
     #[cfg(not(no_global_oom_handling))]
     #[inline]
-    fn with_capacity_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self {
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const fn with_capacity_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self
+    where
+        A: [const] Allocator + [const] Destruct,
+    {
         match Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout) {
             Ok(this) => {
                 unsafe {
@@ -446,12 +458,16 @@ impl<A: Allocator> RawVecInner<A> {
         }
     }
 
-    fn try_allocate_in(
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const fn try_allocate_in(
         capacity: usize,
         init: AllocInit,
         alloc: A,
         elem_layout: Layout,
-    ) -> Result<Self, TryReserveError> {
+    ) -> Result<Self, TryReserveError>
+    where
+        A: [const] Allocator + [const] Destruct,
+    {
         // We avoid `unwrap_or_else` here because it bloats the amount of
         // LLVM IR generated.
         let layout = match layout_array(capacity, elem_layout) {
@@ -519,7 +535,8 @@ impl<A: Allocator> RawVecInner<A> {
     /// initially construct `self`
     /// - `elem_layout`'s size must be a multiple of its alignment
     #[inline]
-    unsafe fn current_memory(&self, elem_layout: Layout) -> Option<(NonNull<u8>, Layout)> {
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const unsafe fn current_memory(&self, elem_layout: Layout) -> Option<(NonNull<u8>, Layout)> {
         if elem_layout.size() == 0 || self.cap.as_inner() == 0 {
             None
         } else {
@@ -572,7 +589,11 @@ impl<A: Allocator> RawVecInner<A> {
     /// - `elem_layout`'s size must be a multiple of its alignment
     #[cfg(not(no_global_oom_handling))]
     #[inline]
-    unsafe fn grow_one(&mut self, elem_layout: Layout) {
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const unsafe fn grow_one(&mut self, elem_layout: Layout)
+    where
+        A: [const] Allocator,
+    {
         // SAFETY: Precondition passed to caller
         if let Err(err) = unsafe { self.grow_amortized(self.cap.as_inner(), 1, elem_layout) } {
             handle_error(err);
@@ -651,12 +672,13 @@ impl<A: Allocator> RawVecInner<A> {
     }
 
     #[inline]
-    fn needs_to_grow(&self, len: usize, additional: usize, elem_layout: Layout) -> bool {
+    const fn needs_to_grow(&self, len: usize, additional: usize, elem_layout: Layout) -> bool {
         additional > self.capacity(elem_layout.size()).wrapping_sub(len)
     }
 
     #[inline]
-    unsafe fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) {
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const unsafe fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) {
         // Allocators currently return a `NonNull<[u8]>` whose length matches
         // the size requested. If that ever changes, the capacity here should
         // change to `ptr.len() / size_of::<T>()`.
@@ -669,12 +691,16 @@ impl<A: Allocator> RawVecInner<A> {
     /// initially construct `self`
     /// - `elem_layout`'s size must be a multiple of its alignment
     /// - The sum of `len` and `additional` must be greater than the current capacity
-    unsafe fn grow_amortized(
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const unsafe fn grow_amortized(
         &mut self,
         len: usize,
         additional: usize,
         elem_layout: Layout,
-    ) -> Result<(), TryReserveError> {
+    ) -> Result<(), TryReserveError>
+    where
+        A: [const] Allocator,
+    {
         // This is ensured by the calling contexts.
         debug_assert!(additional > 0);
 
@@ -737,15 +763,20 @@ impl<A: Allocator> RawVecInner<A> {
     // not marked inline(never) since we want optimizers to be able to observe the specifics of this
     // function, see tests/codegen-llvm/vec-reserve-extend.rs.
     #[cold]
-    unsafe fn finish_grow(
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const unsafe fn finish_grow(
         &self,
         cap: usize,
         elem_layout: Layout,
-    ) -> Result<NonNull<[u8]>, TryReserveError> {
+    ) -> Result<NonNull<[u8]>, TryReserveError>
+    where
+        A: [const] Allocator,
+    {
         let new_layout = layout_array(cap, elem_layout)?;
 
         let memory = if let Some((ptr, old_layout)) = unsafe { self.current_memory(elem_layout) } {
-            debug_assert_eq!(old_layout.align(), new_layout.align());
+            // FIXME(const-hack): switch to `debug_assert_eq`
+            debug_assert!(old_layout.align() == new_layout.align());
             unsafe {
                 // The allocator checks for alignment equality
                 hint::assert_unchecked(old_layout.align() == new_layout.align());
@@ -755,7 +786,11 @@ impl<A: Allocator> RawVecInner<A> {
             self.alloc.allocate(new_layout)
         };
 
-        memory.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () }.into())
+        // FIXME(const-hack): switch back to `map_err`
+        match memory {
+            Ok(memory) => Ok(memory),
+            Err(_) => Err(AllocError { layout: new_layout, non_exhaustive: () }.into()),
+        }
     }
 
     /// # Safety
@@ -841,7 +876,8 @@ impl<A: Allocator> RawVecInner<A> {
 #[cfg(not(no_global_oom_handling))]
 #[cold]
 #[optimize(size)]
-fn handle_error(e: TryReserveError) -> ! {
+#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+const fn handle_error(e: TryReserveError) -> ! {
     match e.kind() {
         CapacityOverflow => capacity_overflow(),
         AllocError { layout, .. } => handle_alloc_error(layout),
@@ -849,6 +885,11 @@ fn handle_error(e: TryReserveError) -> ! {
 }
 
 #[inline]
-fn layout_array(cap: usize, elem_layout: Layout) -> Result<Layout, TryReserveError> {
-    elem_layout.repeat(cap).map(|(layout, _pad)| layout).map_err(|_| CapacityOverflow.into())
+#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+const fn layout_array(cap: usize, elem_layout: Layout) -> Result<Layout, TryReserveError> {
+    // FIXME(const-hack): return to using `map` and `map_err` once `const_closures` is implemented
+    match elem_layout.repeat(cap) {
+        Ok((layout, _pad)) => Ok(layout),
+        Err(_) => Err(CapacityOverflow.into()),
+    }
 }
diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs
index 45d6c28e186e4..3c75d3545c98e 100644
--- a/library/alloc/src/vec/mod.rs
+++ b/library/alloc/src/vec/mod.rs
@@ -79,6 +79,8 @@ use core::cmp::Ordering;
 use core::hash::{Hash, Hasher};
 #[cfg(not(no_global_oom_handling))]
 use core::iter;
+#[cfg(not(no_global_oom_handling))]
+use core::marker::Destruct;
 use core::marker::PhantomData;
 use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
 use core::ops::{self, Index, IndexMut, Range, RangeBounds};
@@ -515,7 +517,8 @@ impl<T> Vec<T> {
     #[stable(feature = "rust1", since = "1.0.0")]
     #[must_use]
     #[rustc_diagnostic_item = "vec_with_capacity"]
-    pub fn with_capacity(capacity: usize) -> Self {
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    pub const fn with_capacity(capacity: usize) -> Self {
         Self::with_capacity_in(capacity, Global)
     }
 
@@ -844,6 +847,16 @@ impl<T> Vec<T> {
         // SAFETY: A `Vec` always has a non-null pointer.
         (unsafe { NonNull::new_unchecked(ptr) }, len, capacity)
     }
+
+    /// Leaks the `Vec` so its allocation can be interned statically. This must
+    /// be done for all `Vec`s created during compile time.
+    #[unstable(feature = "const_heap", issue = "79597")]
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    pub const fn const_leak(mut self) -> &'static [T] {
+        unsafe { core::intrinsics::const_make_global(self.as_mut_ptr().cast()) };
+        let me = ManuallyDrop::new(self);
+        unsafe { slice::from_raw_parts(me.as_ptr(), me.len) }
+    }
 }
 
 impl<T, A: Allocator> Vec<T, A> {
@@ -925,7 +938,11 @@ impl<T, A: Allocator> Vec<T, A> {
     #[cfg(not(no_global_oom_handling))]
     #[inline]
     #[unstable(feature = "allocator_api", issue = "32838")]
-    pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    pub const fn with_capacity_in(capacity: usize, alloc: A) -> Self
+    where
+        A: [const] Allocator + [const] Destruct,
+    {
         Vec { buf: RawVec::with_capacity_in(capacity, alloc), len: 0 }
     }
 
@@ -2560,7 +2577,11 @@ impl<T, A: Allocator> Vec<T, A> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     #[rustc_confusables("push_back", "put", "append")]
-    pub fn push(&mut self, value: T) {
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    pub const fn push(&mut self, value: T)
+    where
+        A: [const] Allocator,
+    {
         let _ = self.push_mut(value);
     }
 
@@ -2637,7 +2658,11 @@ impl<T, A: Allocator> Vec<T, A> {
     #[inline]
     #[unstable(feature = "push_mut", issue = "135974")]
     #[must_use = "if you don't need a reference to the value, use `Vec::push` instead"]
-    pub fn push_mut(&mut self, value: T) -> &mut T {
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    pub const fn push_mut(&mut self, value: T) -> &mut T
+    where
+        A: [const] Allocator,
+    {
         // Inform codegen that the length does not change across grow_one().
         let len = self.len;
         // This will panic or abort if we would allocate > isize::MAX bytes
diff --git a/library/alloctests/lib.rs b/library/alloctests/lib.rs
index 0201c8752210c..e55cb16a0b718 100644
--- a/library/alloctests/lib.rs
+++ b/library/alloctests/lib.rs
@@ -20,6 +20,13 @@
 #![feature(assert_matches)]
 #![feature(char_internals)]
 #![feature(char_max_len)]
+#![feature(const_alloc_error)]
+#![feature(const_cmp)]
+#![feature(const_convert)]
+#![feature(const_destruct)]
+#![feature(const_heap)]
+#![feature(const_option_ops)]
+#![feature(const_try)]
 #![feature(core_intrinsics)]
 #![feature(exact_size_is_empty)]
 #![feature(extend_one)]
diff --git a/library/alloctests/tests/lib.rs b/library/alloctests/tests/lib.rs
index 49fb21ef5f3ac..81c4f3dab6d20 100644
--- a/library/alloctests/tests/lib.rs
+++ b/library/alloctests/tests/lib.rs
@@ -1,5 +1,6 @@
 #![feature(allocator_api)]
 #![feature(alloc_layout_extra)]
+#![feature(const_heap)]
 #![feature(iter_array_chunks)]
 #![feature(assert_matches)]
 #![feature(wtf8_internals)]
diff --git a/library/alloctests/tests/vec.rs b/library/alloctests/tests/vec.rs
index ea334ab0f143a..06718e30e650c 100644
--- a/library/alloctests/tests/vec.rs
+++ b/library/alloctests/tests/vec.rs
@@ -2720,3 +2720,19 @@ fn vec_null_ptr_roundtrip() {
     let new = roundtripped.with_addr(ptr.addr());
     unsafe { new.read() };
 }
+
+#[test]
+fn const_heap() {
+    const X: &'static [u32] = {
+        let mut v = Vec::with_capacity(6);
+        let mut x = 1;
+        while x < 42 {
+            v.push(x);
+            x *= 2;
+        }
+        assert!(v.len() == 6);
+        v.const_leak()
+    };
+
+    assert_eq!([1, 2, 4, 8, 16, 32], X);
+}
diff --git a/library/core/src/alloc/mod.rs b/library/core/src/alloc/mod.rs
index 9d608d5e83c40..92df687e2cd88 100644
--- a/library/core/src/alloc/mod.rs
+++ b/library/core/src/alloc/mod.rs
@@ -102,6 +102,8 @@ impl fmt::Display for AllocError {
 ///
 /// [*currently allocated*]: #currently-allocated-memory
 #[unstable(feature = "allocator_api", issue = "32838")]
+#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+#[const_trait]
 pub unsafe trait Allocator {
     /// Attempts to allocate a block of memory.
     ///
@@ -368,9 +370,10 @@ pub unsafe trait Allocator {
 }
 
 #[unstable(feature = "allocator_api", issue = "32838")]
-unsafe impl<A> Allocator for &A
+#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+unsafe impl<A> const Allocator for &A
 where
-    A: Allocator + ?Sized,
+    A: [const] Allocator + ?Sized,
 {
     #[inline]
     fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
diff --git a/tests/ui/consts/const-eval/heap/vec-not-made-global.rs b/tests/ui/consts/const-eval/heap/vec-not-made-global.rs
new file mode 100644
index 0000000000000..4f78e977e4d59
--- /dev/null
+++ b/tests/ui/consts/const-eval/heap/vec-not-made-global.rs
@@ -0,0 +1,5 @@
+#![feature(const_heap)]
+const V: Vec<u32> = Vec::with_capacity(1);
+//~^ ERROR: encountered `const_allocate` pointer in final value that was not made global
+
+fn main() {}
diff --git a/tests/ui/consts/const-eval/heap/vec-not-made-global.stderr b/tests/ui/consts/const-eval/heap/vec-not-made-global.stderr
new file mode 100644
index 0000000000000..595cbeb8df26b
--- /dev/null
+++ b/tests/ui/consts/const-eval/heap/vec-not-made-global.stderr
@@ -0,0 +1,10 @@
+error: encountered `const_allocate` pointer in final value that was not made global
+  --> $DIR/vec-not-made-global.rs:2:1
+   |
+LL | const V: Vec<u32> = Vec::with_capacity(1);
+   | ^^^^^^^^^^^^^^^^^
+   |
+   = note: use `const_make_global` to turn allocated pointers into immutable globals before returning
+
+error: aborting due to 1 previous error
+
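
Illustrative note, not part of the patch: every `Global` change above follows one `const_eval_select` dispatch shape, a `const fn` front-end that forwards an argument tuple either to a CTFE body or to a runtime body with an identical signature. A minimal standalone sketch of that pattern follows; it assumes a current nightly toolchain, and the `double_*` names are invented for the example:

    #![feature(core_intrinsics, const_eval_select)]

    // CTFE arm: this is where intrinsics like `const_allocate` would live.
    const fn double_const(n: usize) -> usize {
        n * 2
    }

    // Runtime arm: free to call non-const code, e.g. the global allocator.
    fn double_runtime(n: usize) -> usize {
        n * 2
    }

    const fn double(n: usize) -> usize {
        // The first argument is the tuple of shared arguments, just like
        // `(layout, zeroed)` in `Global::alloc_impl` above.
        core::intrinsics::const_eval_select((n,), double_const, double_runtime)
    }

    const AT_COMPILE_TIME: usize = double(21);

    fn main() {
        assert_eq!(AT_COMPILE_TIME, 42); // evaluated during CTFE
        assert_eq!(double(21), 42); // takes the runtime arm
    }

The two arms must be observably equivalent; that requirement is why the patch splits each operation into separate `*_const`/`*_runtime` fn items with matching signatures rather than branching inside a single body.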
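The other mechanism the patch leans on is const traits: `Allocator` gains `#[const_trait]`, `Global` gets an `unsafe impl const Allocator`, and callers such as `Vec::push` take `[const]` bounds. A tiny sketch of that machinery, again on nightly and with invented names (`Describe`, `Meters`, `total`):

    #![feature(const_trait_impl)]

    // A trait that may additionally be implemented as `const`,
    // analogous to `#[const_trait] pub unsafe trait Allocator` above.
    #[const_trait]
    trait Describe {
        fn describe(&self) -> usize;
    }

    struct Meters(usize);

    // A const impl, analogous to `unsafe impl const Allocator for Global`.
    impl const Describe for Meters {
        fn describe(&self) -> usize {
            self.0
        }
    }

    // A `[const]` bound: usable in const contexts whenever `T`'s impl is
    // const, mirroring `A: [const] Allocator` on `Vec::push` and friends.
    const fn total<T: [const] Describe>(x: &T) -> usize {
        x.describe()
    }

    const TOTAL: usize = total(&Meters(42));

    fn main() {
        assert_eq!(TOTAL, 42);
    }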