diff --git a/src/adaptors/alloc.rs b/src/adaptors/alloc.rs index ff863d2..6142b4c 100644 --- a/src/adaptors/alloc.rs +++ b/src/adaptors/alloc.rs @@ -3,14 +3,14 @@ macro_rules! alloc_ref { () => { #[inline(always)] - fn alloc(&mut self, layout: Layout) -> Result, AllocErr> { + fn alloc(&self, layout: Layout) -> Result, AllocError> { let size = layout.size(); let ptr = unsafe { self.alloc_alloc_zeroed(layout) }?; Ok(NonNull::slice_from_raw_parts(ptr, size)) } #[inline(always)] - unsafe fn dealloc(&mut self, ptr: MemoryAddress, layout: Layout) { + unsafe fn dealloc(&self, ptr: MemoryAddress, layout: Layout) { self.alloc_dealloc(ptr, layout) } }; diff --git a/src/adaptors/alloc_to_allocator_adaptor.rs b/src/adaptors/alloc_to_allocator_adaptor.rs index 4b09a84..a2b3988 100644 --- a/src/adaptors/alloc_to_allocator_adaptor.rs +++ b/src/adaptors/alloc_to_allocator_adaptor.rs @@ -1,7 +1,7 @@ use super::extensions::prelude::*; use crate::allocators::allocator::Allocator; use crate::memory_address::MemoryAddress; -use std::alloc::{AllocErr, GlobalAlloc, Layout}; +use std::alloc::{AllocError, GlobalAlloc, Layout}; use std::cell::UnsafeCell; use std::fmt; use std::fmt::Debug; @@ -33,12 +33,12 @@ impl Allocator for AllocToAllocatorAdaptor { &self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize, - ) -> Result { + ) -> Result { NonNull::new(unsafe { self.mutable_reference() .alloc(Self::layout(non_zero_size, non_zero_power_of_two_alignment)) }) - .ok_or(AllocErr) + .ok_or(AllocError) } #[inline(always)] @@ -63,7 +63,7 @@ impl Allocator for AllocToAllocatorAdaptor { non_zero_power_of_two_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, current_memory: MemoryAddress, - ) -> Result { + ) -> Result { NonNull::new(unsafe { self.mutable_reference().realloc( current_memory.as_ptr(), @@ -71,7 +71,7 @@ impl Allocator for AllocToAllocatorAdaptor { non_zero_new_size.get(), ) }) - .ok_or(AllocErr) + .ok_or(AllocError) } #[inline(always)] @@ -81,7 +81,7 @@ impl Allocator for AllocToAllocatorAdaptor { non_zero_power_of_two_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, current_memory: MemoryAddress, - ) -> Result { + ) -> Result { NonNull::new(unsafe { self.mutable_reference().realloc( current_memory.as_ptr(), @@ -89,7 +89,7 @@ impl Allocator for AllocToAllocatorAdaptor { non_zero_new_size.get(), ) }) - .ok_or(AllocErr) + .ok_or(AllocError) } } diff --git a/src/adaptors/allocator_adaptor.rs b/src/adaptors/allocator_adaptor.rs index 0f1258c..3219dac 100644 --- a/src/adaptors/allocator_adaptor.rs +++ b/src/adaptors/allocator_adaptor.rs @@ -1,7 +1,7 @@ use crate::allocators::allocator::Allocator; use crate::memory_address::MemoryAddress; use core::ptr::NonNull; -use std::alloc::{AllocErr, AllocRef, GlobalAlloc, Layout}; +use std::alloc::{AllocError, AllocRef, GlobalAlloc, Layout}; use std::ops::Deref; use std::num::NonZeroUsize; @@ -44,14 +44,14 @@ unsafe impl<'a, A: 'a + Allocator> GlobalAlloc for AllocatorAdaptor<'a, A> { unsafe impl<'a, A: 'a + Allocator> AllocRef for AllocatorAdaptor<'a, A> { #[inline(always)] - fn alloc(&mut self, layout: Layout) -> Result, AllocErr> { + fn alloc(&self, layout: Layout) -> Result, AllocError> { let size = layout.size(); let ptr = unsafe { self.alloc_alloc_zeroed(layout) }?; Ok(NonNull::slice_from_raw_parts(ptr, size)) } #[inline(always)] - unsafe fn dealloc(&mut self, ptr: MemoryAddress, layout: Layout) { + unsafe fn dealloc(&self, ptr: MemoryAddress, layout: Layout) { self.alloc_dealloc(ptr, layout) } } @@ -62,7 +62,7 @@ 
impl<'a, A: 'a + Allocator> Allocator for AllocatorAdaptor<'a, A> { &self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize, - ) -> Result { + ) -> Result { self.0 .allocate(non_zero_size, non_zero_power_of_two_alignment) } @@ -88,7 +88,7 @@ impl<'a, A: 'a + Allocator> Allocator for AllocatorAdaptor<'a, A> { non_zero_power_of_two_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, current_memory: MemoryAddress, - ) -> Result { + ) -> Result { self.0.growing_reallocate( non_zero_new_size, non_zero_power_of_two_alignment, @@ -104,7 +104,7 @@ impl<'a, A: 'a + Allocator> Allocator for AllocatorAdaptor<'a, A> { non_zero_power_of_two_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, current_memory: MemoryAddress, - ) -> Result { + ) -> Result { self.0.shrinking_reallocate( non_zero_new_size, non_zero_power_of_two_alignment, diff --git a/src/adaptors/global_alloc_to_allocator_adaptor.rs b/src/adaptors/global_alloc_to_allocator_adaptor.rs index 481f741..782813b 100644 --- a/src/adaptors/global_alloc_to_allocator_adaptor.rs +++ b/src/adaptors/global_alloc_to_allocator_adaptor.rs @@ -1,6 +1,6 @@ use crate::allocators::allocator::Allocator; use crate::memory_address::MemoryAddress; -use std::alloc::{AllocErr, GlobalAlloc, Layout}; +use std::alloc::{AllocError, GlobalAlloc, Layout}; use std::fmt; use std::fmt::Debug; use std::fmt::Formatter; @@ -32,7 +32,7 @@ impl Allocator for GlobalAllocToAllocatorAdaptor { &self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize, - ) -> Result { + ) -> Result { unsafe { transmute( self.0 @@ -63,7 +63,7 @@ impl Allocator for GlobalAllocToAllocatorAdaptor { non_zero_power_of_two_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, current_memory: MemoryAddress, - ) -> Result { + ) -> Result { unsafe { transmute(self.0.realloc( current_memory.as_ptr(), @@ -80,7 +80,7 @@ impl Allocator for GlobalAllocToAllocatorAdaptor { non_zero_power_of_two_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, current_memory: MemoryAddress, - ) -> Result { + ) -> Result { unsafe { transmute(self.0.realloc( current_memory.as_ptr(), diff --git a/src/allocators/allocator.rs b/src/allocators/allocator.rs index 0ea4797..847a37f 100644 --- a/src/allocators/allocator.rs +++ b/src/allocators/allocator.rs @@ -4,7 +4,7 @@ use crate::extensions::non_null_pointer::non_null_pointer; use crate::extensions::prelude::*; use crate::extensions::usize_ext::UsizeExt; use crate::memory_address::MemoryAddress; -use std::alloc::{AllocErr, Layout}; +use std::alloc::{AllocError, Layout}; use std::fmt::Debug; use std::intrinsics::transmute; use std::num::NonZeroUsize; @@ -20,7 +20,7 @@ pub trait Allocator: Debug + Sized { &self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize, - ) -> Result; + ) -> Result; /// Deallocate (free) memory. /// @@ -42,7 +42,7 @@ pub trait Allocator: Debug + Sized { non_zero_power_of_two_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, current_memory: MemoryAddress, - ) -> Result; + ) -> Result; /// Reallocate memory by shrinking it. /// @@ -54,7 +54,7 @@ pub trait Allocator: Debug + Sized { non_zero_power_of_two_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, current_memory: MemoryAddress, - ) -> Result; + ) -> Result; /// Adapts to a `GlobalAlloc` and `Alloc`. 
#[inline(always)] @@ -70,7 +70,7 @@ pub trait Allocator: Debug + Sized { #[doc(hidden)] #[inline(always)] - fn allocate_zeroed(&self, layout: Layout) -> Result { + fn allocate_zeroed(&self, layout: Layout) -> Result { let maybe_zero_size = layout.size(); if unlikely!(maybe_zero_size == 0) { @@ -81,7 +81,7 @@ pub trait Allocator: Debug + Sized { let non_zero_align = layout.align().non_zero(); let result = self.allocate(non_zero_size, non_zero_align); - // NOTE: AllocErr does not implement `Copy`, but is zero-sized - seems like a Rust API oversight. + // NOTE: AllocError does not implement `Copy`, but is zero-sized - seems like a Rust API oversight. // Hence the logic transmuting it to a pointer (for an efficient null check), then back to a result. let pointer = unsafe { transmute::<_, *mut u8>(result) }; @@ -99,7 +99,7 @@ pub trait Allocator: Debug + Sized { current_memory: MemoryAddress, layout: Layout, new_size: usize, - ) -> Result { + ) -> Result { let current_size = layout.size(); if unlikely!(current_size == new_size) { @@ -197,7 +197,7 @@ pub trait Allocator: Debug + Sized { #[doc(hidden)] #[inline(always)] - unsafe fn alloc_alloc(&self, layout: Layout) -> Result { + unsafe fn alloc_alloc(&self, layout: Layout) -> Result { if unlikely!(layout.size() == 0) { return Ok(Self::ZERO_SIZED_ALLOCATION); } @@ -208,7 +208,7 @@ pub trait Allocator: Debug + Sized { #[doc(hidden)] #[inline(always)] - unsafe fn alloc_alloc_zeroed(&self, layout: Layout) -> Result { + unsafe fn alloc_alloc_zeroed(&self, layout: Layout) -> Result { self.allocate_zeroed(layout) } @@ -234,7 +234,7 @@ pub trait Allocator: Debug + Sized { ptr: MemoryAddress, layout: Layout, new_size: usize, - ) -> Result { + ) -> Result { self.reallocate(ptr, layout, new_size) } } diff --git a/src/allocators/bit_set/bit_set_allocator.rs b/src/allocators/bit_set/bit_set_allocator.rs index 1f96a02..578883c 100644 --- a/src/allocators/bit_set/bit_set_allocator.rs +++ b/src/allocators/bit_set/bit_set_allocator.rs @@ -11,7 +11,7 @@ use crate::extensions::prelude::*; use crate::memory_address::MemoryAddress; use crate::memory_sources::memory_source::MemorySource; use either::*; -use std::alloc::AllocErr; +use std::alloc::AllocError; use std::cell::Cell; use std::num::NonZeroUsize; @@ -45,7 +45,7 @@ impl Allocator for BitSetAllocator { &self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize, - ) -> Result { + ) -> Result { let number_of_bits_required = self.number_of_bits_required(non_zero_size); let power_of_two_exponent = if self @@ -60,7 +60,7 @@ impl Allocator for BitSetAllocator { let alignment_exceeds_that_which_can_be_accommodated_in_one_bit_set_word = power_of_two_exponent > BitSetWord::SIZE_IN_BITS; if unlikely!(alignment_exceeds_that_which_can_be_accommodated_in_one_bit_set_word) { - return Err(AllocErr); + return Err(AllocError); } power_of_two_exponent @@ -143,7 +143,7 @@ impl Allocator for BitSetAllocator { non_zero_power_of_two_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, current_memory: MemoryAddress, - ) -> Result { + ) -> Result { let current_number_of_bits_required = self.number_of_bits_required(non_zero_current_size); let new_number_of_bits_required = self.number_of_bits_required(non_zero_new_size); @@ -192,7 +192,7 @@ impl Allocator for BitSetAllocator { non_zero_power_of_two_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, current_memory: MemoryAddress, - ) -> Result { + ) -> Result { let current_number_of_bits_required = 
self.number_of_bits_required(non_zero_current_size); let new_number_of_bits_required = self.number_of_bits_required(non_zero_new_size); @@ -227,7 +227,7 @@ impl BitSetAllocator { pub fn new_by_amount_8( memory_source: MS, memory_source_size: NonZeroUsize, - ) -> Result { + ) -> Result { Self::new_by_amount(memory_source, 8usize.non_zero(), memory_source_size) } @@ -236,7 +236,7 @@ impl BitSetAllocator { pub fn new_by_amount_16( memory_source: MS, memory_source_size: NonZeroUsize, - ) -> Result { + ) -> Result { Self::new_by_amount(memory_source, 16usize.non_zero(), memory_source_size) } @@ -245,7 +245,7 @@ impl BitSetAllocator { pub fn new_by_amount_32( memory_source: MS, memory_source_size: NonZeroUsize, - ) -> Result { + ) -> Result { Self::new_by_amount(memory_source, 32usize.non_zero(), memory_source_size) } @@ -255,7 +255,7 @@ impl BitSetAllocator { memory_source: MS, block_size: NonZeroUsize, memory_source_size: NonZeroUsize, - ) -> Result { + ) -> Result { let number_of_blocks = ((memory_source_size.get() + (block_size.get() - 1)) / block_size.get()).non_zero(); @@ -268,7 +268,7 @@ impl BitSetAllocator { memory_source: MS, block_size: NonZeroUsize, number_of_blocks: NonZeroUsize, - ) -> Result { + ) -> Result { debug_assert!( block_size.is_power_of_two(), "block_size `{:?}` must be a power of 2", @@ -342,7 +342,7 @@ impl BitSetAllocator { &self, number_of_bits_required: NumberOfBits, power_of_two_exponent: usize, - ) -> Result { + ) -> Result { debug_assert!(number_of_bits_required.is_not_zero()); macro_rules! scan @@ -404,7 +404,7 @@ impl BitSetAllocator { callback ); - Err(AllocErr) + Err(AllocError) } #[inline(always)] diff --git a/src/allocators/bump_allocator.rs b/src/allocators/bump_allocator.rs index 33152c8..8f58770 100644 --- a/src/allocators/bump_allocator.rs +++ b/src/allocators/bump_allocator.rs @@ -6,7 +6,7 @@ use crate::extensions::non_zero_usize::non_zero_usize; use crate::extensions::prelude::*; use crate::memory_address::MemoryAddress; use crate::memory_sources::memory_source::MemorySource; -use std::alloc::AllocErr; +use std::alloc::AllocError; use std::cell::Cell; use std::fmt::Debug; use std::intrinsics::transmute; @@ -55,14 +55,14 @@ macro_rules! 
allocation_ends_at_pointer let pointer: *mut u8 = unsafe { transmute($allocation_from.checked_add(size)) }; if unlikely!(pointer.is_null()) { - return Err(AllocErr) + return Err(AllocError) } unsafe { transmute(pointer) } }; if unlikely!(allocation_ends_at_pointer > $self.ends_at_pointer) { - return Err(AllocErr) + return Err(AllocError) } allocation_ends_at_pointer @@ -76,7 +76,7 @@ impl Allocator for BumpAllocator { &self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize, - ) -> Result { + ) -> Result { debug_assert!( non_zero_power_of_two_alignment <= Self::MAXIMUM_POWER_OF_TWO_ALIGNMENT, "non_zero_power_of_two_alignment `{}` exceeds `{}`", @@ -121,7 +121,7 @@ impl Allocator for BumpAllocator { _non_zero_power_of_two_alignment: NonZeroUsize, _non_zero_current_size: NonZeroUsize, current_memory: MemoryAddress, - ) -> Result { + ) -> Result { if unlikely!(current_memory == self.most_recent_allocation_pointer.get()) { let size = non_zero_new_size.get(); self.next_allocation_at_pointer @@ -138,7 +138,7 @@ impl Allocator for BumpAllocator { non_zero_power_of_two_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, current_memory: MemoryAddress, - ) -> Result { + ) -> Result { if unlikely!(current_memory == self.most_recent_allocation_pointer.get()) { self.next_allocation_at_pointer .set(allocation_ends_at_pointer!( @@ -151,7 +151,7 @@ impl Allocator for BumpAllocator { let result = self.allocate(non_zero_new_size, non_zero_power_of_two_alignment); let pointer: *mut u8 = unsafe { transmute(result) }; if unlikely!(pointer.is_null()) { - Err(AllocErr) + Err(AllocError) } else { let current_size = non_zero_current_size.get(); unsafe { pointer.copy_from(current_memory.as_ptr(), current_size) }; @@ -173,7 +173,7 @@ impl BumpAllocator { /// New instance wrapping a block of memory. 
#[inline(always)] - pub fn new(memory_source: MS, memory_source_size: NonZeroUsize) -> Result { + pub fn new(memory_source: MS, memory_source_size: NonZeroUsize) -> Result { let allocations_start_from = memory_source.obtain(memory_source_size)?; Ok(Self { diff --git a/src/allocators/context_allocator.rs b/src/allocators/context_allocator.rs index 695fd47..8d15fa8 100644 --- a/src/allocators/context_allocator.rs +++ b/src/allocators/context_allocator.rs @@ -7,7 +7,7 @@ use crate::allocators::multiple_binary_search_tree_allocator::MultipleBinarySear use crate::memory_address::MemoryAddress; use crate::memory_sources::memory_source::MemorySource; -use std::alloc::AllocErr; +use std::alloc::AllocError; use std::fmt::Debug; use std::num::NonZeroUsize; @@ -40,7 +40,7 @@ impl Allocator for ContextAllocator { &self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize, - ) -> Result { + ) -> Result { use self::ContextAllocator::*; match *self { @@ -95,7 +95,7 @@ impl Allocator for ContextAllocator { non_zero_power_of_two_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, current_memory: MemoryAddress, - ) -> Result { + ) -> Result { use self::ContextAllocator::*; match *self { @@ -129,7 +129,7 @@ impl Allocator for ContextAllocator { non_zero_power_of_two_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, current_memory: MemoryAddress, - ) -> Result { + ) -> Result { use self::ContextAllocator::*; match *self { diff --git a/src/allocators/global/switchable_allocator.rs b/src/allocators/global/switchable_allocator.rs index 736b764..3dadaf8 100644 --- a/src/allocators/global/switchable_allocator.rs +++ b/src/allocators/global/switchable_allocator.rs @@ -43,7 +43,7 @@ macro_rules! switchable_allocator { /// Std imports use std::num::NonZeroUsize; - use std::alloc::{AllocRef, AllocErr, GlobalAlloc, Layout, System}; + use std::alloc::{AllocRef, AllocError, GlobalAlloc, Layout, System}; use std::mem::replace; /// Effectively this is a field of `SwitchableAllocator` with a different value for each thread. @@ -76,7 +76,7 @@ macro_rules! switchable_allocator { &self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize, - ) -> Result { + ) -> Result { use allocator_suite::allocators::global::current_allocator_in_use::CurrentAllocatorInUse::*; match self.save_current_allocator_in_use() { @@ -120,7 +120,7 @@ macro_rules! switchable_allocator { non_zero_power_of_two_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, current_memory: MemoryAddress, - ) -> Result { + ) -> Result { choose_allocator!( self, current_memory, @@ -139,7 +139,7 @@ macro_rules! 
switchable_allocator { non_zero_power_of_two_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, current_memory: MemoryAddress, - ) -> Result { + ) -> Result { choose_allocator!( self, current_memory, diff --git a/src/allocators/memory_map_allocator.rs b/src/allocators/memory_map_allocator.rs index 9151ea3..6ce3f98 100644 --- a/src/allocators/memory_map_allocator.rs +++ b/src/allocators/memory_map_allocator.rs @@ -2,7 +2,7 @@ use crate::allocators::allocator::Allocator; use crate::memory_address::MemoryAddress; use crate::memory_sources::mmap::memory_map_source::MemoryMapSource; -use std::alloc::AllocErr; +use std::alloc::AllocError; use std::fmt::Debug; use std::num::NonZeroUsize; @@ -24,11 +24,11 @@ impl Allocator for MemoryMapAllocator { &self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize, - ) -> Result { + ) -> Result { const ASSUMED_PAGE_SIZE: usize = 4096; if unlikely!(non_zero_power_of_two_alignment.get() > ASSUMED_PAGE_SIZE) { - return Err(AllocErr); + return Err(AllocError); } self.0.mmap_memory(non_zero_size.get()) @@ -51,7 +51,7 @@ impl Allocator for MemoryMapAllocator { _non_zero_power_of_two_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, current_memory: MemoryAddress, - ) -> Result { + ) -> Result { self.0.mremap_memory( current_memory, non_zero_current_size.get(), @@ -66,7 +66,7 @@ impl Allocator for MemoryMapAllocator { _non_zero_power_of_two_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, current_memory: MemoryAddress, - ) -> Result { + ) -> Result { self.0.mremap_memory( current_memory, non_zero_current_size.get(), diff --git a/src/allocators/multiple_binary_search_tree_allocator.rs b/src/allocators/multiple_binary_search_tree_allocator.rs index 6231cbe..03d6d02 100644 --- a/src/allocators/multiple_binary_search_tree_allocator.rs +++ b/src/allocators/multiple_binary_search_tree_allocator.rs @@ -1,7 +1,7 @@ use crate::allocators::global::local_allocator::LocalAllocator; use crate::memory_address::MemoryAddress; use crate::memory_sources::memory_source::MemorySource; -use std::alloc::AllocErr; +use std::alloc::AllocError; use std::fmt; use std::fmt::Formatter; use std::num::NonZeroUsize; @@ -60,7 +60,7 @@ impl Allocator for MultipleBinarySearchTreeAllocator { &self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize, - ) -> Result { + ) -> Result { macro_rules! try_to_allocate_exact_size_block { ($node_pointer: ident, $is_cached_first_child: expr, $non_zero_power_of_two_alignment: ident, $binary_search_tree: ident, $_block_size: ident, $_exact_block_size: ident, $_self: ident) => {{ let memory_address = $node_pointer.value(); @@ -139,7 +139,7 @@ impl Allocator for MultipleBinarySearchTreeAllocator { non_zero_size ) ) { - return Err(AllocErr); + return Err(AllocError); } if unlikely!( @@ -147,7 +147,7 @@ impl Allocator for MultipleBinarySearchTreeAllocator { non_zero_power_of_two_alignment ) ) { - return Err(AllocErr); + return Err(AllocError); } // (1) Try to satisfy allocation from a binary search tree of blocks of the same size. 
@@ -188,7 +188,7 @@ impl Allocator for MultipleBinarySearchTreeAllocator { ); } - Err(AllocErr) + Err(AllocError) } #[inline(always)] @@ -219,7 +219,7 @@ impl Allocator for MultipleBinarySearchTreeAllocator { non_zero_power_of_two_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, current_memory: MemoryAddress, - ) -> Result, AllocErr> { + ) -> Result, AllocError> { debug_assert!( non_zero_new_size > non_zero_current_size, "non_zero_new_size `{}` should be greater than non_zero_current_size `{}`", @@ -274,7 +274,7 @@ impl Allocator for MultipleBinarySearchTreeAllocator { _non_zero_power_of_two_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, current_memory: MemoryAddress, - ) -> Result, AllocErr> { + ) -> Result, AllocError> { debug_assert!( non_zero_new_size < non_zero_current_size, "non_zero_new_size `{}` should be less than non_zero_current_size `{}`", @@ -311,7 +311,7 @@ impl MultipleBinarySearchTreeAllocator { /// The provided memory must be at least as long as the minimum block size. /// /// The memory must be aligned to `BinarySearchTreesWithCachedKnowledgeOfFirstChild::MinimumAlignment`, which is the same as the size of a `Node`. - pub fn new(memory_source: MS, memory_source_size: NonZeroUsize) -> Result { + pub fn new(memory_source: MS, memory_source_size: NonZeroUsize) -> Result { debug_assert_ne!( BinarySearchTreesWithCachedKnowledgeOfFirstChild::NUMBER_OF_BINARY_SEARCH_TREES, 0, diff --git a/src/memory_sources/arena_memory_source/arena_memory_source.rs b/src/memory_sources/arena_memory_source/arena_memory_source.rs index c329182..4b7df20 100644 --- a/src/memory_sources/arena_memory_source/arena_memory_source.rs +++ b/src/memory_sources/arena_memory_source/arena_memory_source.rs @@ -5,7 +5,7 @@ use crate::memory_address::MemoryAddress; use crate::memory_sources::arena_memory_source::slot_index::SlotIndex; use crate::memory_sources::arena_memory_source::unallocated_block::UnallocatedBlock; use crate::memory_sources::memory_source::MemorySource; -use std::alloc::AllocErr; +use std::alloc::AllocError; use std::cell::Cell; use std::num::NonZeroUsize; @@ -33,13 +33,13 @@ impl Drop for ArenaMemorySource { impl MemorySource for ArenaMemorySource { #[inline(always)] - fn obtain(&self, non_zero_size: NonZeroUsize) -> Result { + fn obtain(&self, non_zero_size: NonZeroUsize) -> Result { debug_assert!(non_zero_size <= self.block_size); let next_available_slot_index = self.next_available_slot_index.get(); if unlikely!(next_available_slot_index.is_fully_allocated()) { - return Err(AllocErr); + return Err(AllocError); } let unallocated_block = self.unallocated_block(next_available_slot_index); @@ -70,7 +70,7 @@ impl ArenaMemorySource { block_size: NonZeroUsize, memory_source_size: NonZeroUsize, block_initializer: impl Fn(MemoryAddress, NonZeroUsize), - ) -> Result { + ) -> Result { let number_of_blocks = ((memory_source_size.get() + (block_size.get() - 1)) / block_size.get()).non_zero(); @@ -92,7 +92,7 @@ impl ArenaMemorySource { block_size: NonZeroUsize, number_of_blocks: NonZeroUsize, block_initializer: impl Fn(MemoryAddress, NonZeroUsize), - ) -> Result { + ) -> Result { let memory_source_size = block_size.multiply(number_of_blocks); let allocations_start_from = memory_source.obtain(memory_source_size)?; diff --git a/src/memory_sources/memory_source.rs b/src/memory_sources/memory_source.rs index 5937b4b..58d4970 100644 --- a/src/memory_sources/memory_source.rs +++ b/src/memory_sources/memory_source.rs @@ -1,5 +1,5 @@ use crate::memory_address::MemoryAddress; -use 
std::alloc::AllocErr; +use std::alloc::AllocError; use std::fmt::Debug; use std::num::NonZeroUsize; @@ -10,7 +10,7 @@ pub trait MemorySource: Debug { /// Obtain memory from the operating system, say. /// /// Alignment will be whatever is appropriate, but is likely to be quite large. - fn obtain(&self, non_zero_size: NonZeroUsize) -> Result; + fn obtain(&self, non_zero_size: NonZeroUsize) -> Result; /// Release memory to the operating system, say. /// diff --git a/src/memory_sources/mmap/memory_map_source.rs b/src/memory_sources/mmap/memory_map_source.rs index 921737a..690a24a 100644 --- a/src/memory_sources/mmap/memory_map_source.rs +++ b/src/memory_sources/mmap/memory_map_source.rs @@ -5,7 +5,7 @@ use crate::memory_sources::mmap::numa::prelude::*; use crate::memory_sources::mmap::prelude::*; #[cfg(unix)] use ::libc::*; -use std::alloc::AllocErr; +use std::alloc::AllocError; use std::num::NonZeroUsize; use std::ptr::null_mut; @@ -41,7 +41,7 @@ impl Default for MemoryMapSource { impl MemorySource for MemoryMapSource { #[inline(always)] - fn obtain(&self, non_zero_size: NonZeroUsize) -> Result { + fn obtain(&self, non_zero_size: NonZeroUsize) -> Result { self.mmap_memory(non_zero_size.get()) } @@ -95,7 +95,7 @@ impl MemoryMapSource { /// `size` is rounded up to system page size. #[inline(always)] - pub(crate) fn mmap_memory(&self, size: usize) -> Result { + pub(crate) fn mmap_memory(&self, size: usize) -> Result { const UNUSED_FILE_DESCRIPTOR: i32 = -1; const NO_OFFSET: i64 = 0; @@ -110,7 +110,7 @@ impl MemoryMapSource { ) }; if unlikely!(result == MAP_FAILED) { - Err(AllocErr) + Err(AllocError) } else { #[cfg(any(target_os = "android", target_os = "linux"))] self.madvise_memory(result, size)?; @@ -127,12 +127,12 @@ impl MemoryMapSource { #[cfg(any(target_os = "android", target_os = "linux"))] #[inline(always)] - fn madvise_memory(&self, address: *mut c_void, size: usize) -> Result<(), AllocErr> { + fn madvise_memory(&self, address: *mut c_void, size: usize) -> Result<(), AllocError> { let result = unsafe { madvise(address, size, self.madvise_flags) }; if likely!(result == 0) { } else if likely!(result == -1) { Self::munmap_memory(Self::cast_address(address), size); - return Err(AllocErr); + return Err(AllocError); } else { unreachable!() } @@ -141,7 +141,7 @@ impl MemoryMapSource { #[cfg(any(target_os = "android", target_os = "linux"))] #[inline(always)] - fn numa_memory(&self, address: *mut c_void, size: usize) -> Result<(), AllocErr> { + fn numa_memory(&self, address: *mut c_void, size: usize) -> Result<(), AllocError> { match self.numa_settings { None => Ok(()), @@ -149,7 +149,7 @@ impl MemoryMapSource { let outcome = numa_settings.post_allocate(address, size); if unlikely!(outcome.is_err()) { Self::munmap_memory(Self::cast_address(address), size); - return Err(AllocErr); + return Err(AllocError); } Ok(()) } @@ -158,13 +158,13 @@ impl MemoryMapSource { #[cfg(not(any(target_os = "android", target_os = "netbsd", target_os = "linux")))] #[inline(always)] - fn mlock_memory(&self, address: *mut c_void, size: usize) -> Result<(), AllocErr> { + fn mlock_memory(&self, address: *mut c_void, size: usize) -> Result<(), AllocError> { if self.lock { let result = unsafe { mlock(address, size) }; if likely!(result == 0) { } else if likely!(result == -1) { Self::munmap_memory(Self::cast_address(address), size); - return Err(AllocErr); + return Err(AllocError); } else { unreachable!() } @@ -180,7 +180,7 @@ impl MemoryMapSource { memory_address: MemoryAddress, old_size: usize, new_size: usize, - ) -> 
Result { + ) -> Result { #[cfg(target_os = "netbsd")] const MREMAP_MAYMOVE: i32 = 0; @@ -193,7 +193,7 @@ impl MemoryMapSource { ) }; if unlikely!(result == MAP_FAILED) { - Err(AllocErr) + Err(AllocError) } else { Ok(Self::cast_address(result)) } @@ -206,7 +206,7 @@ impl MemoryMapSource { memory_address: MemoryAddress, old_size: usize, new_size: usize, - ) -> Result { + ) -> Result { let new_memory_address = self.mmap_memory(new_size)?; unsafe { new_memory_address diff --git a/src/memory_sources/mmap/numa/numa_settings.rs b/src/memory_sources/mmap/numa/numa_settings.rs index 82790ab..f84e229 100644 --- a/src/memory_sources/mmap/numa/numa_settings.rs +++ b/src/memory_sources/mmap/numa/numa_settings.rs @@ -81,7 +81,7 @@ impl NumaSettings { pub(crate) fn post_allocate( &self, current_memory: MemoryAddress, - ) -> Result { + ) -> Result { Ok(current_memory) } diff --git a/src/memory_sources/rc_memory_source.rs b/src/memory_sources/rc_memory_source.rs index 48bfa33..bf0de1c 100644 --- a/src/memory_sources/rc_memory_source.rs +++ b/src/memory_sources/rc_memory_source.rs @@ -1,7 +1,7 @@ use crate::allocators::global::global_switchable_allocator::GlobalSwitchableAllocator; use crate::memory_address::MemoryAddress; use crate::memory_sources::memory_source::MemorySource; -use std::alloc::AllocErr; +use std::alloc::AllocError; use std::num::NonZeroUsize; use std::ops::Deref; use std::rc::Rc; @@ -30,7 +30,7 @@ impl Deref for RcMemorySource { impl MemorySource for RcMemorySource { #[inline(always)] - fn obtain(&self, non_zero_size: NonZeroUsize) -> Result { + fn obtain(&self, non_zero_size: NonZeroUsize) -> Result { self.0.obtain(non_zero_size) } diff --git a/tests/multiple_binary_search_tree_allocator_tests.rs b/tests/multiple_binary_search_tree_allocator_tests.rs index 37d73df..4e7d2d6 100644 --- a/tests/multiple_binary_search_tree_allocator_tests.rs +++ b/tests/multiple_binary_search_tree_allocator_tests.rs @@ -7,7 +7,7 @@ mod multiple_binary_search_tree_allocator_tests { use allocator_suite::extensions::usize_ext::UsizeExt; use allocator_suite::prelude::mmap::prelude::MemoryMapSource; - use std::alloc::AllocErr; + use std::alloc::AllocError; use allocator_suite::allocators::binary_search_trees::binary_search_trees_with_cached_knowledge_of_first_child::BinarySearchTreesWithCachedKnowledgeOfFirstChild; use allocator_suite::extensions::non_null_u8_ext::NonNullU8Ext; @@ -165,7 +165,7 @@ mod multiple_binary_search_tree_allocator_tests { fn assert_allocator_is_empty(allocator: &MultipleBinarySearchTreeAllocator) { assert_eq!( allocator.allocate(1.non_zero(), 1.non_zero()), - Err(AllocErr), + Err(AllocError), "Allocator was not empty" ); }
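
For context on how the renamed error type is consumed downstream, here is a minimal usage sketch; it is not part of the patch itself. It assumes a nightly toolchain (the patch targets the unstable AllocRef/AllocError APIs) and it infers that Allocator::allocate returns Result<MemoryAddress, AllocError> — the generic parameters of the Result types are elided in the hunks above, so that exact signature is an assumption, and the helper name allocate_cache_line is hypothetical.

// Sketch only: nightly-only APIs, inferred return type as noted above.
#![feature(allocator_api)]

use std::alloc::AllocError;
use std::num::NonZeroUsize;

use allocator_suite::allocators::allocator::Allocator;
use allocator_suite::memory_address::MemoryAddress;

/// Request a 64-byte, 64-byte-aligned block from any allocator in the suite.
fn allocate_cache_line<A: Allocator>(allocator: &A) -> Result<MemoryAddress, AllocError> {
    // Allocation goes through `&self`, consistent with the `&self` receivers
    // that `AllocRef::alloc` / `AllocRef::dealloc` adopt in this change, and
    // failure surfaces as the renamed `AllocError` unit struct (formerly
    // `AllocErr`).
    let non_zero_size = NonZeroUsize::new(64).unwrap();
    let non_zero_power_of_two_alignment = NonZeroUsize::new(64).unwrap();
    allocator.allocate(non_zero_size, non_zero_power_of_two_alignment)
}

Callers that previously matched on Err(AllocErr) — as the updated test at the bottom of the diff now does with Err(AllocError) — only need the import and pattern renamed; the error remains a zero-sized value.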