diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs
index e1124a68750ae..28068a8806096 100644
--- a/library/alloc/src/collections/btree/map.rs
+++ b/library/alloc/src/collections/btree/map.rs
@@ -1644,11 +1644,11 @@ impl<K, V, A: Allocator + Clone> IntoIter<K, V, A> {
         &mut self,
     ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>> {
         if self.length == 0 {
-            self.range.deallocating_end(&self.alloc);
+            self.range.deallocating_end(self.alloc.clone());
             None
         } else {
             self.length -= 1;
-            Some(unsafe { self.range.deallocating_next_unchecked(&self.alloc) })
+            Some(unsafe { self.range.deallocating_next_unchecked(self.alloc.clone()) })
         }
     }
 
@@ -1658,11 +1658,11 @@ impl<K, V, A: Allocator + Clone> IntoIter<K, V, A> {
         &mut self,
     ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>> {
         if self.length == 0 {
-            self.range.deallocating_end(&self.alloc);
+            self.range.deallocating_end(self.alloc.clone());
             None
         } else {
             self.length -= 1;
-            Some(unsafe { self.range.deallocating_next_back_unchecked(&self.alloc) })
+            Some(unsafe { self.range.deallocating_next_back_unchecked(self.alloc.clone()) })
         }
     }
 }
@@ -1849,7 +1849,7 @@ where
     type Item = (K, V);
 
     fn next(&mut self) -> Option<(K, V)> {
-        self.inner.next(&mut self.pred, &self.alloc)
+        self.inner.next(&mut self.pred, self.alloc.clone())
     }
 
     fn size_hint(&self) -> (usize, Option<usize>) {
diff --git a/library/alloc/src/collections/btree/set.rs b/library/alloc/src/collections/btree/set.rs
index bec3b9675254c..0d3fdc9019efd 100644
--- a/library/alloc/src/collections/btree/set.rs
+++ b/library/alloc/src/collections/btree/set.rs
@@ -1320,7 +1320,7 @@ where
     fn next(&mut self) -> Option<T> {
         let pred = &mut self.pred;
         let mut mapped_pred = |k: &T, _v: &mut ()| pred(k);
-        self.inner.next(&mut mapped_pred, &self.alloc).map(|(k, _)| k)
+        self.inner.next(&mut mapped_pred, self.alloc.clone()).map(|(k, _)| k)
     }
 
     fn size_hint(&self) -> (usize, Option<usize>) {
diff --git a/library/std/src/alloc.rs b/library/std/src/alloc.rs
index d3879273f5b03..d554ec590358f 100644
--- a/library/std/src/alloc.rs
+++ b/library/std/src/alloc.rs
@@ -187,7 +187,7 @@ impl System {
             old_size => unsafe {
                 let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                 ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
-                Allocator::deallocate(&self, ptr, old_layout);
+                Allocator::deallocate(self, ptr, old_layout);
                 Ok(new_ptr)
             },
         }
@@ -254,7 +254,7 @@ unsafe impl Allocator for System {
         match new_layout.size() {
             // SAFETY: conditions must be upheld by the caller
             0 => unsafe {
-                Allocator::deallocate(&self, ptr, old_layout);
+                Allocator::deallocate(self, ptr, old_layout);
                 Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
             },
 
@@ -274,9 +274,9 @@ unsafe impl Allocator for System {
             // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
             // for `dealloc` must be upheld by the caller.
             new_size => unsafe {
-                let new_ptr = Allocator::allocate(&self, new_layout)?;
+                let new_ptr = Allocator::allocate(self, new_layout)?;
                 ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
-                Allocator::deallocate(&self, ptr, old_layout);
+                Allocator::deallocate(self, ptr, old_layout);
                 Ok(new_ptr)
             },
         }