@@ -177,6 +177,8 @@ impl<T, A: Allocator> RawVec<T, A> {
     /// the returned `RawVec`.
     #[inline]
     pub(crate) const fn new_in(alloc: A) -> Self {
+        // Check assumption made in `current_memory`
+        const { assert!(T::LAYOUT.size() % T::LAYOUT.align() == 0) };
         Self { inner: RawVecInner::new_in(alloc, Alignment::of::<T>()), _marker: PhantomData }
     }

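For reference, the assumption asserted here holds for every Rust type: a type's size is always a multiple of its alignment. A standalone sketch of that check, using the public `Layout::new::<T>()` as a stand-in for the internal `T::LAYOUT`:

```rust
// Minimal sketch (not part of the patch): verify that `size % align == 0`
// for a few concrete types, the same invariant the `const` block asserts.
use std::alloc::Layout;

fn check_size_is_multiple_of_align<T>() {
    let layout = Layout::new::<T>();
    assert_eq!(layout.size() % layout.align(), 0);
}

fn main() {
    check_size_is_multiple_of_align::<u8>();
    check_size_is_multiple_of_align::<u64>();
    check_size_is_multiple_of_align::<(u8, u32)>(); // size 8, align 4
    check_size_is_multiple_of_align::<[u16; 3]>(); // size 6, align 2
    println!("size is a multiple of align for every type checked");
}
```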
@@ -328,7 +330,8 @@ impl<T, A: Allocator> RawVec<T, A> {
     #[inline]
     #[track_caller]
     pub(crate) fn reserve(&mut self, len: usize, additional: usize) {
-        self.inner.reserve(len, additional, T::LAYOUT)
+        // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
+        unsafe { self.inner.reserve(len, additional, T::LAYOUT) }
     }

     /// A specialized version of `self.reserve(len, 1)` which requires the
@@ -337,7 +340,8 @@ impl<T, A: Allocator> RawVec<T, A> {
     #[inline(never)]
     #[track_caller]
     pub(crate) fn grow_one(&mut self) {
-        self.inner.grow_one(T::LAYOUT)
+        // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
+        unsafe { self.inner.grow_one(T::LAYOUT) }
     }

     /// The same as `reserve`, but returns on errors instead of panicking or aborting.
@@ -346,7 +350,8 @@ impl<T, A: Allocator> RawVec<T, A> {
         len: usize,
         additional: usize,
     ) -> Result<(), TryReserveError> {
-        self.inner.try_reserve(len, additional, T::LAYOUT)
+        // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
+        unsafe { self.inner.try_reserve(len, additional, T::LAYOUT) }
     }

     /// Ensures that the buffer contains at least enough space to hold `len +
@@ -369,7 +374,8 @@ impl<T, A: Allocator> RawVec<T, A> {
     #[cfg(not(no_global_oom_handling))]
     #[track_caller]
     pub(crate) fn reserve_exact(&mut self, len: usize, additional: usize) {
-        self.inner.reserve_exact(len, additional, T::LAYOUT)
+        // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
+        unsafe { self.inner.reserve_exact(len, additional, T::LAYOUT) }
     }

     /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting.
@@ -378,7 +384,8 @@ impl<T, A: Allocator> RawVec<T, A> {
         len: usize,
         additional: usize,
     ) -> Result<(), TryReserveError> {
-        self.inner.try_reserve_exact(len, additional, T::LAYOUT)
+        // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
+        unsafe { self.inner.try_reserve_exact(len, additional, T::LAYOUT) }
     }

     /// Shrinks the buffer down to the specified capacity. If the given amount
@@ -395,7 +402,8 @@ impl<T, A: Allocator> RawVec<T, A> {
     #[track_caller]
     #[inline]
     pub(crate) fn shrink_to_fit(&mut self, cap: usize) {
-        self.inner.shrink_to_fit(cap, T::LAYOUT)
+        // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
+        unsafe { self.inner.shrink_to_fit(cap, T::LAYOUT) }
     }
 }

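All of the wrappers above follow the same shape: the typed `RawVec<T>` layer derives `elem_layout` from `T` at every call site, which is what the repeated SAFETY comments rely on. A simplified sketch of that layering (illustrative names, not the real types):

```rust
// Sketch of the pattern: an untyped inner type takes `Layout` under an unsafe
// precondition, while the typed wrapper always supplies the layout of `T`.
use std::alloc::Layout;
use std::marker::PhantomData;

struct Inner {
    cap: usize,
}

impl Inner {
    /// # Safety
    /// `elem_layout` must be the same layout this `Inner` was created for.
    unsafe fn reserve(&mut self, additional: usize, elem_layout: Layout) {
        // Real code would reallocate using `elem_layout`; the sketch only records capacity.
        let _ = elem_layout;
        self.cap += additional;
    }
}

struct Typed<T> {
    inner: Inner,
    _marker: PhantomData<T>,
}

impl<T> Typed<T> {
    fn new() -> Self {
        Typed { inner: Inner { cap: 0 }, _marker: PhantomData }
    }

    fn reserve(&mut self, additional: usize) {
        // SAFETY: every call on `self.inner` passes the layout of `T`,
        // the same layout `self.inner` was created for.
        unsafe { self.inner.reserve(additional, Layout::new::<T>()) }
    }
}

fn main() {
    let mut v: Typed<u32> = Typed::new();
    v.reserve(4);
    assert_eq!(v.inner.cap, 4);
}
```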
@@ -518,8 +526,12 @@ impl<A: Allocator> RawVecInner<A> {
         &self.alloc
     }

+    /// # Safety
+    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+    ///   initially construct `self`
+    /// - `elem_layout`'s size must be a multiple of its alignment
     #[inline]
-    fn current_memory(&self, elem_layout: Layout) -> Option<(NonNull<u8>, Layout)> {
+    unsafe fn current_memory(&self, elem_layout: Layout) -> Option<(NonNull<u8>, Layout)> {
         if elem_layout.size() == 0 || self.cap.as_inner() == 0 {
             None
         } else {
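The `size % align == 0` precondition matters because `current_memory` recomputes the layout of the whole allocation from the element layout and the stored capacity. A standalone sketch (not the actual `current_memory` body) of why a plain multiplication suffices under that precondition:

```rust
// Sketch: when an element's size is a multiple of its alignment, the array layout
// is exactly `size * cap` bytes at the element alignment, matching `Layout::array`.
use std::alloc::Layout;

fn array_layout_by_hand(elem_layout: Layout, cap: usize) -> Option<Layout> {
    // Only valid when `elem_layout.size() % elem_layout.align() == 0`,
    // because then `cap` elements occupy `size * cap` bytes with no extra padding.
    let bytes = elem_layout.size().checked_mul(cap)?;
    Layout::from_size_align(bytes, elem_layout.align()).ok()
}

fn main() {
    let elem = Layout::new::<u64>();
    let by_hand = array_layout_by_hand(elem, 10).unwrap();
    let via_std = Layout::array::<u64>(10).unwrap();
    assert_eq!(by_hand, via_std);
    println!("{by_hand:?} == {via_std:?}");
}
```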
@@ -535,48 +547,67 @@ impl<A: Allocator> RawVecInner<A> {
         }
     }

+    /// # Safety
+    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+    ///   initially construct `self`
+    /// - `elem_layout`'s size must be a multiple of its alignment
     #[cfg(not(no_global_oom_handling))]
     #[inline]
     #[track_caller]
-    fn reserve(&mut self, len: usize, additional: usize, elem_layout: Layout) {
+    unsafe fn reserve(&mut self, len: usize, additional: usize, elem_layout: Layout) {
         // Callers expect this function to be very cheap when there is already sufficient capacity.
         // Therefore, we move all the resizing and error-handling logic from grow_amortized and
         // handle_reserve behind a call, while making sure that this function is likely to be
         // inlined as just a comparison and a call if the comparison fails.
         #[cold]
-        fn do_reserve_and_handle<A: Allocator>(
+        unsafe fn do_reserve_and_handle<A: Allocator>(
             slf: &mut RawVecInner<A>,
             len: usize,
             additional: usize,
             elem_layout: Layout,
         ) {
-            if let Err(err) = slf.grow_amortized(len, additional, elem_layout) {
+            // SAFETY: Precondition passed to caller
+            if let Err(err) = unsafe { slf.grow_amortized(len, additional, elem_layout) } {
                 handle_error(err);
             }
         }

         if self.needs_to_grow(len, additional, elem_layout) {
-            do_reserve_and_handle(self, len, additional, elem_layout);
+            unsafe {
+                do_reserve_and_handle(self, len, additional, elem_layout);
+            }
         }
     }

+    /// # Safety
+    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+    ///   initially construct `self`
+    /// - `elem_layout`'s size must be a multiple of its alignment
     #[cfg(not(no_global_oom_handling))]
     #[inline]
     #[track_caller]
-    fn grow_one(&mut self, elem_layout: Layout) {
-        if let Err(err) = self.grow_amortized(self.cap.as_inner(), 1, elem_layout) {
+    unsafe fn grow_one(&mut self, elem_layout: Layout) {
+        // SAFETY: Precondition passed to caller
+        if let Err(err) = unsafe { self.grow_amortized(self.cap.as_inner(), 1, elem_layout) } {
             handle_error(err);
         }
     }

-    fn try_reserve(
+    /// # Safety
+    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+    ///   initially construct `self`
+    /// - `elem_layout`'s size must be a multiple of its alignment
+    unsafe fn try_reserve(
         &mut self,
         len: usize,
         additional: usize,
         elem_layout: Layout,
     ) -> Result<(), TryReserveError> {
         if self.needs_to_grow(len, additional, elem_layout) {
-            self.grow_amortized(len, additional, elem_layout)?;
+            // SAFETY: Precondition passed to caller
+            unsafe {
+                self.grow_amortized(len, additional, elem_layout)?;
+            }
         }
         unsafe {
             // Inform the optimizer that the reservation has succeeded or wasn't needed
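The `#[cold]` inner function above keeps the common already-has-capacity path small enough to inline: the hot path is a single comparison plus an outlined call. A reduced sketch of that structure with simplified names (not the real `RawVecInner`):

```rust
// Sketch of the hot/cold split used by `reserve`: the growth and error handling
// live in a `#[cold]` nested function so the caller stays a cheap check.
struct Buf {
    cap: usize,
}

impl Buf {
    #[inline]
    fn reserve(&mut self, len: usize, additional: usize) {
        #[cold]
        fn grow_and_handle(buf: &mut Buf, len: usize, additional: usize) {
            // Amortized growth: at least double, and at least enough for the request.
            let required = len.checked_add(additional).expect("capacity overflow");
            buf.cap = required.max(buf.cap * 2);
        }

        // Cheap check: only take the cold path when more capacity is actually needed.
        if additional > self.cap.wrapping_sub(len) {
            grow_and_handle(self, len, additional);
        }
    }
}

fn main() {
    let mut b = Buf { cap: 4 };
    b.reserve(4, 1);
    assert!(b.cap >= 5);
    println!("cap = {}", b.cap);
}
```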
@@ -585,22 +616,34 @@ impl<A: Allocator> RawVecInner<A> {
         Ok(())
     }

+    /// # Safety
+    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+    ///   initially construct `self`
+    /// - `elem_layout`'s size must be a multiple of its alignment
     #[cfg(not(no_global_oom_handling))]
     #[track_caller]
-    fn reserve_exact(&mut self, len: usize, additional: usize, elem_layout: Layout) {
-        if let Err(err) = self.try_reserve_exact(len, additional, elem_layout) {
+    unsafe fn reserve_exact(&mut self, len: usize, additional: usize, elem_layout: Layout) {
+        // SAFETY: Precondition passed to caller
+        if let Err(err) = unsafe { self.try_reserve_exact(len, additional, elem_layout) } {
             handle_error(err);
         }
     }

-    fn try_reserve_exact(
+    /// # Safety
+    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+    ///   initially construct `self`
+    /// - `elem_layout`'s size must be a multiple of its alignment
+    unsafe fn try_reserve_exact(
         &mut self,
         len: usize,
         additional: usize,
         elem_layout: Layout,
     ) -> Result<(), TryReserveError> {
         if self.needs_to_grow(len, additional, elem_layout) {
-            self.grow_exact(len, additional, elem_layout)?;
+            // SAFETY: Precondition passed to caller
+            unsafe {
+                self.grow_exact(len, additional, elem_layout)?;
+            }
         }
         unsafe {
             // Inform the optimizer that the reservation has succeeded or wasn't needed
@@ -609,11 +652,16 @@ impl<A: Allocator> RawVecInner<A> {
         Ok(())
     }

+    /// # Safety
+    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+    ///   initially construct `self`
+    /// - `elem_layout`'s size must be a multiple of its alignment
+    /// - `cap` must be less than or equal to `self.capacity(elem_layout.size())`
     #[cfg(not(no_global_oom_handling))]
     #[inline]
     #[track_caller]
-    fn shrink_to_fit(&mut self, cap: usize, elem_layout: Layout) {
-        if let Err(err) = self.shrink(cap, elem_layout) {
+    unsafe fn shrink_to_fit(&mut self, cap: usize, elem_layout: Layout) {
+        if let Err(err) = unsafe { self.shrink(cap, elem_layout) } {
             handle_error(err);
         }
     }
@@ -632,7 +680,13 @@ impl<A: Allocator> RawVecInner<A> {
         self.cap = unsafe { Cap::new_unchecked(cap) };
     }

-    fn grow_amortized(
+    /// # Safety
+    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+    ///   initially construct `self`
+    /// - `elem_layout`'s size must be a multiple of its alignment
+    /// - The sum of `len` and `additional` must be greater than or equal to
+    ///   `self.capacity(elem_layout.size())`
+    unsafe fn grow_amortized(
         &mut self,
         len: usize,
         additional: usize,
@@ -657,14 +711,25 @@ impl<A: Allocator> RawVecInner<A> {

         let new_layout = layout_array(cap, elem_layout)?;

-        let ptr = finish_grow(new_layout, self.current_memory(elem_layout), &mut self.alloc)?;
-        // SAFETY: layout_array would have resulted in a capacity overflow if we tried to allocate more than `isize::MAX` items
+        // SAFETY:
+        // - For the `current_memory` call: Precondition passed to caller
+        // - For the `finish_grow` call: Precondition passed to caller
+        //   + `current_memory` does the right thing
+        let ptr =
+            unsafe { finish_grow(new_layout, self.current_memory(elem_layout), &mut self.alloc)? };

+        // SAFETY: layout_array would have resulted in a capacity overflow if we tried to allocate more than `isize::MAX` items
         unsafe { self.set_ptr_and_cap(ptr, cap) };
         Ok(())
     }

-    fn grow_exact(
+    /// # Safety
+    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+    ///   initially construct `self`
+    /// - `elem_layout`'s size must be a multiple of its alignment
+    /// - The sum of `len` and `additional` must be greater than or equal to
+    ///   `self.capacity(elem_layout.size())`
+    unsafe fn grow_exact(
         &mut self,
         len: usize,
         additional: usize,
@@ -679,17 +744,27 @@ impl<A: Allocator> RawVecInner<A> {
         let cap = len.checked_add(additional).ok_or(CapacityOverflow)?;
         let new_layout = layout_array(cap, elem_layout)?;

-        let ptr = finish_grow(new_layout, self.current_memory(elem_layout), &mut self.alloc)?;
+        // SAFETY:
+        // - For the `current_memory` call: Precondition passed to caller
+        // - For the `finish_grow` call: Precondition passed to caller
+        //   + `current_memory` does the right thing
+        let ptr =
+            unsafe { finish_grow(new_layout, self.current_memory(elem_layout), &mut self.alloc)? };
         // SAFETY: layout_array would have resulted in a capacity overflow if we tried to allocate more than `isize::MAX` items
         unsafe {
             self.set_ptr_and_cap(ptr, cap);
         }
         Ok(())
     }

+    /// # Safety
+    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+    ///   initially construct `self`
+    /// - `elem_layout`'s size must be a multiple of its alignment
+    /// - `cap` must be less than or equal to `self.capacity(elem_layout.size())`
     #[cfg(not(no_global_oom_handling))]
     #[inline]
-    fn shrink(&mut self, cap: usize, elem_layout: Layout) -> Result<(), TryReserveError> {
+    unsafe fn shrink(&mut self, cap: usize, elem_layout: Layout) -> Result<(), TryReserveError> {
         assert!(cap <= self.capacity(elem_layout.size()), "Tried to shrink to a larger capacity");
         // SAFETY: Just checked this isn't trying to grow
         unsafe { self.shrink_unchecked(cap, elem_layout) }
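`shrink` is the checked front door to `shrink_unchecked`: it asserts exactly the condition the unchecked variant requires. A minimal sketch of that check-then-forward pattern, with hypothetical names:

```rust
// Sketch: validate the argument with `assert!`, then forward to an `_unchecked`
// variant whose safety contract is precisely the condition that was just checked.
struct Buf {
    cap: usize,
}

impl Buf {
    fn shrink(&mut self, cap: usize) {
        assert!(cap <= self.cap, "Tried to shrink to a larger capacity");
        // SAFETY: just checked that `cap` does not exceed the current capacity.
        unsafe { self.shrink_unchecked(cap) }
    }

    /// # Safety
    /// `cap` must be less than or equal to `self.cap`.
    unsafe fn shrink_unchecked(&mut self, cap: usize) {
        // Real code would shrink or deallocate; the sketch only records the new capacity.
        self.cap = cap;
    }
}

fn main() {
    let mut b = Buf { cap: 8 };
    b.shrink(3);
    assert_eq!(b.cap, 3);
}
```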
@@ -711,8 +786,12 @@ impl<A: Allocator> RawVecInner<A> {
         cap: usize,
         elem_layout: Layout,
     ) -> Result<(), TryReserveError> {
-        let (ptr, layout) =
-            if let Some(mem) = self.current_memory(elem_layout) { mem } else { return Ok(()) };
+        // SAFETY: Precondition passed to caller
+        let (ptr, layout) = if let Some(mem) = unsafe { self.current_memory(elem_layout) } {
+            mem
+        } else {
+            return Ok(());
+        };

         // If shrinking to 0, deallocate the buffer. We don't reach this point
         // for the T::IS_ZST case since current_memory() will have returned
@@ -748,18 +827,26 @@ impl<A: Allocator> RawVecInner<A> {
     /// Ideally this function would take `self` by move, but it cannot because it exists to be
     /// called from a `Drop` impl.
     unsafe fn deallocate(&mut self, elem_layout: Layout) {
-        if let Some((ptr, layout)) = self.current_memory(elem_layout) {
+        // SAFETY: Precondition passed to caller
+        if let Some((ptr, layout)) = unsafe { self.current_memory(elem_layout) } {
             unsafe {
                 self.alloc.deallocate(ptr, layout);
             }
         }
     }
 }

+/// # Safety
+/// If `current_memory` matches `Some((ptr, old_layout))`:
+/// - `ptr` must denote a block of memory *currently allocated* via `alloc`
+/// - `old_layout` must *fit* that block of memory
+/// - `new_layout` must have the same alignment as `old_layout`
+/// - `new_layout.size()` must be greater than or equal to `old_layout.size()`
+/// If `current_memory` is `None`, this function is safe.
 // not marked inline(never) since we want optimizers to be able to observe the specifics of this
 // function, see tests/codegen-llvm/vec-reserve-extend.rs.
 #[cold]
-fn finish_grow<A>(
+unsafe fn finish_grow<A>(
     new_layout: Layout,
     current_memory: Option<(NonNull<u8>, Layout)>,
     alloc: &mut A,
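The last two bullets of the new `finish_grow` contract are met by its callers because growing the element count for the same element layout preserves alignment and never shrinks the size. A quick standalone check of that relationship:

```rust
// Sketch: `grow_amortized`/`grow_exact` pass the old allocation (via `current_memory`)
// together with a `new_layout` computed from a larger capacity of the same element
// layout, so the alignment matches and the size is at least as large.
use std::alloc::Layout;

fn main() {
    let old_layout = Layout::array::<u32>(8).unwrap();
    let new_layout = Layout::array::<u32>(16).unwrap();
    assert_eq!(new_layout.align(), old_layout.align());
    assert!(new_layout.size() >= old_layout.size());
    println!("old: {old_layout:?} -> new: {new_layout:?}");
}
```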