// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy all of the objects they
//! contain at once, when the arena itself is destroyed. They do not support
//! deallocation of individual objects while the arena is still alive. The
//! benefit of an arena is very fast allocation: just a pointer bump.
//!
//! This crate has two arenas implemented: `TypedArena`, which is a simpler
//! arena but can only hold objects of a single type, and `Arena`, which is a
//! more complex, slower arena which can hold objects of any type.
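//!
//! For example, a `TypedArena` hands out references that live as long as the
//! arena itself (a minimal sketch; the `rustc_private` feature gate is needed
//! because this crate is unstable):
//!
//! ```
//! #![feature(rustc_private)]
//!
//! extern crate arena;
//!
//! use arena::TypedArena;
//!
//! fn main() {
//!     let arena = TypedArena::new();
//!     let x: &mut i32 = arena.alloc(7);
//!     *x += 1;
//!     assert_eq!(*x, 8);
//! }
//! ```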
#![crate_name = "arena"]
#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/",
test(no_crate_inject, attr(deny(warnings))))]
#![feature(alloc)]
#![feature(core_intrinsics)]
#![feature(drop_in_place)]
#![feature(heap_api)]
#![feature(raw)]
#![feature(staged_api)]
#![feature(dropck_parametricity)]
#![cfg_attr(test, feature(test))]
extern crate alloc;
use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker::{PhantomData, Send};
use std::mem;
use std::ptr;
use std::slice;
use alloc::heap;
use alloc::raw_vec::RawVec;
struct Chunk {
data: RawVec<u8>,
/// Index of the first unused byte.
fill: Cell<usize>,
/// Indicates whether this chunk holds only objects without destructors
/// (plain-old-data); such chunks are skipped when running drop glue.
is_copy: Cell<bool>,
}
impl Chunk {
fn new(size: usize, is_copy: bool) -> Chunk {
Chunk {
data: RawVec::with_capacity(size),
fill: Cell::new(0),
is_copy: Cell::new(is_copy),
}
}
fn capacity(&self) -> usize {
self.data.cap()
}
unsafe fn as_ptr(&self) -> *const u8 {
self.data.ptr()
}
// Walk down a chunk, running the destructors for any objects stored
// in it.
unsafe fn destroy(&self) {
let mut idx = 0;
let buf = self.as_ptr();
let fill = self.fill.get();
while idx < fill {
let tydesc_data = buf.offset(idx as isize) as *const usize;
let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);
let (size, align) = ((*tydesc).size, (*tydesc).align);
let after_tydesc = idx + mem::size_of::<*const TyDesc>();
let start = round_up(after_tydesc, align);
if is_done {
((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8);
}
// Find where the next tydesc lives
idx = round_up(start + size, mem::align_of::<*const TyDesc>());
}
}
}
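// An illustrative sketch of a non-copy chunk's layout on a 64-bit target:
// each record is a bit-packed tydesc pointer followed by the (possibly
// padded) object itself:
//
//     [tydesc|done][pad][object][pad][tydesc|done][pad][object] ...
//
// `destroy` above walks these records and runs the drop glue only for the
// records whose low "done" bit is set.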
/// A slower reflection-based arena that can allocate objects of any type.
///
/// This arena uses `RawVec<u8>` as a backing store to allocate objects from.
/// For each allocated object, the arena stores a pointer to the type descriptor
/// followed by the object (potentially with alignment padding after each
/// element). When the arena is destroyed, it iterates through all of its
/// chunks, and uses the tydesc information to trace through the objects,
/// calling the destructors on them.
///
/// One subtle point that needs to be addressed is how to handle panics
/// while running the user-provided initializer function. It is important
/// to not run the destructor on
/// uninitialized objects, but how to detect them is somewhat subtle. Since
/// `alloc()` can be invoked recursively, it is not sufficient to simply exclude
/// the most recent object. To solve this without requiring extra space, we
/// use the low order bit of the tydesc pointer to encode whether the object
/// it describes has been fully initialized.
///
/// As an optimization, objects with destructors are stored in different chunks
/// than objects without destructors. This reduces overhead when initializing
/// plain-old-data (`Copy` types) and means we don't need to waste time running
/// their destructors.
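///
/// A minimal usage sketch (with the same `rustc_private` setup as the
/// crate-level example):
///
/// ```
/// #![feature(rustc_private)]
///
/// extern crate arena;
///
/// use arena::Arena;
///
/// fn main() {
///     let arena = Arena::new();
///     // `alloc` takes an initializer closure; the returned reference is
///     // valid for as long as the arena lives.
///     let v: &mut Vec<i32> = arena.alloc(|| vec![1, 2, 3]);
///     v.push(4);
///     assert_eq!(v.len(), 4);
/// }
/// ```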
pub struct Arena<'longer_than_self> {
// The heads are separated out from the list as an unbenchmarked
// micro-optimization, to avoid needing to match on the list to access a head.
head: RefCell<Chunk>,
copy_head: RefCell<Chunk>,
chunks: RefCell<Vec<Chunk>>,
_marker: PhantomData<*mut &'longer_than_self ()>,
}
impl<'a> Arena<'a> {
/// Allocates a new Arena with 32 bytes preallocated.
pub fn new() -> Arena<'a> {
Arena::new_with_size(32)
}
/// Allocates a new Arena with `initial_size` bytes preallocated.
pub fn new_with_size(initial_size: usize) -> Arena<'a> {
Arena {
head: RefCell::new(Chunk::new(initial_size, false)),
copy_head: RefCell::new(Chunk::new(initial_size, true)),
chunks: RefCell::new(Vec::new()),
_marker: PhantomData,
}
}
}
impl<'longer_than_self> Drop for Arena<'longer_than_self> {
fn drop(&mut self) {
unsafe {
self.head.borrow().destroy();
for chunk in self.chunks.borrow().iter() {
if !chunk.is_copy.get() {
chunk.destroy();
}
}
}
}
}
#[inline]
fn round_up(base: usize, align: usize) -> usize {
(base.checked_add(align - 1)).unwrap() & !(align - 1)
}
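// For example, `round_up(5, 4) == 8` and `round_up(8, 4) == 8`; `align` is
// assumed to be a power of two, as it is for every Rust type.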
// We encode whether the object a tydesc describes has been
// initialized in the arena in the low bit of the tydesc pointer. This
// is necessary in order to properly do cleanup if a panic occurs
// during an initializer.
#[inline]
fn bitpack_tydesc_ptr(p: *const TyDesc, is_done: bool) -> usize {
p as usize | (is_done as usize)
}
#[inline]
fn un_bitpack_tydesc_ptr(p: usize) -> (*const TyDesc, bool) {
((p & !1) as *const TyDesc, p & 1 == 1)
}
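// An illustrative round trip, assuming the tydesc pointer has at least
// two-byte alignment (true for any real `TyDesc`):
//
//     let packed = bitpack_tydesc_ptr(tydesc, true); // sets the low bit
//     assert_eq!(un_bitpack_tydesc_ptr(packed), (tydesc, true));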
// HACK(eddyb) TyDesc replacement using a trait object vtable.
// This could be replaced in the future with a custom DST layout,
// or `&'static (drop_glue, size, align)` created by a `const fn`.
// Requirements:
// * rvalue promotion (issue #1056)
// * mem::{size_of, align_of} must be const fns
struct TyDesc {
drop_glue: fn(*const i8),
size: usize,
align: usize,
}
trait AllTypes {
fn dummy(&self) {}
}
impl<T: ?Sized> AllTypes for T {}
unsafe fn get_tydesc<T>() -> *const TyDesc {
use std::raw::TraitObject;
let ptr = &*(heap::EMPTY as *const T);
// Can use any trait that is implemented for all types.
let obj = mem::transmute::<&AllTypes, TraitObject>(ptr);
obj.vtable as *const TyDesc
}
impl<'longer_than_self> Arena<'longer_than_self> {
// Grows a given chunk and returns `false`, or replaces it with a bigger
// chunk and returns `true`.
// This method is shared by both parts of the arena.
#[cold]
fn alloc_grow(&self, head: &mut Chunk, used_cap: usize, n_bytes: usize) -> bool {
if head.data.reserve_in_place(used_cap, n_bytes) {
// In-place reallocation succeeded.
false
} else {
// Allocate a new chunk.
let new_min_chunk_size = cmp::max(n_bytes, head.capacity());
let new_chunk = Chunk::new((new_min_chunk_size + 1).next_power_of_two(), false);
let old_chunk = mem::replace(head, new_chunk);
if old_chunk.fill.get() != 0 {
self.chunks.borrow_mut().push(old_chunk);
}
true
}
}
// Functions for the copyable part of the arena.
#[inline]
fn alloc_copy_inner(&self, n_bytes: usize, align: usize) -> *const u8 {
let mut copy_head = self.copy_head.borrow_mut();
let fill = copy_head.fill.get();
let mut start = round_up(fill, align);
let mut end = start + n_bytes;
if end > copy_head.capacity() {
if self.alloc_grow(&mut *copy_head, fill, end - fill) {
// Continuing with a newly allocated chunk
start = 0;
end = n_bytes;
copy_head.is_copy.set(true);
}
}
copy_head.fill.set(end);
unsafe { copy_head.as_ptr().offset(start as isize) }
}
#[inline]
fn alloc_copy<T, F>(&self, op: F) -> &mut T
where F: FnOnce() -> T
{
unsafe {
let ptr = self.alloc_copy_inner(mem::size_of::<T>(), mem::align_of::<T>());
let ptr = ptr as *mut T;
ptr::write(&mut (*ptr), op());
&mut *ptr
}
}
// Functions for the non-copyable part of the arena.
#[inline]
fn alloc_noncopy_inner(&self, n_bytes: usize, align: usize) -> (*const u8, *const u8) {
let mut head = self.head.borrow_mut();
let fill = head.fill.get();
let mut tydesc_start = fill;
let after_tydesc = fill + mem::size_of::<*const TyDesc>();
let mut start = round_up(after_tydesc, align);
let mut end = round_up(start + n_bytes, mem::align_of::<*const TyDesc>());
if end > head.capacity() {
if self.alloc_grow(&mut *head, tydesc_start, end - tydesc_start) {
// Continuing with a newly allocated chunk
tydesc_start = 0;
start = round_up(mem::size_of::<*const TyDesc>(), align);
end = round_up(start + n_bytes, mem::align_of::<*const TyDesc>());
}
}
head.fill.set(end);
unsafe {
let buf = head.as_ptr();
(buf.offset(tydesc_start as isize),
buf.offset(start as isize))
}
}
#[inline]
fn alloc_noncopy<T, F>(&self, op: F) -> &mut T
where F: FnOnce() -> T
{
unsafe {
let tydesc = get_tydesc::<T>();
let (ty_ptr, ptr) = self.alloc_noncopy_inner(mem::size_of::<T>(), mem::align_of::<T>());
let ty_ptr = ty_ptr as *mut usize;
let ptr = ptr as *mut T;
// Write in our tydesc along with a bit indicating that it
// has *not* been initialized yet.
*ty_ptr = bitpack_tydesc_ptr(tydesc, false);
// Actually initialize it
ptr::write(&mut (*ptr), op());
// Now that we are done, update the tydesc to indicate that
// the object is there.
*ty_ptr = bitpack_tydesc_ptr(tydesc, true);
&mut *ptr
}
}
/// Allocates a new item in the arena, using `op` to initialize the value,
/// and returns a reference to it.
#[inline]
pub fn alloc<T: 'longer_than_self, F>(&self, op: F) -> &mut T
where F: FnOnce() -> T
{
unsafe {
if intrinsics::needs_drop::<T>() {
self.alloc_noncopy(op)
} else {
self.alloc_copy(op)
}
}
}
/// Allocates a slice of bytes of requested length. The bytes are not guaranteed to be zero
/// if the arena has previously been cleared.
///
/// # Panics
///
/// Panics if the requested length is too large and causes overflow.
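///
/// A minimal usage sketch:
///
/// ```
/// #![feature(rustc_private)]
///
/// extern crate arena;
///
/// fn main() {
///     let arena = arena::Arena::new();
///     let bytes = arena.alloc_bytes(8);
///     bytes[0] = 42;
///     assert_eq!(bytes[0], 42);
/// }
/// ```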
pub fn alloc_bytes(&self, len: usize) -> &mut [u8] {
unsafe {
// Check for overflow.
self.copy_head.borrow().fill.get().checked_add(len).expect("length overflow");
let ptr = self.alloc_copy_inner(len, 1);
intrinsics::assume(!ptr.is_null());
slice::from_raw_parts_mut(ptr as *mut _, len)
}
}
/// Clears the arena. Deallocates all but the longest chunk, which may be
/// reused.
pub fn clear(&mut self) {
unsafe {
self.head.borrow().destroy();
self.head.borrow().fill.set(0);
self.copy_head.borrow().fill.set(0);
for chunk in self.chunks.borrow().iter() {
if !chunk.is_copy.get() {
chunk.destroy();
}
}
self.chunks.borrow_mut().clear();
}
}
}
#[cfg(test)]
use std::rc::Rc;
#[test]
fn test_arena_destructors() {
let arena = Arena::new();
for i in 0..10 {
// Arena allocate something with drop glue to make sure it
// doesn't leak.
arena.alloc(|| Rc::new(i));
// Allocate something with funny size and alignment, to keep
// things interesting.
arena.alloc(|| [0u8, 1u8, 2u8]);
}
}
#[test]
#[should_panic]
fn test_arena_destructors_fail() {
let arena = Arena::new();
// Put some stuff in the arena.
for i in 0..10 {
// Arena allocate something with drop glue to make sure it
// doesn't leak.
arena.alloc(|| Rc::new(i));
// Allocate something with funny size and alignment, to keep
// things interesting.
arena.alloc(|| [0u8, 1, 2]);
}
// Now, panic while allocating
arena.alloc::<Rc<i32>, _>(|| {
panic!();
});
}
/// A faster arena that can hold objects of only one type.
pub struct TypedArena<T> {
/// A pointer to the next object to be allocated.
ptr: Cell<*mut T>,
/// A pointer to the end of the allocated area. When this pointer is
/// reached, a new chunk is allocated.
end: Cell<*mut T>,
/// A vector of arena segments.
chunks: RefCell<Vec<TypedArenaChunk<T>>>,
/// Marker indicating that dropping the arena causes its owned
/// instances of `T` to be dropped.
_own: PhantomData<T>,
}
struct TypedArenaChunk<T> {
/// The raw storage for the arena chunk.
storage: RawVec<T>,
}
impl<T> TypedArenaChunk<T> {
#[inline]
unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
TypedArenaChunk { storage: RawVec::with_capacity(capacity) }
}
/// Destroys this arena chunk.
#[inline]
unsafe fn destroy(&mut self, len: usize) {
// The branch on needs_drop() is an -O1 performance optimization.
// Without the branch, dropping TypedArena<u8> takes linear time.
if intrinsics::needs_drop::<T>() {
let mut start = self.start();
// Destroy all allocated objects.
for _ in 0..len {
ptr::drop_in_place(start);
start = start.offset(1);
}
}
}
// Returns a pointer to the first allocated object.
#[inline]
fn start(&self) -> *mut T {
self.storage.ptr()
}
// Returns a pointer to the end of the allocated space.
#[inline]
fn end(&self) -> *mut T {
unsafe {
if mem::size_of::<T>() == 0 {
// A pointer as large as possible for zero-sized elements.
!0 as *mut T
} else {
self.start().offset(self.storage.cap() as isize)
}
}
}
}
const PAGE: usize = 4096;
impl<T> TypedArena<T> {
/// Creates a new `TypedArena` with preallocated space for many objects.
#[inline]
pub fn new() -> TypedArena<T> {
// Reserve at least one page.
let elem_size = cmp::max(1, mem::size_of::<T>());
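// For a 12-byte `T` (say, three `i32`s), for example, this preallocates
// space for 4096 / 12 = 341 objects.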
TypedArena::with_capacity(PAGE / elem_size)
}
/// Creates a new `TypedArena` with preallocated space for the given number of
/// objects.
#[inline]
pub fn with_capacity(capacity: usize) -> TypedArena<T> {
unsafe {
let chunk = TypedArenaChunk::<T>::new(cmp::max(1, capacity));
TypedArena {
ptr: Cell::new(chunk.start()),
end: Cell::new(chunk.end()),
chunks: RefCell::new(vec![chunk]),
_own: PhantomData,
}
}
}
/// Allocates an object in the `TypedArena`, returning a reference to it.
#[inline]
pub fn alloc(&self, object: T) -> &mut T {
if self.ptr == self.end {
self.grow()
}
unsafe {
if mem::size_of::<T>() == 0 {
self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
let ptr = heap::EMPTY as *mut T;
// Don't drop the object. This `write` is equivalent to `forget`.
ptr::write(ptr, object);
&mut *ptr
} else {
let ptr = self.ptr.get();
// Advance the pointer.
self.ptr.set(self.ptr.get().offset(1));
// Write into uninitialized memory.
ptr::write(ptr, object);
&mut *ptr
}
}
}
/// Grows the arena.
#[inline(never)]
#[cold]
fn grow(&self) {
unsafe {
let mut chunks = self.chunks.borrow_mut();
let prev_capacity = chunks.last().unwrap().storage.cap();
let new_capacity = prev_capacity.checked_mul(2).unwrap();
if chunks.last_mut().unwrap().storage.double_in_place() {
self.end.set(chunks.last().unwrap().end());
} else {
let chunk = TypedArenaChunk::<T>::new(new_capacity);
self.ptr.set(chunk.start());
self.end.set(chunk.end());
chunks.push(chunk);
}
}
}
/// Clears the arena. Deallocates all but the longest chunk, which may be
/// reused.
pub fn clear(&mut self) {
unsafe {
// Clear the last chunk, which is partially filled.
let mut chunks_borrow = self.chunks.borrow_mut();
let last_idx = chunks_borrow.len() - 1;
self.clear_last_chunk(&mut chunks_borrow[last_idx]);
// If `T` is ZST, code below has no effect.
for mut chunk in chunks_borrow.drain(..last_idx) {
let cap = chunk.storage.cap();
chunk.destroy(cap);
}
}
}
// Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
// chunks.
fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
// Determine how much was filled.
let start = last_chunk.start() as usize;
// We obtain the value of the pointer to the first uninitialized element.
let end = self.ptr.get() as usize;
// We then calculate the number of elements to be dropped in the last chunk,
// which is the filled area's length.
let diff = if mem::size_of::<T>() == 0 {
// `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
// the number of zero-sized values in the last and only chunk, just out of caution.
// Recall that `end` was incremented for each allocated value.
end - start
} else {
(end - start) / mem::size_of::<T>()
};
// Pass that to the `destroy` method.
unsafe {
last_chunk.destroy(diff);
}
// Reset the chunk.
self.ptr.set(last_chunk.start());
}
}
impl<T> Drop for TypedArena<T> {
#[unsafe_destructor_blind_to_params]
fn drop(&mut self) {
unsafe {
// Determine how much was filled.
let mut chunks_borrow = self.chunks.borrow_mut();
let mut last_chunk = chunks_borrow.pop().unwrap();
// Drop the contents of the last chunk.
self.clear_last_chunk(&mut last_chunk);
// The last chunk will be dropped. Destroy all other chunks.
for chunk in chunks_borrow.iter_mut() {
let cap = chunk.storage.cap();
chunk.destroy(cap);
}
// RawVec handles deallocation of `last_chunk` and `self.chunks`.
}
}
}
unsafe impl<T: Send> Send for TypedArena<T> {}
#[cfg(test)]
mod tests {
extern crate test;
use self::test::Bencher;
use super::{Arena, TypedArena};
#[allow(dead_code)]
struct Point {
x: i32,
y: i32,
z: i32,
}
#[test]
fn test_arena_alloc_nested() {
struct Inner {
value: u8,
}
struct Outer<'a> {
inner: &'a Inner,
}
enum EI<'e> {
I(Inner),
O(Outer<'e>),
}
struct Wrap<'a>(TypedArena<EI<'a>>);
impl<'a> Wrap<'a> {
fn alloc_inner<F: Fn() -> Inner>(&self, f: F) -> &Inner {
let r: &EI = self.0.alloc(EI::I(f()));
if let &EI::I(ref i) = r {
i
} else {
panic!("mismatch");
}
}
fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
let r: &EI = self.0.alloc(EI::O(f()));
if let &EI::O(ref o) = r {
o
} else {
panic!("mismatch");
}
}
}
let arena = Wrap(TypedArena::new());
let result = arena.alloc_outer(|| {
Outer { inner: arena.alloc_inner(|| Inner { value: 10 }) }
});
assert_eq!(result.inner.value, 10);
}
#[test]
pub fn test_copy() {
let arena = TypedArena::new();
for _ in 0..100000 {
arena.alloc(Point { x: 1, y: 2, z: 3 });
}
}
#[bench]
pub fn bench_copy(b: &mut Bencher) {
let arena = TypedArena::new();
b.iter(|| arena.alloc(Point { x: 1, y: 2, z: 3 }))
}
#[bench]
pub fn bench_copy_nonarena(b: &mut Bencher) {
b.iter(|| {
let _: Box<_> = Box::new(Point {
x: 1,
y: 2,
z: 3
});
})
}
#[bench]
pub fn bench_copy_old_arena(b: &mut Bencher) {
let arena = Arena::new();
b.iter(|| arena.alloc(|| Point { x: 1, y: 2, z: 3 }))
}
#[allow(dead_code)]
struct Noncopy {
string: String,
array: Vec<i32>,
}
#[test]
pub fn test_noncopy() {
let arena = TypedArena::new();
for _ in 0..100000 {
arena.alloc(Noncopy {
string: "hello world".to_string(),
array: vec![1, 2, 3, 4, 5],
});
}
}
#[bench]
pub fn bench_noncopy(b: &mut Bencher) {
let arena = TypedArena::new();
b.iter(|| {
arena.alloc(Noncopy {
string: "hello world".to_string(),
array: vec![1, 2, 3, 4, 5],
})
})
}
#[bench]
pub fn bench_noncopy_nonarena(b: &mut Bencher) {
b.iter(|| {
let _: Box<_> = Box::new(Noncopy {
string: "hello world".to_string(),
array: vec![1, 2, 3, 4, 5],
});
})
}
#[bench]
pub fn bench_noncopy_old_arena(b: &mut Bencher) {
let arena = Arena::new();
b.iter(|| {
arena.alloc(|| {
Noncopy {
string: "hello world".to_string(),
array: vec![1, 2, 3, 4, 5],
}
})
})
}
#[test]
pub fn test_typed_arena_zero_sized() {
let arena = TypedArena::new();
for _ in 0..100000 {
arena.alloc(());
}
}
#[test]
pub fn test_arena_zero_sized() {
let arena = Arena::new();
for _ in 0..1000 {
for _ in 0..100 {
arena.alloc(|| ());
}
arena.alloc(|| Point {
x: 1,
y: 2,
z: 3,
});
}
}
#[test]
pub fn test_typed_arena_clear() {
let mut arena = TypedArena::new();
for _ in 0..10 {
arena.clear();
for _ in 0..10000 {
arena.alloc(Point {
x: 1,
y: 2,
z: 3,
});
}
}
}
#[test]
pub fn test_arena_clear() {
let mut arena = Arena::new();
for _ in 0..10 {
arena.clear();
for _ in 0..10000 {
arena.alloc(|| Point {
x: 1,
y: 2,
z: 3,
});
arena.alloc(|| Noncopy {
string: "hello world".to_string(),
array: vec![],
});
}
}
}
#[test]
pub fn test_arena_alloc_bytes() {
let arena = Arena::new();
for i in 0..10000 {
arena.alloc(|| Point {
x: 1,
y: 2,
z: 3,
});
for byte in arena.alloc_bytes(i % 42).iter_mut() {
*byte = i as u8;
}
}
}
}