use core::ptr::Unique;
use core::mem::{self, size_of};
-use core::intrinsics;
+
+#[cfg(not(test))]
+macro_rules! println {
+    ($fmt:expr) => { };
+    ($fmt:expr, $($arg:tt)*) => { };
+}

use super::align_up;

-pub struct Hole {
-    pub size: usize,
-    pub next: Option<Unique<Hole>>,
+pub struct HoleList {
+    first: Hole, // dummy
}

-impl Hole {
-    // Returns the first hole that is big enough starting at the **next** hole. The reason is that
-    // it is implemented as a single linked list (we need to update the previous pointer). So even
-    // if _this_ hole would be large enough, it won't be used.
-    pub fn get_first_fit(&mut self, size: usize, align: usize) -> Option<Unique<Hole>> {
-        assert!(size % size_of::<usize>() == 0);
-        // align must be a power of two
-        assert!(unsafe { intrinsics::ctpop(align) } == 1); // exactly one bit set
-
-        // take the next hole and set `self.next` to None
-        match self.next.take() {
-            None => None,
-            Some(mut next) => {
-                let next_addr = *next as usize;
-                let start_addr = align_up(next_addr, align);
-
-                // the needed padding for the desired alignment
-                let padding = start_addr - next_addr;
-                assert!(padding == 0 || padding >= size_of::<usize>() * 2); // TODO
-                let next_real_size = unsafe { next.get() }.size - padding;
-
-                if next_real_size == size {
-                    let next_next: Option<Unique<_>> = unsafe { next.get_mut() }.next.take();
-                    self.next = next_next;
-                    Some(next)
-                } else if next_real_size > size {
-                    let next_next: Option<Unique<_>> = unsafe { next.get_mut() }.next.take();
-                    let new_hole = Hole {
-                        size: next_real_size - size,
-                        next: next_next,
-                    };
-                    unsafe {
-                        let mut new_hole_ptr = Unique::new((start_addr + size) as *mut Hole);
-                        mem::forget(mem::replace(new_hole_ptr.get_mut(), new_hole));
-                        self.next = Some(new_hole_ptr);
-                    }
-                    Some(next)
-                } else {
-                    let ret = unsafe { next.get_mut().get_first_fit(size, align) };
-                    self.next = Some(next);
-                    ret
-                }
-            }
+impl HoleList {
+    pub unsafe fn new(ptr: *mut Hole, size: usize) -> HoleList {
+        assert!(size_of::<Hole>() == Self::min_size());
+
+        mem::forget(mem::replace(&mut *ptr,
+                                 Hole {
+                                     size: size,
+                                     next: None,
+                                 }));
+
+        HoleList {
+            first: Hole {
+                size: 0,
+                next: Some(Unique::new(ptr)),
+            },
        }
    }

-    pub fn add_hole(&mut self, mut hole: Unique<Hole>) {
-        unsafe {
-            if hole.get().size == 0 {
-                return;
+    pub fn allocate_first_fit(&mut self, size: usize, align: usize) -> Option<*mut u8> {
+        println!("allocate {} bytes (align {})", size, align);
+        assert!(size >= Self::min_size());
+
+        if let Some((start_addr, front_padding, back_padding)) =
+            allocate_first_fit(&mut self.first, size, align) {
+            if let Some((padding_addr, padding_size)) = front_padding {
+                self.deallocate(padding_addr as *mut u8, padding_size)
+            }
+            if let Some((padding_addr, padding_size)) = back_padding {
+                self.deallocate(padding_addr as *mut u8, padding_size)
            }
-            assert!(hole.get().size % size_of::<usize>() == 0);
-            assert!(hole.get().next.is_none());
+            Some(start_addr as *mut u8)
+        } else {
+            None
        }
+    }
+
+    pub fn deallocate(&mut self, ptr: *mut u8, size: usize) {
+        println!("deallocate {:p} ({} bytes)", ptr, size);
+        assert!(size >= Self::min_size());
+
+        deallocate(&mut self.first, ptr as usize, size)
+    }

-        let hole_addr = *hole as usize;
+    pub fn min_size() -> usize {
+        size_of::<usize>() * 2
+    }

-        if self.next.as_mut().map_or(false, |n| hole_addr < **n as usize) {
-            // hole is before start of next hole or this is the last hole
-            let self_addr = self as *mut _ as usize;
+    #[cfg(test)]
+    pub fn first_hole(&self) -> Option<(usize, usize)> {
+        if let Some(first) = self.first.next.as_ref() {
+            Some((**first as usize, unsafe { first.get().size }))
+        } else {
+            None
+        }
+    }
+}

-            if hole_addr == self_addr + self.size {
-                // new hole is right behind this hole, so we can just increase this's size
-                self.size += unsafe { hole.get().size };
+pub struct Hole {
+    pub size: usize,
+    pub next: Option<Unique<Hole>>,
+}
+
+fn allocate_first_fit(previous: &mut Hole,
+                      size: usize,
+                      align: usize)
+                      -> Option<(usize, Option<(usize, usize)>, Option<(usize, usize)>)> {
+    let mut front_padding = None;
+    let mut back_padding = None;
+
+    if previous.next.is_some() {
+        let hole_addr = **previous.next.as_ref().unwrap() as usize;
+        let aligned_hole_addr = align_up(hole_addr, align);
+
+        if aligned_hole_addr > hole_addr {
+            if aligned_hole_addr < hole_addr + HoleList::min_size() {
+                // hole would cause a new, too small hole. try next hole
+                return allocate_first_fit(unsafe { previous.next.as_mut().unwrap().get_mut() },
+                                          size,
+                                          align);
            } else {
-                // insert the hole behind this hole
-                unsafe { hole.get_mut() }.next = self.next.take();
-                self.next = Some(hole);
+                let padding_hole_size = aligned_hole_addr - hole_addr;
+                front_padding = Some((hole_addr, padding_hole_size));
            }
-        } else {
-            // hole is behind next hole
-            assert!(self.next.is_some());
-            let next = self.next.as_mut().unwrap();
-            assert!(hole_addr > **next as usize);
+        }
+
+        let aligned_hole_size = unsafe { previous.next.as_ref().unwrap().get().size } -
+                                (aligned_hole_addr - hole_addr);

-            // insert it behind next hole
-            unsafe { next.get_mut().add_hole(hole) };
+        if aligned_hole_size > size {
+            if aligned_hole_size - size < HoleList::min_size() {
+                // hole would cause a new, too small hole. try next hole
+                return allocate_first_fit(unsafe { previous.next.as_mut().unwrap().get_mut() },
+                                          size,
+                                          align);
+            } else {
+                let padding_hole_size = aligned_hole_size - size;
+                back_padding = Some((aligned_hole_addr + size, padding_hole_size));
+            }
+        }
+
+        if aligned_hole_size >= size {
+            previous.next = unsafe { previous.next.as_mut().unwrap().get_mut().next.take() };
+            Some((aligned_hole_addr, front_padding, back_padding))
+        } else {
+            // hole is too small, try next hole
+            return allocate_first_fit(unsafe { previous.next.as_mut().unwrap().get_mut() },
+                                      size,
+                                      align);
        }
+    } else {
+        None
+    }
+}
+
+fn deallocate(hole: &mut Hole, addr: usize, size: usize) {
+    let hole_addr = if hole.size == 0 {
+        0 // dummy
+    } else {
+        hole as *mut _ as usize
+    };
+    assert!(addr >= hole_addr + hole.size);
+
+    if hole.next.is_some() && addr + size == **hole.next.as_ref().unwrap() as usize {
+        // it is right before the next hole -> delete the next hole and free the joined block
+        println!("1");
+        let next_hole_next = unsafe { hole.next.as_mut().unwrap().get_mut() }.next.take();
+        let next_hole_size = unsafe { hole.next.as_ref().unwrap().get() }.size;
+        hole.next = next_hole_next;
+        deallocate(hole, addr, size + next_hole_size);
+    } else if hole.next.is_some() && addr >= **hole.next.as_ref().unwrap() as usize {
+        // it is behind the next hole -> delegate to next hole
+        println!("2");
+        deallocate(unsafe { hole.next.as_mut().unwrap().get_mut() }, addr, size);
+    } else if addr == hole_addr + hole.size {
+        // the freed block is right behind this hole -> just increase the size
+        println!("3");
+        hole.size += size;
+    } else {
+        // the freed block is before the next hole (or this is the last hole)
+        println!("4");
+        let new_hole = Hole {
+            size: size,
+            next: hole.next.take(),
+        };
+        let ptr = addr as *mut Hole;
+        mem::forget(mem::replace(unsafe { &mut *ptr }, new_hole));
+        hole.next = Some(unsafe { Unique::new(ptr) });
    }
}
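
For orientation, here is a rough sketch of how the new HoleList API is meant to be driven. It is not part of the commit: the backing buffer, its size, and the request sizes are invented for the example, and it assumes the same nightly-era Unique-based code shown in the diff above.

// Illustrative sketch only; names and sizes are made up for the example.
static mut HEAP: [usize; 512] = [0; 512]; // word-aligned backing storage

unsafe fn usage_sketch() {
    // Wrap the buffer in a hole list; initially it is one big hole.
    let mut list = HoleList::new(HEAP.as_mut_ptr() as *mut Hole,
                                 HEAP.len() * size_of::<usize>());

    // First-fit allocation; `size` must be at least `HoleList::min_size()`.
    let block = list.allocate_first_fit(64, 8).expect("out of memory");

    // Freeing turns the block back into a hole and merges it with
    // neighbouring holes where possible.
    list.deallocate(block, 64);
}

Any front or back padding produced while satisfying the request is handed straight back to deallocate, so the two methods above are enough to exercise the whole hole list.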
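The diff relies on align_up from the parent module, which is not shown here. The sketch below gives a typical power-of-two implementation (an assumption, not necessarily the crate's actual definition) together with a worked run of the padding arithmetic used in the free allocate_first_fit function; the addresses and sizes are invented for illustration.

/// Round `addr` up to the next multiple of `align`.
/// Assumes `align` is a power of two (the usual contract for such a helper);
/// this is a guess at the parent module's helper, not code from the commit.
fn align_up(addr: usize, align: usize) -> usize {
    (addr + align - 1) & !(align - 1)
}

fn padding_example() {
    // A candidate hole starting at 0x1008 with 96 bytes, and a request
    // for 32 bytes aligned to 32 (all values chosen for the example).
    let (hole_addr, hole_size) = (0x1008, 96);
    let (size, align) = (32, 32);

    let aligned = align_up(hole_addr, align); // 0x1020
    let front = aligned - hole_addr;          // 24 bytes, >= min_size, kept as front padding
    let back = (hole_size - front) - size;    // 40 bytes, >= min_size, kept as back padding

    assert_eq!((aligned, front, back), (0x1020, 24, 40));
    // allocate_first_fit would return (0x1020, Some((0x1008, 24)), Some((0x1040, 40)));
    // the two padding pieces are immediately handed back to `deallocate` as new holes.
}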