#![cfg_attr(
    feature = "alloc_ref",
    feature(allocator_api, alloc_layout_extra, nonnull_slice_from_raw_parts)
)]
#![no_std]

#[cfg(any(test, fuzzing))]
#[macro_use]
extern crate std;

#[cfg(feature = "use_spin")]
extern crate spinning_top;

#[cfg(feature = "use_spin")]
use core::alloc::GlobalAlloc;
use core::alloc::Layout;
#[cfg(feature = "alloc_ref")]
use core::alloc::{AllocError, Allocator};
use core::mem::MaybeUninit;
#[cfg(feature = "use_spin")]
use core::ops::Deref;
use core::ptr::NonNull;
#[cfg(test)]
use hole::Hole;
use hole::HoleList;
#[cfg(feature = "use_spin")]
use spinning_top::Spinlock;

pub mod hole;
#[cfg(test)]
mod test;

/// A fixed size heap backed by a linked list of free memory blocks.
pub struct Heap {
    used: usize,
    holes: HoleList,
}

#[cfg(fuzzing)]
impl Heap {
    pub fn debug(&mut self) {
        println!(
            "bottom: {:?}, top: {:?}, size: {}, pending: {}",
            self.bottom(),
            self.top(),
            self.size(),
            self.holes.first.size,
        );
        self.holes.debug();
    }
}

unsafe impl Send for Heap {}

impl Heap {
    /// Creates an empty heap. All allocate calls will return an error.
    pub const fn empty() -> Heap {
        Heap {
            used: 0,
            holes: HoleList::empty(),
        }
    }
    /// Initializes an empty heap.
    ///
    /// The `heap_bottom` pointer is automatically aligned, so the [`bottom()`][Self::bottom]
    /// method might return a pointer that is larger than `heap_bottom` after construction.
    ///
    /// The given `heap_size` must be large enough to store the required
    /// metadata, otherwise this function will panic. Depending on the
    /// alignment of the `heap_bottom` pointer, the minimum size is between
    /// `2 * size_of::<usize>` and `3 * size_of::<usize>`.
    ///
    /// The usable size for allocations will be truncated to the nearest
    /// multiple of `align_of::<usize>`. Any extra bytes left at the end
    /// will be reclaimed once sufficient additional space is given to
    /// [`extend`][Heap::extend].
    ///
    /// # Safety
    ///
    /// This function must be called at most once and must only be used on an
    /// empty heap.
    ///
    /// The bottom address must be valid and the memory in the
    /// `[heap_bottom, heap_bottom + heap_size)` range must not be used for anything else.
    /// This function is unsafe because it can cause undefined behavior if the given address
    /// is invalid.
    ///
    /// The provided memory range must be valid for the `'static` lifetime.
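    ///
    /// # Example
    ///
    /// A minimal sketch of wiring a static buffer into the heap (the buffer name and
    /// size below are illustrative, not part of this crate):
    ///
    /// ```ignore
    /// use core::mem::MaybeUninit;
    ///
    /// static mut HEAP_MEM: [MaybeUninit<u8>; 4096] = [MaybeUninit::uninit(); 4096];
    ///
    /// let mut heap = Heap::empty();
    /// unsafe {
    ///     // The buffer is used for nothing else and lives for `'static`.
    ///     heap.init(HEAP_MEM.as_mut_ptr().cast::<u8>(), HEAP_MEM.len());
    /// }
    /// ```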
    pub unsafe fn init(&mut self, heap_bottom: *mut u8, heap_size: usize) {
        self.used = 0;
        self.holes = HoleList::new(heap_bottom, heap_size);
    }

    /// Initializes an empty heap with the provided memory.
    ///
    /// The caller is responsible for procuring a region of raw memory that may be utilized by the
    /// allocator. This might be done via any method such as (unsafely) taking a region from the
    /// program's memory, from a mutable static, or by allocating and leaking such memory from
    /// another allocator.
    ///
    /// The latter approach may be especially useful if the underlying allocator does not perform
    /// deallocation (e.g. a simple bump allocator). Then the overlaid linked-list allocator can
    /// provide memory reclamation.
    ///
    /// The usable size for allocations will be truncated to the nearest
    /// multiple of `align_of::<usize>`. Any extra bytes left at the end
    /// will be reclaimed once sufficient additional space is given to
    /// [`extend`][Heap::extend].
    ///
    /// # Panics
    ///
    /// This method panics if the heap is already initialized.
    ///
    /// It also panics when the length of the given `mem` slice is not large enough to
    /// store the required metadata. Depending on the alignment of the slice, the minimum
    /// size is between `2 * size_of::<usize>` and `3 * size_of::<usize>`.
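    ///
    /// # Example
    ///
    /// A rough sketch using memory leaked from another allocator; the helper
    /// `acquire_static_memory` is a placeholder for whatever source the caller uses:
    ///
    /// ```ignore
    /// let mem: &'static mut [MaybeUninit<u8>] = acquire_static_memory();
    /// let mut heap = Heap::empty();
    /// heap.init_from_slice(mem);
    /// ```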
    pub fn init_from_slice(&mut self, mem: &'static mut [MaybeUninit<u8>]) {
        assert!(
            self.bottom().is_null(),
            "The heap has already been initialized."
        );
        let size = mem.len();
        let address = mem.as_mut_ptr().cast();
        // SAFETY: All initialization requires the bottom address to be valid, which implies it
        // must not be 0. Initially the address is 0. The assertion above ensures that no
        // initialization had been called before.
        // The given address and size are valid according to the safety invariants of the mutable
        // reference handed to us by the caller.
        unsafe { self.init(address, size) }
    }

    /// Creates a new heap with the given `heap_bottom` and `heap_size`.
    ///
    /// The `heap_bottom` pointer is automatically aligned, so the [`bottom()`][Self::bottom]
    /// method might return a pointer that is larger than `heap_bottom` after construction.
    ///
    /// The given `heap_size` must be large enough to store the required
    /// metadata, otherwise this function will panic. Depending on the
    /// alignment of the `heap_bottom` pointer, the minimum size is between
    /// `2 * size_of::<usize>` and `3 * size_of::<usize>`.
    ///
    /// The usable size for allocations will be truncated to the nearest
    /// multiple of `align_of::<usize>`. Any extra bytes left at the end
    /// will be reclaimed once sufficient additional space is given to
    /// [`extend`][Heap::extend].
    ///
    /// # Safety
    ///
    /// The bottom address must be valid and the memory in the
    /// `[heap_bottom, heap_bottom + heap_size)` range must not be used for anything else.
    /// This function is unsafe because it can cause undefined behavior if the given address
    /// is invalid.
    ///
    /// The provided memory range must be valid for the `'static` lifetime.
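    ///
    /// # Example
    ///
    /// A minimal sketch, assuming a dedicated static buffer (the name and size are
    /// illustrative):
    ///
    /// ```ignore
    /// static mut HEAP_MEM: [u8; 4096] = [0; 4096];
    ///
    /// // SAFETY: the buffer is used exclusively by the heap and is `'static`.
    /// let mut heap = unsafe { Heap::new(HEAP_MEM.as_mut_ptr(), HEAP_MEM.len()) };
    /// ```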
    pub unsafe fn new(heap_bottom: *mut u8, heap_size: usize) -> Heap {
        Heap {
            used: 0,
            holes: HoleList::new(heap_bottom, heap_size),
        }
    }

    /// Creates a new heap from a slice of raw memory.
    ///
    /// This is a convenience function that has the same effect as calling
    /// [`init_from_slice`][Self::init_from_slice] on an empty heap. All the requirements of
    /// `init_from_slice` apply to this function as well.
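    ///
    /// # Example
    ///
    /// A rough sketch, assuming the caller already owns a suitable `'static` slice
    /// (`acquire_static_memory` is a placeholder for that source):
    ///
    /// ```ignore
    /// let mem: &'static mut [MaybeUninit<u8>] = acquire_static_memory();
    /// let heap = Heap::from_slice(mem);
    /// ```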
    pub fn from_slice(mem: &'static mut [MaybeUninit<u8>]) -> Heap {
        let size = mem.len();
        let address = mem.as_mut_ptr().cast();
        // SAFETY: The given address and size are valid according to the safety invariants of the
        // mutable reference handed to us by the caller.
        unsafe { Self::new(address, size) }
    }

    /// Allocates a chunk of the given size with the given alignment. Returns a pointer to the
    /// beginning of that chunk if it was successful. Otherwise it returns `Err(())`.
    /// This function scans the list of free memory blocks and uses the first block that is big
    /// enough. The runtime is in `O(n)` where `n` is the number of free blocks, but it should be
    /// reasonably fast for small allocations.
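    ///
    /// # Example
    ///
    /// A minimal allocate/deallocate round trip (sketch only, assuming an already
    /// initialized `heap`):
    ///
    /// ```ignore
    /// let layout = Layout::from_size_align(64, 8).unwrap();
    /// if let Ok(ptr) = heap.allocate_first_fit(layout) {
    ///     // ... use the memory ...
    ///     unsafe { heap.deallocate(ptr, layout) };
    /// }
    /// ```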
    //
    // NOTE: We could probably replace this with an `Option` instead of a `Result` in a later
    // release to remove this clippy warning
    #[allow(clippy::result_unit_err)]
    pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<u8>, ()> {
        match self.holes.allocate_first_fit(layout) {
            Ok((ptr, aligned_layout)) => {
                self.used += aligned_layout.size();
                Ok(ptr)
            }
            Err(err) => Err(err),
        }
    }

    /// Frees the given allocation. `ptr` must be a pointer returned
    /// by a call to the `allocate_first_fit` function with identical size and alignment.
    ///
    /// This function walks the list of free memory blocks and inserts the freed block at the
    /// correct place. If the freed block is adjacent to another free block, the blocks are merged
    /// again. This operation is in `O(n)` since the list needs to be sorted by address.
    ///
    /// # Safety
    ///
    /// `ptr` must be a pointer returned by a call to the
    /// [`allocate_first_fit`][Self::allocate_first_fit] function with identical layout.
    /// Undefined behavior may occur for invalid arguments.
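    ///
    /// # Example
    ///
    /// A sketch of the required pairing between allocation and deallocation (assuming an
    /// initialized `heap`):
    ///
    /// ```ignore
    /// let layout = Layout::from_size_align(128, 16).unwrap();
    /// let ptr = heap.allocate_first_fit(layout).expect("out of memory");
    /// // The same layout must be passed back when freeing.
    /// unsafe { heap.deallocate(ptr, layout) };
    /// ```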
    pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) {
        self.used -= self.holes.deallocate(ptr, layout).size();
    }

    /// Returns the bottom address of the heap.
    ///
    /// The bottom pointer is automatically aligned, so the returned pointer
    /// might be larger than the bottom pointer used for initialization.
    pub fn bottom(&self) -> *mut u8 {
        self.holes.bottom
    }

    /// Returns the size of the heap.
    ///
    /// This is the size the heap is using for allocations, not necessarily the
    /// total amount of bytes given to the heap. To determine the exact memory
    /// boundaries, use [`bottom`][Self::bottom] and [`top`][Self::top].
    pub fn size(&self) -> usize {
        unsafe { self.holes.top.offset_from(self.holes.bottom) as usize }
    }

    /// Returns the top address of the heap.
    ///
    /// Note: The heap may choose to not use bytes at the end for allocations
    /// until there is enough room for metadata, but it still retains ownership
    /// over memory from [`bottom`][Self::bottom] to the address returned.
    pub fn top(&self) -> *mut u8 {
        unsafe { self.holes.top.add(self.holes.pending_extend as usize) }
    }

    /// Returns the size of the used part of the heap.
    pub fn used(&self) -> usize {
        self.used
    }

    /// Returns the size of the free part of the heap.
    pub fn free(&self) -> usize {
        self.size() - self.used
    }

    /// Extends the size of the heap by creating a new hole at the end.
    ///
    /// Small extensions are not guaranteed to grow the usable size of
    /// the heap. In order to grow the Heap most effectively, extend by
    /// at least `2 * size_of::<usize>`, keeping the amount a multiple of
    /// `size_of::<usize>`.
    ///
    /// Calling this method on an uninitialized Heap will panic.
    ///
    /// # Safety
    ///
    /// The amount of data given in `by` MUST exist directly after the original
    /// range of data provided when constructing the [Heap]. The additional data
    /// must have the same lifetime as the original range of data.
    ///
    /// Even if this operation doesn't increase the [usable size][`Self::size`]
    /// by exactly `by` bytes, those bytes are still owned by the Heap for
    /// later use.
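    ///
    /// # Example
    ///
    /// A sketch of growing a heap whose backing region has become larger (the sizes are
    /// illustrative):
    ///
    /// ```ignore
    /// // The heap currently owns `[bottom, bottom + 4096)` and the caller has made the
    /// // next 4096 bytes directly after that range available as well.
    /// unsafe { heap.extend(4096) };
    /// ```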
    pub unsafe fn extend(&mut self, by: usize) {
        self.holes.extend(by);
    }
}

#[cfg(all(feature = "alloc_ref", feature = "use_spin"))]
unsafe impl Allocator for LockedHeap {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        if layout.size() == 0 {
            return Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0));
        }
        match self.0.lock().allocate_first_fit(layout) {
            Ok(ptr) => Ok(NonNull::slice_from_raw_parts(ptr, layout.size())),
            Err(()) => Err(AllocError),
        }
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        if layout.size() != 0 {
            self.0.lock().deallocate(ptr, layout);
        }
    }
}

/// A [`Heap`] protected by a spinlock, making it usable as a `#[global_allocator]`.
#[cfg(feature = "use_spin")]
pub struct LockedHeap(Spinlock<Heap>);

#[cfg(feature = "use_spin")]
impl LockedHeap {
    /// Creates an empty heap. All allocate calls will return an error.
    pub const fn empty() -> LockedHeap {
        LockedHeap(Spinlock::new(Heap::empty()))
    }

    /// Creates a new heap with the given `heap_bottom` and `heap_size`.
    ///
    /// The `heap_bottom` pointer is automatically aligned, so the [`bottom()`][Heap::bottom]
    /// method might return a pointer that is larger than `heap_bottom` after construction.
    ///
    /// The given `heap_size` must be large enough to store the required
    /// metadata, otherwise this function will panic. Depending on the
    /// alignment of the `heap_bottom` pointer, the minimum size is between
    /// `2 * size_of::<usize>` and `3 * size_of::<usize>`.
    ///
    /// # Safety
    ///
    /// The bottom address must be valid and the memory in the
    /// `[heap_bottom, heap_bottom + heap_size)` range must not be used for anything else.
    /// This function is unsafe because it can cause undefined behavior if the given address
    /// is invalid.
    ///
    /// The provided memory range must be valid for the `'static` lifetime.
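    ///
    /// # Example
    ///
    /// A sketch of registering the locked heap as the global allocator; the buffer name,
    /// size, and the place where `init_heap` is called are illustrative:
    ///
    /// ```ignore
    /// #[global_allocator]
    /// static ALLOCATOR: LockedHeap = LockedHeap::empty();
    ///
    /// static mut HEAP_MEM: [u8; 64 * 1024] = [0; 64 * 1024];
    ///
    /// fn init_heap() {
    ///     unsafe {
    ///         // The spinlock is reached through `Deref`, so `lock()` works directly.
    ///         ALLOCATOR.lock().init(HEAP_MEM.as_mut_ptr(), HEAP_MEM.len());
    ///     }
    /// }
    /// ```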
    pub unsafe fn new(heap_bottom: *mut u8, heap_size: usize) -> LockedHeap {
        LockedHeap(Spinlock::new(Heap {
            used: 0,
            holes: HoleList::new(heap_bottom, heap_size),
        }))
    }
}

#[cfg(feature = "use_spin")]
impl Deref for LockedHeap {
    type Target = Spinlock<Heap>;

    fn deref(&self) -> &Spinlock<Heap> {
        &self.0
    }
}

#[cfg(feature = "use_spin")]
unsafe impl GlobalAlloc for LockedHeap {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        self.0
            .lock()
            .allocate_first_fit(layout)
            .ok()
            .map_or(core::ptr::null_mut(), |allocation| allocation.as_ptr())
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.0
            .lock()
            .deallocate(NonNull::new_unchecked(ptr), layout)
    }
}

/// Align downwards. Returns the greatest `x` with alignment `align`
/// so that `x <= size`. The alignment must be a power of 2.
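///
/// # Example
///
/// A quick sketch of the rounding behavior:
///
/// ```ignore
/// assert_eq!(align_down_size(13, 8), 8);
/// assert_eq!(align_down_size(16, 8), 16);
/// ```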
pub fn align_down_size(size: usize, align: usize) -> usize {
    if align.is_power_of_two() {
        size & !(align - 1)
    } else if align == 0 {
        size
    } else {
        panic!("`align` must be a power of 2");
    }
}

/// Align upwards. Returns the smallest `x` that is a multiple of `align`
/// so that `x >= size`. The alignment must be a power of 2.
pub fn align_up_size(size: usize, align: usize) -> usize {
    align_down_size(size + align - 1, align)
}

/// Align upwards. Returns the smallest `x` with alignment `align`
/// so that `x >= addr`. The alignment must be a power of 2.
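///
/// # Example
///
/// A rough sketch of aligning a pointer into a local buffer:
///
/// ```ignore
/// let mut buffer = [0u8; 32];
/// let ptr = buffer.as_mut_ptr();
/// let aligned = align_up(ptr, 16);
/// assert_eq!(aligned as usize % 16, 0);
/// assert!(aligned >= ptr);
/// ```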
pub fn align_up(addr: *mut u8, align: usize) -> *mut u8 {
    let offset = addr.align_offset(align);
    addr.wrapping_add(offset)
}