1#![doc = include_str!("../README.md")]
2#![no_std]
3#![cfg_attr(feature = "allocator_api", feature(allocator_api, alloc_layout_extra))]
4
5use core::alloc::{GlobalAlloc, Layout};
6use core::cell::RefCell;
7use core::ptr::{self, NonNull};
8
9use critical_section::Mutex;
10use linked_list_allocator::Heap as LLHeap;
11
/// A heap allocator suitable for use as the `#[global_allocator]`,
/// backed by `linked_list_allocator` and made safe for concurrent use
/// (interrupts / multiple cores) via a `critical_section::Mutex`.
pub struct Heap {
    // Interior mutability: the critical-section Mutex guarantees exclusive
    // access while held; the RefCell then hands out the mutable borrow.
    heap: Mutex<RefCell<LLHeap>>,
}
15
impl Heap {
    /// Create a new UNINITIALIZED heap allocator
    ///
    /// You must initialize this heap using the
    /// [`init`](Self::init) method before using the allocator.
    pub const fn empty() -> Heap {
        Heap {
            heap: Mutex::new(RefCell::new(LLHeap::empty())),
        }
    }

    /// Initializes the heap
    ///
    /// This function must be called BEFORE you run any code that makes use of the
    /// allocator.
    ///
    /// `start_addr` is the address where the heap will be located.
    ///
    /// `size` is the size of the heap in bytes.
    ///
    /// Note that:
    ///
    /// - The heap grows "upwards", towards larger addresses. Thus `start_addr` will
    ///   be the smallest address used.
    ///
    /// - The largest address used is `start_addr + size - 1`, so if `start_addr` is
    ///   `0x1000` and `size` is `0x30000` then the allocator won't use memory at
    ///   addresses `0x31000` and larger.
    ///
    /// # Safety
    ///
    /// Obey these or Bad Stuff will happen.
    ///
    /// - This function must be called exactly ONCE.
    /// - `size > 0`
    /// - The memory region `[start_addr, start_addr + size)` must be valid,
    ///   writable, and not used for anything else (per the contract of
    ///   `linked_list_allocator::Heap::init`).
    pub unsafe fn init(&self, start_addr: usize, size: usize) {
        // The critical section gives us exclusive access to the inner heap,
        // making the RefCell borrow safe against interrupts/other cores.
        critical_section::with(|cs| {
            self.heap
                .borrow(cs)
                .borrow_mut()
                .init(start_addr as *mut u8, size);
        });
    }

    /// Returns an estimate of the amount of bytes in use.
    pub fn used(&self) -> usize {
        critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().used())
    }

    /// Returns an estimate of the amount of bytes available.
    pub fn free(&self) -> usize {
        critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().free())
    }

    /// Allocate the first free block that can hold `layout`.
    ///
    /// Returns `Err(())` if the request cannot be satisfied
    /// (heap exhausted or never initialized).
    fn alloc_first_fit(&self, layout: Layout) -> Result<NonNull<u8>, ()> {
        critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().allocate_first_fit(layout))
    }
}
74
75unsafe impl GlobalAlloc for Heap {
76 unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
77 self.alloc_first_fit(layout)
78 .ok()
79 .map_or(default:ptr::null_mut(), |allocation: NonNull| allocation.as_ptr())
80 }
81
82 unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
83 critical_section::with(|cs: CriticalSection<'_>| {
84 self.heap
85 .borrow(cs)
86 .borrow_mut()
87 .deallocate(ptr:NonNull::new_unchecked(ptr), layout)
88 });
89 }
90}
91
92#[cfg(feature = "allocator_api")]
93mod allocator_api {
94 use core::alloc::{AllocError, Allocator, GlobalAlloc, Layout};
95 use core::ptr::NonNull;
96
97 unsafe impl Allocator for crate::Heap {
98 fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
99 match layout.size() {
100 0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
101 size => self
102 .alloc_first_fit(layout)
103 .map(|allocation| NonNull::slice_from_raw_parts(allocation, size))
104 .map_err(|_| AllocError),
105 }
106 }
107
108 unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
109 if layout.size() != 0 {
110 self.dealloc(ptr.as_ptr(), layout);
111 }
112 }
113 }
114}
115