//! A minimal arena allocator inspired by `rustc_arena::DroplessArena`.
//!
//! This is unfortunately a minimal re-implementation rather than a dependency
//! as it is difficult to depend on crates from within `proc_macro`, due to it
//! being built at the same time as `std`.

use std::cell::{Cell, RefCell};
use std::cmp;
use std::mem::MaybeUninit;
use std::ops::Range;
use std::ptr;
use std::slice;
use std::str;

// The arenas start with PAGE-sized chunks, and then each new chunk is twice as
// big as its predecessor, up until we reach HUGE_PAGE-sized chunks, whereupon
// we stop growing. This scales well, from arenas that are barely used up to
// arenas that are used for 100s of MiBs. Note also that the chosen sizes match
// the usual sizes of pages and huge pages on Linux.
const PAGE: usize = 4096;
const HUGE_PAGE: usize = 2 * 1024 * 1024;
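// For example, assuming no single allocation larger than a chunk, successive
// chunk sizes are 4 KiB, 8 KiB, 16 KiB, ..., 1 MiB, 2 MiB, and then 2 MiB for
// every chunk thereafter.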

/// A minimal arena allocator inspired by `rustc_arena::DroplessArena`.
///
/// This is unfortunately a complete re-implementation rather than a dependency
/// as it is difficult to depend on crates from within `proc_macro`, due to it
/// being built at the same time as `std`.
///
/// This arena doesn't have support for allocating anything other than byte
/// slices, as that is all that is necessary.
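///
/// `start` and `end` delimit the unallocated region of the current chunk:
/// allocation bumps `end` downwards towards `start`, and `chunks` keeps every
/// chunk alive (and its allocations valid) for the lifetime of the arena.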
pub(crate) struct Arena {
    start: Cell<*mut MaybeUninit<u8>>,
    end: Cell<*mut MaybeUninit<u8>>,
    chunks: RefCell<Vec<Box<[MaybeUninit<u8>]>>>,
}

impl Arena {
    pub(crate) fn new() -> Self {
        Arena {
            start: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: RefCell::new(Vec::new()),
        }
    }

    /// Add a new chunk with at least `additional` free bytes.
    #[inline(never)]
    #[cold]
    fn grow(&self, additional: usize) {
        let mut chunks = self.chunks.borrow_mut();
        let mut new_cap;
        if let Some(last_chunk) = chunks.last_mut() {
            // Double the previous chunk's size, capping the result at
            // HUGE_PAGE bytes so chunks stop growing once they reach
            // huge-page size.
            new_cap = last_chunk.len().min(HUGE_PAGE / 2);
            new_cap *= 2;
        } else {
            new_cap = PAGE;
        }
        // Also ensure that this chunk can fit `additional`.
        new_cap = cmp::max(additional, new_cap);

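        // Point the bump pointers at the fresh chunk. Any free space still
        // left in the previous chunk is simply abandoned; the arena never
        // allocates from it again.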
        let mut chunk = Box::new_uninit_slice(new_cap);
        let Range { start, end } = chunk.as_mut_ptr_range();
        self.start.set(start);
        self.end.set(end);
        chunks.push(chunk);
    }

    /// Allocates a byte slice with specified size from the current memory
    /// chunk. Returns `None` if there is no free space left to satisfy the
    /// request.
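    ///
    /// Allocation proceeds downwards: `end` is bumped towards `start`, so the
    /// returned slice always ends at the previous value of `end`.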
    fn alloc_raw_without_grow(&self, bytes: usize) -> Option<&mut [MaybeUninit<u8>]> {
        let start = self.start.get().addr();
        let old_end = self.end.get();
        let end = old_end.addr();

        let new_end = end.checked_sub(bytes)?;
        if start <= new_end {
            let new_end = old_end.with_addr(new_end);
            self.end.set(new_end);
            // SAFETY: `bytes` bytes starting at `new_end` were just reserved.
            Some(unsafe { slice::from_raw_parts_mut(new_end, bytes) })
        } else {
            None
        }
    }

    fn alloc_raw(&self, bytes: usize) -> &mut [MaybeUninit<u8>] {
        if bytes == 0 {
            return &mut [];
        }

        loop {
            if let Some(a) = self.alloc_raw_without_grow(bytes) {
                break a;
            }
            // No free space left. Allocate a new chunk to satisfy the request.
            // On failure the grow will panic or abort.
            self.grow(bytes);
        }
    }

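    /// Copies `string` into the arena and returns a reference to the copy
    /// that lives as long as the arena itself.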
    pub(crate) fn alloc_str<'a>(&'a self, string: &str) -> &'a mut str {
        let alloc = self.alloc_raw(string.len());
        let bytes = MaybeUninit::write_slice(alloc, string.as_bytes());

        // SAFETY: we convert from `&str` to `&[u8]`, copy it into the arena,
        // and immediately convert the copy back to `&str`.
        unsafe { str::from_utf8_unchecked_mut(bytes) }
    }
}
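
// Illustrative sketch (an addition, not part of the original module): a small
// test showing intended usage. The test name and the strings are hypothetical;
// it simply exercises `alloc_str`, whose returned references borrow from the
// arena and remain valid for as long as the arena does.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn alloc_str_copies_into_arena() {
        let arena = Arena::new();
        // Each call copies the string into the current chunk and hands back a
        // reference tied to the arena's lifetime.
        let a = arena.alloc_str("hello");
        let b = arena.alloc_str("world");
        assert_eq!(a, "hello");
        assert_eq!(b, "world");
        // A request larger than the current chunk forces `grow` to allocate a
        // chunk big enough to hold it in one piece.
        let big = "x".repeat(3 * PAGE);
        let c = arena.alloc_str(&big);
        assert_eq!(c.len(), 3 * PAGE);
    }
}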