1use core::ptr::NonNull;
2
3#[doc(inline)]
4pub use alloc_crate::alloc::{alloc, alloc_zeroed, dealloc, handle_alloc_error, realloc};
5
6use crate::stable::{assume, invalid_mut};
7
8use super::{AllocError, Allocator, Layout};
9
/// The global memory allocator.
///
/// This type implements the [`Allocator`] trait by forwarding calls
/// to the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](crate#functions).
///
/// `Global` is a zero-sized type: all instances are interchangeable, which is
/// why it can be `Copy`, `Clone`, and `Default` with no state to carry.
#[derive(Copy, Clone, Default, Debug)]
pub struct Global;
20
impl Global {
    /// Allocates a block of memory described by `layout`, zero-initialized
    /// when `zeroed` is `true`.
    ///
    /// Zero-sized requests are not forwarded to the underlying allocator;
    /// instead a dangling, suitably aligned pointer is returned, as the
    /// [`Allocator`] contract permits. On success the returned slice pointer
    /// covers exactly `layout.size()` bytes.
    #[inline(always)]
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            // Zero-sized allocation: synthesize a dangling pointer carrying
            // the requested alignment as its address.
            // SAFETY: `layout.align()` is always non-zero, so the pointer
            // produced by `invalid_mut` is non-null.
            0 => Ok(unsafe {
                NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                    invalid_mut(layout.align()),
                    0,
                ))
            }),
            // SAFETY: `layout` is non-zero in size,
            size => unsafe {
                let raw_ptr = if zeroed {
                    alloc_zeroed(layout)
                } else {
                    alloc(layout)
                };
                // A null return means the global allocator failed; report it
                // as `AllocError` rather than calling `handle_alloc_error`.
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                    ptr.as_ptr(),
                    size,
                )))
            },
        }
    }

    /// Shared implementation of [`Allocator::grow`] and
    /// [`Allocator::grow_zeroed`]; when `zeroed` is `true`, the bytes beyond
    /// the old size are zero-filled.
    ///
    /// # Safety
    /// Same as `Allocator::grow`: `ptr` must denote a block currently
    /// allocated by this allocator with `old_layout`, and
    /// `new_layout.size()` must be greater than or equal to
    /// `old_layout.size()`.
    #[inline(always)]
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            // The old "allocation" was zero-sized, i.e. nothing was ever
            // handed out by the underlying allocator, so growing is simply a
            // fresh allocation.
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero because the caller guarantees
            // `new_layout.size() >= old_layout.size()` and `old_size` is
            // non-zero in this arm. Other conditions must be upheld by the
            // caller.
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
                assume(new_size >= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                // `realloc` preserves the old contents but leaves the grown
                // tail uninitialized; zero it on the `grow_zeroed` path.
                if zeroed {
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                    ptr.as_ptr(),
                    new_size,
                )))
            },

            // Alignment changed, so `realloc` cannot be used: allocate a new
            // block, copy the contents across, then free the old block.
            //
            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                core::ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr().cast(), old_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
97
unsafe impl Allocator for Global {
    /// Allocates memory for `layout`; forwards to `alloc_impl` without
    /// zero-initialization.
    #[inline(always)]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    /// Allocates zero-initialized memory for `layout`; forwards to
    /// `alloc_impl` with zeroing enabled.
    #[inline(always)]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    /// Deallocates `ptr`. Zero-sized layouts were never forwarded to the
    /// underlying allocator (see `alloc_impl`), so they are skipped here —
    /// passing the dangling pointer to `dealloc` would be undefined behavior.
    #[inline(always)]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        if layout.size() != 0 {
            // SAFETY: `layout` is non-zero in size,
            // other conditions must be upheld by the caller
            unsafe { dealloc(ptr.as_ptr(), layout) }
        }
    }

    /// Grows the allocation at `ptr` from `old_layout` to `new_layout`,
    /// leaving any newly added bytes uninitialized.
    #[inline(always)]
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    /// Grows the allocation at `ptr` from `old_layout` to `new_layout`,
    /// zero-filling the newly added bytes.
    #[inline(always)]
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    /// Shrinks the allocation at `ptr` from `old_layout` to `new_layout`.
    ///
    /// Mirrors `grow_impl`: shrink-to-zero deallocates and returns a dangling
    /// pointer, same-alignment shrinks use `realloc`, and alignment changes
    /// fall back to allocate + copy + deallocate.
    #[inline(always)]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // Shrinking to zero bytes: free the block and hand back a
            // dangling pointer with the requested alignment, matching the
            // zero-size convention in `alloc_impl`.
            // SAFETY: conditions must be upheld by the caller
            0 => unsafe {
                self.deallocate(ptr, old_layout);
                Ok(NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                    invalid_mut(new_layout.align()),
                    0,
                )))
            },

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
                assume(new_size <= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                    ptr.as_ptr(),
                    new_size,
                )))
            },

            // Alignment changed, so `realloc` cannot be used: allocate a new
            // (smaller) block, copy the surviving prefix, free the old block.
            //
            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                let new_ptr = self.allocate(new_layout)?;
                core::ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr().cast(), new_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
189