| 1 | use core::ptr::NonNull;
|
| 2 |
|
| 3 | use alloc_crate::alloc::{alloc, alloc_zeroed, dealloc, realloc};
|
| 4 |
|
| 5 | use crate::stable::{assume, invalid_mut};
|
| 6 |
|
| 7 | use super::{AllocError, Allocator, Layout};
|
| 8 |
|
/// The global memory allocator.
///
/// Implements the [`Allocator`] trait by delegating every call to the
/// allocator registered with the `#[global_allocator]` attribute, falling
/// back to the `std` crate's default allocator when none is registered.
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](crate#functions).
#[derive(Clone, Copy, Debug, Default)]
pub struct Global;
|
| 19 |
|
impl Global {
    /// Shared backend for [`Allocator::allocate`] and
    /// [`Allocator::allocate_zeroed`].
    ///
    /// For a zero-sized `layout` no allocation is performed: a dangling but
    /// well-aligned slice pointer of length 0 is returned. Otherwise the
    /// request is forwarded to the registered global allocator, zeroing the
    /// memory when `zeroed` is true. Returns `Err(AllocError)` if the global
    /// allocator returns null.
    #[inline (always)]
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            // Zero-sized request: don't touch the allocator at all; hand back
            // a provenance-free dangling pointer at the requested alignment.
            // SAFETY: `layout.align()` is always non-zero, so the pointer is
            // non-null.
            0 => Ok(unsafe {
                NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                    invalid_mut(layout.align()),
                    0,
                ))
            }),
            // SAFETY: `layout` is non-zero in size,
            size => unsafe {
                let raw_ptr = if zeroed {
                    alloc_zeroed(layout)
                } else {
                    alloc(layout)
                };
                // A null return from the global allocator signals failure.
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                // SAFETY: `ptr` is non-null and the allocation is exactly
                // `size` bytes, so the fat slice pointer is valid.
                Ok(NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                    ptr.as_ptr(),
                    size,
                )))
            },
        }
    }

    /// Shared backend for [`Allocator::grow`] and [`Allocator::grow_zeroed`].
    ///
    // SAFETY: Same as `Allocator::grow`
    #[inline (always)]
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            // Old allocation was zero-sized, i.e. nothing was actually
            // allocated — growing is simply a fresh allocation.
            0 => self.alloc_impl(new_layout, zeroed),

            // Same alignment: let the global allocator's `realloc` do the
            // move/extend in place where possible.
            // SAFETY: `new_size` is non-zero because it is greater than or
            // equal to `old_size`, which is non-zero in this arm. Other
            // conditions must be upheld by the caller.
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
                assume(new_size >= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                // `realloc` preserves the old contents but leaves the grown
                // tail uninitialized — clear it when zeroing was requested.
                if zeroed {
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                    ptr.as_ptr(),
                    new_size,
                )))
            },

            // Alignment changed: `realloc` can't be used, so allocate a new
            // block, copy the old contents over, and free the old block.
            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                core::ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr().cast(), old_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
|
| 96 |
|
unsafe impl Allocator for Global {
    /// Allocates uninitialized memory for `layout` via the global allocator.
    #[inline (always)]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    /// Allocates zero-initialized memory for `layout` via the global allocator.
    #[inline (always)]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    /// Deallocates a block previously allocated by this allocator.
    ///
    /// Zero-sized layouts are a no-op, mirroring `alloc_impl`, which never
    /// touches the allocator for size 0.
    #[inline (always)]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        if layout.size() != 0 {
            // SAFETY: `layout` is non-zero in size,
            // other conditions must be upheld by the caller
            unsafe { dealloc(ptr.as_ptr(), layout) }
        }
    }

    /// Grows the allocation at `ptr` from `old_layout` to `new_layout`,
    /// leaving any newly added bytes uninitialized.
    #[inline (always)]
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    /// Grows the allocation at `ptr` from `old_layout` to `new_layout`,
    /// zeroing any newly added bytes.
    #[inline (always)]
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    /// Shrinks the allocation at `ptr` from `old_layout` to `new_layout`.
    ///
    /// Three cases: shrink-to-zero deallocates and returns a dangling slice,
    /// a same-alignment shrink uses `realloc`, and an alignment change falls
    /// back to allocate + copy + deallocate.
    #[inline (always)]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // Shrinking to zero: free the old block and return a dangling,
            // aligned, zero-length slice pointer (same shape as `alloc_impl`).
            // SAFETY: conditions must be upheld by the caller
            0 => unsafe {
                self.deallocate(ptr, old_layout);
                // SAFETY: `new_layout.align()` is non-zero, so the pointer is
                // non-null.
                Ok(NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                    invalid_mut(new_layout.align()),
                    0,
                )))
            },

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
                assume(new_size <= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                    ptr.as_ptr(),
                    new_size,
                )))
            },

            // Alignment changed: `realloc` can't be used; allocate a fresh
            // (smaller) block, copy the surviving prefix, free the old block.
            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                let new_ptr = self.allocate(new_layout)?;
                core::ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr().cast(), new_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
|
| 188 | |