//! Memory allocation APIs

#![stable(feature = "alloc_module", since = "1.28.0")]

#[cfg(not(test))]
use core::hint;

#[cfg(not(test))]
use core::ptr::{self, NonNull};

#[stable(feature = "alloc_module", since = "1.28.0")]
#[doc(inline)]
pub use core::alloc::*;

#[cfg(test)]
mod tests;

extern "Rust" {
    // These are the magic symbols to call the global allocator. rustc generates
    // them to call `__rg_alloc` etc. if there is a `#[global_allocator]` attribute
    // (the code expanding that attribute macro generates those functions), or to call
    // the default implementations in std (`__rdl_alloc` etc. in `library/std/src/alloc.rs`)
    // otherwise.
    // The rustc fork of LLVM 14 and earlier also special-cases these function
    // names to be able to optimize them like `malloc`, `realloc`, and `free`, respectively.
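    //
    // For example (a sketch, not part of this module), a crate selects a
    // global allocator with the stable `#[global_allocator]` attribute:
    //
    //     use std::alloc::System;
    //
    //     #[global_allocator]
    //     static GLOBAL: System = System;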
    #[rustc_allocator]
    #[rustc_nounwind]
    fn __rust_alloc(size: usize, align: usize) -> *mut u8;
    #[rustc_deallocator]
    #[rustc_nounwind]
    fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
    #[rustc_reallocator]
    #[rustc_nounwind]
    fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
    #[rustc_allocator_zeroed]
    #[rustc_nounwind]
    fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;

    static __rust_no_alloc_shim_is_unstable: u8;
}

/// The global memory allocator.
///
/// This type implements the [`Allocator`] trait by forwarding calls
/// to the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](self#functions).
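///
/// # Examples
///
/// A minimal sketch of direct use through the unstable [`Allocator`] trait
/// (requires the nightly `allocator_api` feature):
///
/// ```
/// #![feature(allocator_api)]
///
/// use std::alloc::{Allocator, Global, Layout};
///
/// let layout = Layout::new::<u64>();
/// // Allocate uninitialized memory for one `u64`, then free it again.
/// let ptr = Global.allocate(layout).expect("allocation failed");
/// unsafe { Global.deallocate(ptr.cast(), layout) };
/// ```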
#[unstable(feature = "allocator_api", issue = "32838")]
#[derive(Copy, Clone, Default, Debug)]
#[cfg(not(test))]
// The compiler needs to know when a `Box` uses the global allocator vs. a custom one.
#[lang = "global_alloc_ty"]
pub struct Global;

#[cfg(test)]
pub use std::alloc::Global;

/// Allocate memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `alloc` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     *(ptr as *mut u16) = 42;
///     assert_eq!(*(ptr as *mut u16), 42);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
pub unsafe fn alloc(layout: Layout) -> *mut u8 {
    unsafe {
        // Make sure we don't accidentally allow omitting the allocator shim in
        // stable code until it is actually stabilized.
        core::ptr::read_volatile(&__rust_no_alloc_shim_is_unstable);

        __rust_alloc(layout.size(), layout.align())
    }
}

/// Deallocate memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::dealloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `dealloc` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::dealloc`].
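///
/// # Examples
///
/// A minimal allocate/deallocate round trip, following the same pattern as the
/// [`alloc`] example:
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u32>();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     // Memory must be deallocated with the same layout it was allocated with.
///     dealloc(ptr, layout);
/// }
/// ```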
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
    unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
}

/// Reallocate memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::realloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `realloc` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::realloc`].
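///
/// # Examples
///
/// A sketch that grows an allocation; note that on success the memory must
/// afterwards be deallocated (or reallocated again) using the *new* size:
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, realloc, Layout};
///
/// unsafe {
///     let layout = Layout::array::<u16>(2).unwrap();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     // Grow the block from 2 to 4 `u16`s; the alignment stays the same.
///     let new_size = 4 * std::mem::size_of::<u16>();
///     let new_layout = Layout::from_size_align(new_size, layout.align()).unwrap();
///     let new_ptr = realloc(ptr, layout, new_size);
///     if new_ptr.is_null() {
///         handle_alloc_error(new_layout);
///     }
///
///     dealloc(new_ptr, new_layout);
/// }
/// ```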
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
    unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) }
}

/// Allocate zero-initialized memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc_zeroed`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `alloc_zeroed` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc_zeroed`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc_zeroed, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc_zeroed(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     assert_eq!(*(ptr as *mut u16), 0);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
    unsafe { __rust_alloc_zeroed(layout.size(), layout.align()) }
}

#[cfg(not(test))]
impl Global {
    #[inline]
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
            // SAFETY: `layout` is non-zero in size,
            size => unsafe {
                let raw_ptr = if zeroed { alloc_zeroed(layout) } else { alloc(layout) };
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, size))
            },
        }
    }

    // SAFETY: Same as `Allocator::grow`
    #[inline]
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero, as it is greater than or equal to `old_size`,
            // which this match arm guarantees is non-zero. Other conditions must be upheld
            // by the caller.
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size >= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                if zeroed {
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}

#[unstable(feature = "allocator_api", issue = "32838")]
#[cfg(not(test))]
unsafe impl Allocator for Global {
    #[inline]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    #[inline]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    #[inline]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        if layout.size() != 0 {
            // SAFETY: `layout` is non-zero in size,
            // other conditions must be upheld by the caller
            unsafe { dealloc(ptr.as_ptr(), layout) }
        }
    }

    #[inline]
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    #[inline]
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    #[inline]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // SAFETY: conditions must be upheld by the caller
            0 => unsafe {
                self.deallocate(ptr, old_layout);
                Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
            },

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size <= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                let new_ptr = self.allocate(new_layout)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}

/// The allocator for unique pointers.
#[cfg(all(not(no_global_oom_handling), not(test)))]
#[lang = "exchange_malloc"]
#[inline]
unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
    let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
    match Global.allocate(layout) {
        Ok(ptr) => ptr.as_mut_ptr(),
        Err(_) => handle_alloc_error(layout),
    }
}

// # Allocation error handler

#[cfg(not(no_global_oom_handling))]
extern "Rust" {
    // This is the magic symbol to call the global alloc error handler. rustc generates
    // it to call `__rg_oom` if there is a `#[alloc_error_handler]`, or to call the
    // default implementations below (`__rdl_oom`) otherwise.
    fn __rust_alloc_error_handler(size: usize, align: usize) -> !;
}

/// Signal a memory allocation error.
///
/// Callers of memory allocation APIs wishing to cease execution
/// in response to an allocation error are encouraged to call this function,
/// rather than directly invoking [`panic!`] or similar.
///
/// This function is guaranteed to diverge (not return normally with a value), but depending on
/// global configuration, it may either panic (resulting in unwinding or aborting as per
/// configuration for all panics), or abort the process (with no unwinding).
///
/// The default behavior is:
///
/// * If the binary links against `std` (typically the case), then
///   print a message to standard error and abort the process.
///   This behavior can be replaced with [`set_alloc_error_hook`] and [`take_alloc_error_hook`].
///   Future versions of Rust may panic by default instead.
///
/// * If the binary does not link against `std` (all of its crates are marked
///   [`#![no_std]`][no_std]), then call [`panic!`] with a message.
///   [The panic handler] applies as to any panic.
///
/// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html
/// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html
/// [The panic handler]: https://doc.rust-lang.org/reference/runtime.html#the-panic_handler-attribute
/// [no_std]: https://doc.rust-lang.org/reference/names/preludes.html#the-no_std-attribute
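///
/// # Examples
///
/// A typical call site, shown as a sketch; the small allocation here normally
/// succeeds, so the handler is only reached on failure:
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u32>();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         // Diverges: reports the failed request and never returns.
///         handle_alloc_error(layout);
///     }
///     dealloc(ptr, layout);
/// }
/// ```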
#[stable(feature = "global_alloc", since = "1.28.0")]
#[rustc_const_unstable(feature = "const_alloc_error", issue = "92523")]
#[cfg(all(not(no_global_oom_handling), not(test)))]
#[cold]
pub const fn handle_alloc_error(layout: Layout) -> ! {
    const fn ct_error(_: Layout) -> ! {
        panic!("allocation failed");
    }

    #[inline]
    fn rt_error(layout: Layout) -> ! {
        unsafe {
            __rust_alloc_error_handler(layout.size(), layout.align());
        }
    }

    #[cfg(not(feature = "panic_immediate_abort"))]
    {
        core::intrinsics::const_eval_select((layout,), ct_error, rt_error)
    }

    #[cfg(feature = "panic_immediate_abort")]
    ct_error(layout)
}

// For alloc tests, `std::alloc::handle_alloc_error` can be used directly.
#[cfg(all(not(no_global_oom_handling), test))]
pub use std::alloc::handle_alloc_error;

#[cfg(all(not(no_global_oom_handling), not(test)))]
#[doc(hidden)]
#[allow(unused_attributes)]
#[unstable(feature = "alloc_internals", issue = "none")]
pub mod __alloc_error_handler {
    // called via generated `__rust_alloc_error_handler` if there is no
    // `#[alloc_error_handler]`.
    #[rustc_std_internal_symbol]
    pub unsafe fn __rdl_oom(size: usize, _align: usize) -> ! {
        extern "Rust" {
            // This symbol is emitted by rustc next to __rust_alloc_error_handler.
            // Its value depends on the -Zoom={panic,abort} compiler option.
            static __rust_alloc_error_handler_should_panic: u8;
        }

        if unsafe { __rust_alloc_error_handler_should_panic != 0 } {
            panic!("memory allocation of {size} bytes failed")
        } else {
            core::panicking::panic_nounwind_fmt(
                format_args!("memory allocation of {size} bytes failed"),
                /* force_no_backtrace */ false,
            )
        }
    }
}

#[cfg(not(no_global_oom_handling))]
/// Specialize clones into pre-allocated, uninitialized memory.
/// Used by `Box::clone` and `Rc`/`Arc::make_mut`.
pub(crate) trait WriteCloneIntoRaw: Sized {
    unsafe fn write_clone_into_raw(&self, target: *mut Self);
}
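
// A sketch of the intended call pattern (the names here are hypothetical):
// a caller allocates uninitialized memory of the right layout first, then
// constructs the clone directly in place.
//
//     let target: *mut T = /* freshly allocated, uninitialized */;
//     unsafe { value.write_clone_into_raw(target) };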

#[cfg(not(no_global_oom_handling))]
impl<T: Clone> WriteCloneIntoRaw for T {
    #[inline]
    default unsafe fn write_clone_into_raw(&self, target: *mut Self) {
        // Having allocated *first* may allow the optimizer to create
        // the cloned value in-place, skipping the local and move.
        unsafe { target.write(self.clone()) };
    }
}

#[cfg(not(no_global_oom_handling))]
impl<T: Copy> WriteCloneIntoRaw for T {
    #[inline]
    unsafe fn write_clone_into_raw(&self, target: *mut Self) {
        // We can always copy in-place, without ever involving a local value.
        unsafe { target.copy_from_nonoverlapping(self, 1) };
    }
}