1 | //! Memory allocation APIs. |
2 | //! |
3 | //! In a given program, the standard library has one “global” memory allocator |
4 | //! that is used for example by `Box<T>` and `Vec<T>`. |
5 | //! |
//! Currently the default global allocator is unspecified. Libraries, however,
//! like `cdylib`s and `staticlib`s are guaranteed to use the [`System`]
//! allocator by default.
9 | //! |
10 | //! # The `#[global_allocator]` attribute |
11 | //! |
12 | //! This attribute allows configuring the choice of global allocator. |
13 | //! You can use this to implement a completely custom global allocator |
14 | //! to route all default allocation requests to a custom object. |
15 | //! |
16 | //! ```rust |
17 | //! use std::alloc::{GlobalAlloc, System, Layout}; |
18 | //! |
19 | //! struct MyAllocator; |
20 | //! |
21 | //! unsafe impl GlobalAlloc for MyAllocator { |
22 | //! unsafe fn alloc(&self, layout: Layout) -> *mut u8 { |
23 | //! System.alloc(layout) |
24 | //! } |
25 | //! |
26 | //! unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { |
27 | //! System.dealloc(ptr, layout) |
28 | //! } |
29 | //! } |
30 | //! |
31 | //! #[global_allocator] |
32 | //! static GLOBAL: MyAllocator = MyAllocator; |
33 | //! |
34 | //! fn main() { |
35 | //! // This `Vec` will allocate memory through `GLOBAL` above |
36 | //! let mut v = Vec::new(); |
37 | //! v.push(1); |
38 | //! } |
39 | //! ``` |
40 | //! |
41 | //! The attribute is used on a `static` item whose type implements the |
42 | //! [`GlobalAlloc`] trait. This type can be provided by an external library: |
43 | //! |
44 | //! ```rust,ignore (demonstrates crates.io usage) |
45 | //! use jemallocator::Jemalloc; |
46 | //! |
47 | //! #[global_allocator] |
48 | //! static GLOBAL: Jemalloc = Jemalloc; |
49 | //! |
50 | //! fn main() {} |
51 | //! ``` |
52 | //! |
//! The `#[global_allocator]` attribute can only be used once across a crate
//! and its recursive dependencies.
55 | |
#![deny(unsafe_op_in_unsafe_fn)]
#![stable(feature = "alloc_module", since = "1.28.0")]
58 | |
59 | use core::hint; |
60 | use core::ptr::NonNull; |
61 | use core::sync::atomic::{AtomicPtr, Ordering}; |
62 | use core::{mem, ptr}; |
63 | |
#[stable(feature = "alloc_module", since = "1.28.0")]
#[doc(inline)]
66 | pub use alloc_crate::alloc::*; |
67 | |
68 | /// The default memory allocator provided by the operating system. |
69 | /// |
70 | /// This is based on `malloc` on Unix platforms and `HeapAlloc` on Windows, |
71 | /// plus related functions. However, it is not valid to mix use of the backing |
72 | /// system allocator with `System`, as this implementation may include extra |
73 | /// work, such as to serve alignment requests greater than the alignment |
74 | /// provided directly by the backing system allocator. |
75 | /// |
76 | /// This type implements the `GlobalAlloc` trait and Rust programs by default |
77 | /// work as if they had this definition: |
78 | /// |
79 | /// ```rust |
80 | /// use std::alloc::System; |
81 | /// |
82 | /// #[global_allocator] |
83 | /// static A: System = System; |
84 | /// |
85 | /// fn main() { |
86 | /// let a = Box::new(4); // Allocates from the system allocator. |
87 | /// println!("{a}" ); |
88 | /// } |
89 | /// ``` |
90 | /// |
/// You can also define your own wrapper around `System` if you'd like, such as
/// keeping track of the number of bytes currently allocated:
93 | /// |
94 | /// ```rust |
95 | /// use std::alloc::{System, GlobalAlloc, Layout}; |
96 | /// use std::sync::atomic::{AtomicUsize, Ordering::Relaxed}; |
97 | /// |
98 | /// struct Counter; |
99 | /// |
100 | /// static ALLOCATED: AtomicUsize = AtomicUsize::new(0); |
101 | /// |
102 | /// unsafe impl GlobalAlloc for Counter { |
103 | /// unsafe fn alloc(&self, layout: Layout) -> *mut u8 { |
104 | /// let ret = System.alloc(layout); |
105 | /// if !ret.is_null() { |
106 | /// ALLOCATED.fetch_add(layout.size(), Relaxed); |
107 | /// } |
108 | /// ret |
109 | /// } |
110 | /// |
111 | /// unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { |
112 | /// System.dealloc(ptr, layout); |
113 | /// ALLOCATED.fetch_sub(layout.size(), Relaxed); |
114 | /// } |
115 | /// } |
116 | /// |
117 | /// #[global_allocator] |
118 | /// static A: Counter = Counter; |
119 | /// |
120 | /// fn main() { |
121 | /// println!("allocated bytes before main: {}" , ALLOCATED.load(Relaxed)); |
122 | /// } |
123 | /// ``` |
124 | /// |
125 | /// It can also be used directly to allocate memory independently of whatever |
126 | /// global allocator has been selected for a Rust program. For example if a Rust |
127 | /// program opts in to using jemalloc as the global allocator, `System` will |
128 | /// still allocate memory using `malloc` and `HeapAlloc`. |
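///
/// A minimal sketch of using `System` directly through the [`GlobalAlloc`]
/// interface (the `u32` allocation here is just an illustration):
///
/// ```rust
/// use std::alloc::{GlobalAlloc, Layout, System};
///
/// let layout = Layout::new::<u32>();
///
/// // SAFETY: `layout` has non-zero size.
/// unsafe {
///     let ptr = System.alloc(layout);
///     if !ptr.is_null() {
///         ptr.cast::<u32>().write(42);
///         assert_eq!(ptr.cast::<u32>().read(), 42);
///         System.dealloc(ptr, layout);
///     }
/// }
/// ```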
#[stable(feature = "alloc_system_type", since = "1.28.0")]
#[derive(Debug, Default, Copy, Clone)]
131 | pub struct System; |
132 | |
133 | impl System { |
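    // Shared implementation of `allocate`/`allocate_zeroed`: zero-sized
    // requests never reach the system allocator and are served with a
    // dangling, well-aligned pointer instead.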
    #[inline]
135 | fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> { |
136 | match layout.size() { |
137 | 0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)), |
138 | // SAFETY: `layout` is non-zero in size, |
139 | size => unsafe { |
140 | let raw_ptr = if zeroed { |
141 | GlobalAlloc::alloc_zeroed(self, layout) |
142 | } else { |
143 | GlobalAlloc::alloc(self, layout) |
144 | }; |
145 | let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?; |
146 | Ok(NonNull::slice_from_raw_parts(ptr, size)) |
147 | }, |
148 | } |
149 | } |
150 | |
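    // Shared implementation of `grow`/`grow_zeroed`: a zero-sized old
    // allocation becomes a fresh allocation, same-alignment growth goes
    // through `realloc` (zeroing the tail when requested), and an alignment
    // change falls back to allocate, copy `old_size` bytes, and deallocate.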
151 | // SAFETY: Same as `Allocator::grow` |
    #[inline]
153 | unsafe fn grow_impl( |
154 | &self, |
155 | ptr: NonNull<u8>, |
156 | old_layout: Layout, |
157 | new_layout: Layout, |
158 | zeroed: bool, |
159 | ) -> Result<NonNull<[u8]>, AllocError> { |
160 | debug_assert!( |
161 | new_layout.size() >= old_layout.size(), |
162 | "`new_layout.size()` must be greater than or equal to `old_layout.size()`" |
163 | ); |
164 | |
165 | match old_layout.size() { |
166 | 0 => self.alloc_impl(new_layout, zeroed), |
167 | |
168 | // SAFETY: `new_size` is non-zero as `new_size` is greater than or equal to `old_size` |
169 | // as required by safety conditions and the `old_size == 0` case was handled in the |
170 | // previous match arm. Other conditions must be upheld by the caller |
171 | old_size if old_layout.align() == new_layout.align() => unsafe { |
172 | let new_size = new_layout.size(); |
173 | |
174 | // `realloc` probably checks for `new_size >= old_layout.size()` or something similar. |
175 | hint::assert_unchecked(new_size >= old_layout.size()); |
176 | |
177 | let raw_ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), old_layout, new_size); |
178 | let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?; |
179 | if zeroed { |
180 | raw_ptr.add(old_size).write_bytes(0, new_size - old_size); |
181 | } |
182 | Ok(NonNull::slice_from_raw_parts(ptr, new_size)) |
183 | }, |
184 | |
185 | // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`, |
186 | // both the old and new memory allocation are valid for reads and writes for `old_size` |
187 | // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap |
188 | // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract |
189 | // for `dealloc` must be upheld by the caller. |
190 | old_size => unsafe { |
191 | let new_ptr = self.alloc_impl(new_layout, zeroed)?; |
192 | ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size); |
193 | Allocator::deallocate(self, ptr, old_layout); |
194 | Ok(new_ptr) |
195 | }, |
196 | } |
197 | } |
198 | } |
199 | |
200 | // The Allocator impl checks the layout size to be non-zero and forwards to the GlobalAlloc impl, |
201 | // which is in `std::sys::*::alloc`. |
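/// A minimal sketch of what this impl enables, assuming the unstable
/// `allocator_api` feature: routing a single collection through `System`
/// regardless of the chosen global allocator.
///
/// ```
/// #![feature(allocator_api)]
///
/// use std::alloc::System;
///
/// // This `Vec` allocates through `System`, bypassing `#[global_allocator]`.
/// let mut v = Vec::new_in(System);
/// v.push(1u32);
/// assert_eq!(v[0], 1);
/// ```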
#[unstable(feature = "allocator_api", issue = "32838")]
203 | unsafe impl Allocator for System { |
    #[inline]
205 | fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> { |
206 | self.alloc_impl(layout, false) |
207 | } |
208 | |
    #[inline]
210 | fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> { |
211 | self.alloc_impl(layout, true) |
212 | } |
213 | |
    #[inline]
215 | unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) { |
216 | if layout.size() != 0 { |
217 | // SAFETY: `layout` is non-zero in size, |
218 | // other conditions must be upheld by the caller |
219 | unsafe { GlobalAlloc::dealloc(self, ptr.as_ptr(), layout) } |
220 | } |
221 | } |
222 | |
    #[inline]
224 | unsafe fn grow( |
225 | &self, |
226 | ptr: NonNull<u8>, |
227 | old_layout: Layout, |
228 | new_layout: Layout, |
229 | ) -> Result<NonNull<[u8]>, AllocError> { |
230 | // SAFETY: all conditions must be upheld by the caller |
231 | unsafe { self.grow_impl(ptr, old_layout, new_layout, false) } |
232 | } |
233 | |
    #[inline]
235 | unsafe fn grow_zeroed( |
236 | &self, |
237 | ptr: NonNull<u8>, |
238 | old_layout: Layout, |
239 | new_layout: Layout, |
240 | ) -> Result<NonNull<[u8]>, AllocError> { |
241 | // SAFETY: all conditions must be upheld by the caller |
242 | unsafe { self.grow_impl(ptr, old_layout, new_layout, true) } |
243 | } |
244 | |
    #[inline]
246 | unsafe fn shrink( |
247 | &self, |
248 | ptr: NonNull<u8>, |
249 | old_layout: Layout, |
250 | new_layout: Layout, |
251 | ) -> Result<NonNull<[u8]>, AllocError> { |
252 | debug_assert!( |
253 | new_layout.size() <= old_layout.size(), |
254 | "`new_layout.size()` must be smaller than or equal to `old_layout.size()`" |
255 | ); |
256 | |
257 | match new_layout.size() { |
258 | // SAFETY: conditions must be upheld by the caller |
259 | 0 => unsafe { |
260 | Allocator::deallocate(self, ptr, old_layout); |
261 | Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0)) |
262 | }, |
263 | |
264 | // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller |
265 | new_size if old_layout.align() == new_layout.align() => unsafe { |
266 | // `realloc` probably checks for `new_size <= old_layout.size()` or something similar. |
267 | hint::assert_unchecked(new_size <= old_layout.size()); |
268 | |
269 | let raw_ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), old_layout, new_size); |
270 | let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?; |
271 | Ok(NonNull::slice_from_raw_parts(ptr, new_size)) |
272 | }, |
273 | |
274 | // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`, |
275 | // both the old and new memory allocation are valid for reads and writes for `new_size` |
276 | // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap |
277 | // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract |
278 | // for `dealloc` must be upheld by the caller. |
279 | new_size => unsafe { |
280 | let new_ptr = Allocator::allocate(self, new_layout)?; |
281 | ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size); |
282 | Allocator::deallocate(self, ptr, old_layout); |
283 | Ok(new_ptr) |
284 | }, |
285 | } |
286 | } |
287 | } |
288 | |
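// The currently registered allocation error hook, stored type-erased as a raw
// pointer; a null pointer means `default_alloc_error_hook` is used.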
289 | static HOOK: AtomicPtr<()> = AtomicPtr::new(ptr::null_mut()); |
290 | |
291 | /// Registers a custom allocation error hook, replacing any that was previously registered. |
292 | /// |
293 | /// The allocation error hook is invoked when an infallible memory allocation fails — that is, |
294 | /// as a consequence of calling [`handle_alloc_error`] — before the runtime aborts. |
295 | /// |
296 | /// The allocation error hook is a global resource. [`take_alloc_error_hook`] may be used to |
297 | /// retrieve a previously registered hook and wrap or discard it. |
298 | /// |
299 | /// # What the provided `hook` function should expect |
300 | /// |
301 | /// The hook function is provided with a [`Layout`] struct which contains information |
302 | /// about the allocation that failed. |
303 | /// |
304 | /// The hook function may choose to panic or abort; in the event that it returns normally, this |
305 | /// will cause an immediate abort. |
306 | /// |
307 | /// Since [`take_alloc_error_hook`] is a safe function that allows retrieving the hook, the hook |
308 | /// function must be _sound_ to call even if no memory allocations were attempted. |
309 | /// |
310 | /// # The default hook |
311 | /// |
312 | /// The default hook, used if [`set_alloc_error_hook`] is never called, prints a message to |
313 | /// standard error (and then returns, causing the runtime to abort the process). |
314 | /// Compiler options may cause it to panic instead, and the default behavior may be changed |
315 | /// to panicking in future versions of Rust. |
316 | /// |
317 | /// # Examples |
318 | /// |
319 | /// ``` |
320 | /// #![feature(alloc_error_hook)] |
321 | /// |
322 | /// use std::alloc::{Layout, set_alloc_error_hook}; |
323 | /// |
324 | /// fn custom_alloc_error_hook(layout: Layout) { |
325 | /// panic!("memory allocation of {} bytes failed" , layout.size()); |
326 | /// } |
327 | /// |
328 | /// set_alloc_error_hook(custom_alloc_error_hook); |
329 | /// ``` |
#[unstable(feature = "alloc_error_hook", issue = "51245")]
331 | pub fn set_alloc_error_hook(hook: fn(Layout)) { |
    HOOK.store(hook as *mut (), Ordering::Release);
333 | } |
334 | |
335 | /// Unregisters the current allocation error hook, returning it. |
336 | /// |
337 | /// *See also the function [`set_alloc_error_hook`].* |
338 | /// |
339 | /// If no custom hook is registered, the default hook will be returned. |
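///
/// # Examples
///
/// A minimal sketch of wrapping whatever hook was previously registered; the
/// `PREVIOUS_HOOK` static and `wrapping_hook` function are illustrative names,
/// not part of any API:
///
/// ```
/// #![feature(alloc_error_hook)]
///
/// use std::alloc::{set_alloc_error_hook, take_alloc_error_hook, Layout};
/// use std::sync::OnceLock;
///
/// // Stash the hook that was registered before ours.
/// static PREVIOUS_HOOK: OnceLock<fn(Layout)> = OnceLock::new();
///
/// fn wrapping_hook(layout: Layout) {
///     eprintln!("allocation of {} bytes failed", layout.size());
///     // Delegate to the hook that was registered before this one.
///     if let Some(prev) = PREVIOUS_HOOK.get() {
///         prev(layout);
///     }
/// }
///
/// fn main() {
///     let _ = PREVIOUS_HOOK.set(take_alloc_error_hook());
///     set_alloc_error_hook(wrapping_hook);
/// }
/// ```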
#[unstable(feature = "alloc_error_hook", issue = "51245")]
341 | pub fn take_alloc_error_hook() -> fn(Layout) { |
    let hook = HOOK.swap(ptr::null_mut(), Ordering::Acquire);
    if hook.is_null() { default_alloc_error_hook } else { unsafe { mem::transmute(hook) } }
344 | } |
345 | |
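// The hook used when no custom hook is registered: depending on the `-Zoom`
// setting it either panics or prints to standard error and returns (after
// which the caller aborts the process).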
346 | fn default_alloc_error_hook(layout: Layout) { |
347 | extern "Rust" { |
348 | // This symbol is emitted by rustc next to __rust_alloc_error_handler. |
349 | // Its value depends on the -Zoom={panic,abort} compiler option. |
350 | static __rust_alloc_error_handler_should_panic: u8; |
351 | } |
352 | |
353 | if unsafe { __rust_alloc_error_handler_should_panic != 0 } { |
        panic!("memory allocation of {} bytes failed", layout.size());
355 | } else { |
        rtprintpanic!("memory allocation of {} bytes failed\n", layout.size());
357 | } |
358 | } |
359 | |
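// Runs the registered allocation error hook (or the default one), then aborts
// the process.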
#[cfg(not(test))]
#[doc(hidden)]
#[alloc_error_handler]
#[unstable(feature = "alloc_internals", issue = "none")]
364 | pub fn rust_oom(layout: Layout) -> ! { |
    let hook = HOOK.load(Ordering::Acquire);
    let hook: fn(Layout) =
        if hook.is_null() { default_alloc_error_hook } else { unsafe { mem::transmute(hook) } };
368 | hook(layout); |
369 | crate::process::abort() |
370 | } |
371 | |
#[cfg(not(test))]
#[doc(hidden)]
#[allow(unused_attributes)]
#[unstable(feature = "alloc_internals", issue = "none")]
376 | pub mod __default_lib_allocator { |
377 | use super::{GlobalAlloc, Layout, System}; |
378 | // These magic symbol names are used as a fallback for implementing the |
379 | // `__rust_alloc` etc symbols (see `src/liballoc/alloc.rs`) when there is |
380 | // no `#[global_allocator]` attribute. |
381 | |
382 | // for symbol names src/librustc_ast/expand/allocator.rs |
383 | // for signatures src/librustc_allocator/lib.rs |
384 | |
385 | // linkage directives are provided as part of the current compiler allocator |
386 | // ABI |
387 | |
    #[rustc_std_internal_symbol]
389 | pub unsafe extern "C" fn __rdl_alloc(size: usize, align: usize) -> *mut u8 { |
390 | // SAFETY: see the guarantees expected by `Layout::from_size_align` and |
391 | // `GlobalAlloc::alloc`. |
392 | unsafe { |
393 | let layout = Layout::from_size_align_unchecked(size, align); |
394 | System.alloc(layout) |
395 | } |
396 | } |
397 | |
    #[rustc_std_internal_symbol]
399 | pub unsafe extern "C" fn __rdl_dealloc(ptr: *mut u8, size: usize, align: usize) { |
400 | // SAFETY: see the guarantees expected by `Layout::from_size_align` and |
401 | // `GlobalAlloc::dealloc`. |
402 | unsafe { System.dealloc(ptr, Layout::from_size_align_unchecked(size, align)) } |
403 | } |
404 | |
    #[rustc_std_internal_symbol]
406 | pub unsafe extern "C" fn __rdl_realloc( |
407 | ptr: *mut u8, |
408 | old_size: usize, |
409 | align: usize, |
410 | new_size: usize, |
411 | ) -> *mut u8 { |
412 | // SAFETY: see the guarantees expected by `Layout::from_size_align` and |
413 | // `GlobalAlloc::realloc`. |
414 | unsafe { |
415 | let old_layout = Layout::from_size_align_unchecked(old_size, align); |
416 | System.realloc(ptr, old_layout, new_size) |
417 | } |
418 | } |
419 | |
    #[rustc_std_internal_symbol]
421 | pub unsafe extern "C" fn __rdl_alloc_zeroed(size: usize, align: usize) -> *mut u8 { |
422 | // SAFETY: see the guarantees expected by `Layout::from_size_align` and |
423 | // `GlobalAlloc::alloc_zeroed`. |
424 | unsafe { |
425 | let layout = Layout::from_size_align_unchecked(size, align); |
426 | System.alloc_zeroed(layout) |
427 | } |
428 | } |
429 | } |
430 | |