| 1 | //! Memory allocation APIs. |
| 2 | //! |
| 3 | //! In a given program, the standard library has one “global” memory allocator |
| 4 | //! that is used for example by `Box<T>` and `Vec<T>`. |
| 5 | //! |
//! Currently the default global allocator is unspecified. However, library
//! crate types such as `cdylib`s and `staticlib`s are guaranteed to use
//! [`System`] by default.
| 9 | //! |
| 10 | //! # The `#[global_allocator]` attribute |
| 11 | //! |
| 12 | //! This attribute allows configuring the choice of global allocator. |
| 13 | //! You can use this to implement a completely custom global allocator |
| 14 | //! to route all[^system-alloc] default allocation requests to a custom object. |
| 15 | //! |
| 16 | //! ```rust |
| 17 | //! use std::alloc::{GlobalAlloc, System, Layout}; |
| 18 | //! |
| 19 | //! struct MyAllocator; |
| 20 | //! |
| 21 | //! unsafe impl GlobalAlloc for MyAllocator { |
| 22 | //! unsafe fn alloc(&self, layout: Layout) -> *mut u8 { |
| 23 | //! unsafe { System.alloc(layout) } |
| 24 | //! } |
| 25 | //! |
| 26 | //! unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { |
| 27 | //! unsafe { System.dealloc(ptr, layout) } |
| 28 | //! } |
| 29 | //! } |
| 30 | //! |
| 31 | //! #[global_allocator] |
| 32 | //! static GLOBAL: MyAllocator = MyAllocator; |
| 33 | //! |
| 34 | //! fn main() { |
| 35 | //! // This `Vec` will allocate memory through `GLOBAL` above |
| 36 | //! let mut v = Vec::new(); |
| 37 | //! v.push(1); |
| 38 | //! } |
| 39 | //! ``` |
| 40 | //! |
| 41 | //! The attribute is used on a `static` item whose type implements the |
| 42 | //! [`GlobalAlloc`] trait. This type can be provided by an external library: |
| 43 | //! |
| 44 | //! ```rust,ignore (demonstrates crates.io usage) |
| 45 | //! use jemallocator::Jemalloc; |
| 46 | //! |
| 47 | //! #[global_allocator] |
| 48 | //! static GLOBAL: Jemalloc = Jemalloc; |
| 49 | //! |
| 50 | //! fn main() {} |
| 51 | //! ``` |
| 52 | //! |
| 53 | //! The `#[global_allocator]` can only be used once in a crate |
| 54 | //! or its recursive dependencies. |
| 55 | //! |
| 56 | //! [^system-alloc]: Note that the Rust standard library internals may still |
| 57 | //! directly call [`System`] when necessary (for example for the runtime |
| 58 | //! support typically required to implement a global allocator, see [re-entrance] on [`GlobalAlloc`] |
| 59 | //! for more details). |
| 60 | //! |
| 61 | //! [re-entrance]: trait.GlobalAlloc.html#re-entrance |
| 62 | |
| 63 | #![deny (unsafe_op_in_unsafe_fn)] |
| 64 | #![stable (feature = "alloc_module" , since = "1.28.0" )] |
| 65 | |
| 66 | use core::ptr::NonNull; |
| 67 | use core::sync::atomic::{AtomicBool, AtomicPtr, Ordering}; |
| 68 | use core::{hint, mem, ptr}; |
| 69 | |
| 70 | #[stable (feature = "alloc_module" , since = "1.28.0" )] |
| 71 | #[doc (inline)] |
| 72 | pub use alloc_crate::alloc::*; |
| 73 | |
| 74 | /// The default memory allocator provided by the operating system. |
| 75 | /// |
| 76 | /// This is based on `malloc` on Unix platforms and `HeapAlloc` on Windows, |
| 77 | /// plus related functions. However, it is not valid to mix use of the backing |
| 78 | /// system allocator with `System`, as this implementation may include extra |
| 79 | /// work, such as to serve alignment requests greater than the alignment |
| 80 | /// provided directly by the backing system allocator. |
| 81 | /// |
/// This type implements the [`GlobalAlloc`] trait. Currently the default
/// global allocator is unspecified. However, library crate types such as
/// `cdylib`s and `staticlib`s are guaranteed to use [`System`] by default
/// and as such work as if they had this definition:
| 86 | /// |
| 87 | /// ```rust |
| 88 | /// use std::alloc::System; |
| 89 | /// |
| 90 | /// #[global_allocator] |
| 91 | /// static A: System = System; |
| 92 | /// |
| 93 | /// fn main() { |
| 94 | /// let a = Box::new(4); // Allocates from the system allocator. |
| 95 | /// println!("{a}" ); |
| 96 | /// } |
| 97 | /// ``` |
| 98 | /// |
| 99 | /// You can also define your own wrapper around `System` if you'd like, such as |
| 100 | /// keeping track of the number of all bytes allocated: |
| 101 | /// |
| 102 | /// ```rust |
| 103 | /// use std::alloc::{System, GlobalAlloc, Layout}; |
| 104 | /// use std::sync::atomic::{AtomicUsize, Ordering::Relaxed}; |
| 105 | /// |
| 106 | /// struct Counter; |
| 107 | /// |
| 108 | /// static ALLOCATED: AtomicUsize = AtomicUsize::new(0); |
| 109 | /// |
| 110 | /// unsafe impl GlobalAlloc for Counter { |
| 111 | /// unsafe fn alloc(&self, layout: Layout) -> *mut u8 { |
| 112 | /// let ret = unsafe { System.alloc(layout) }; |
| 113 | /// if !ret.is_null() { |
| 114 | /// ALLOCATED.fetch_add(layout.size(), Relaxed); |
| 115 | /// } |
| 116 | /// ret |
| 117 | /// } |
| 118 | /// |
| 119 | /// unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { |
| 120 | /// unsafe { System.dealloc(ptr, layout); } |
| 121 | /// ALLOCATED.fetch_sub(layout.size(), Relaxed); |
| 122 | /// } |
| 123 | /// } |
| 124 | /// |
| 125 | /// #[global_allocator] |
| 126 | /// static A: Counter = Counter; |
| 127 | /// |
| 128 | /// fn main() { |
| 129 | /// println!("allocated bytes before main: {}" , ALLOCATED.load(Relaxed)); |
| 130 | /// } |
| 131 | /// ``` |
| 132 | /// |
| 133 | /// It can also be used directly to allocate memory independently of whatever |
| 134 | /// global allocator has been selected for a Rust program. For example if a Rust |
| 135 | /// program opts in to using jemalloc as the global allocator, `System` will |
| 136 | /// still allocate memory using `malloc` and `HeapAlloc`. |
#[stable (feature = "alloc_system_type" , since = "1.28.0" )]
#[derive(Debug, Default, Copy, Clone)]
// `System` is a zero-sized handle: all allocator state lives in the OS
// allocator itself, which is why the type can be trivially `Copy`/`Clone`
// and constructed as a bare value (`System`).
pub struct System;
| 140 | |
impl System {
    /// Shared implementation of `Allocator::allocate` and
    /// `Allocator::allocate_zeroed`.
    ///
    /// Zero-sized layouts are served without touching the OS allocator: a
    /// well-aligned dangling pointer is a valid result for them under the
    /// `Allocator` contract. Non-zero sizes are forwarded to the
    /// `GlobalAlloc` impl, and a null return is mapped to `AllocError`.
    #[inline ]
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            // Zero-sized request: never call into the system allocator.
            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling_ptr(), 0)),
            // SAFETY: `layout` is non-zero in size,
            size => unsafe {
                let raw_ptr = if zeroed {
                    GlobalAlloc::alloc_zeroed(self, layout)
                } else {
                    GlobalAlloc::alloc(self, layout)
                };
                // Null from the system allocator means allocation failure.
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, size))
            },
        }
    }

    /// Shared implementation of `Allocator::grow` and
    /// `Allocator::grow_zeroed`.
    ///
    /// Three cases: an old size of zero is a fresh allocation; equal
    /// alignments can use `realloc` (zeroing the newly exposed tail when
    /// requested); differing alignments require allocate-copy-deallocate,
    /// because `realloc` cannot change alignment.
    ///
    // SAFETY: Same as `Allocator::grow`
    #[inline ]
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            // Nothing was ever allocated for a zero-sized block (see
            // `alloc_impl`), so "growing" it is just a fresh allocation.
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero as `new_size` is greater than or equal to `old_size`
            // as required by safety conditions and the `old_size == 0` case was handled in the
            // previous match arm. Other conditions must be upheld by the caller
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size >= old_layout.size());

                let raw_ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                if zeroed {
                    // `realloc` preserves the first `old_size` bytes but leaves
                    // the tail uninitialized; zero it for `grow_zeroed`.
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
                Allocator::deallocate(self, ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
| 207 | |
// The Allocator impl checks the layout size to be non-zero and forwards to the GlobalAlloc impl,
// which is in `std::sys::*::alloc`. Zero-sized requests are handled entirely
// here (dangling pointers in, dangling pointers out) and never reach the OS.
#[unstable (feature = "allocator_api" , issue = "32838" )]
unsafe impl Allocator for System {
    #[inline ]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    #[inline ]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    #[inline ]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // Zero-sized blocks were never obtained from the system allocator
        // (see `alloc_impl`), so there is nothing to free for them.
        if layout.size() != 0 {
            // SAFETY: `layout` is non-zero in size,
            // other conditions must be upheld by the caller
            unsafe { GlobalAlloc::dealloc(self, ptr.as_ptr(), layout) }
        }
    }

    #[inline ]
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    #[inline ]
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    #[inline ]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        // Mirrors `grow_impl`: shrink-to-zero frees, equal alignments use
        // `realloc`, differing alignments must allocate-copy-deallocate.
        match new_layout.size() {
            // SAFETY: conditions must be upheld by the caller
            0 => unsafe {
                Allocator::deallocate(self, ptr, old_layout);
                Ok(NonNull::slice_from_raw_parts(new_layout.dangling_ptr(), 0))
            },

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size <= old_layout.size());

                let raw_ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                let new_ptr = Allocator::allocate(self, new_layout)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
                Allocator::deallocate(self, ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
| 296 | |
// The registered allocation error hook, stored type-erased in an
// `AtomicPtr<()>`. Null means "no custom hook registered", in which case
// `default_alloc_error_hook` is used. Non-null values are always `fn(Layout)`
// pointers produced by `set_alloc_error_hook` and recovered via `transmute`
// in `take_alloc_error_hook` / `rust_oom`.
static HOOK: AtomicPtr<()> = AtomicPtr::new(ptr::null_mut());
| 298 | |
| 299 | /// Registers a custom allocation error hook, replacing any that was previously registered. |
| 300 | /// |
| 301 | /// The allocation error hook is invoked when an infallible memory allocation fails — that is, |
| 302 | /// as a consequence of calling [`handle_alloc_error`] — before the runtime aborts. |
| 303 | /// |
| 304 | /// The allocation error hook is a global resource. [`take_alloc_error_hook`] may be used to |
| 305 | /// retrieve a previously registered hook and wrap or discard it. |
| 306 | /// |
| 307 | /// # What the provided `hook` function should expect |
| 308 | /// |
| 309 | /// The hook function is provided with a [`Layout`] struct which contains information |
| 310 | /// about the allocation that failed. |
| 311 | /// |
| 312 | /// The hook function may choose to panic or abort; in the event that it returns normally, this |
| 313 | /// will cause an immediate abort. |
| 314 | /// |
| 315 | /// Since [`take_alloc_error_hook`] is a safe function that allows retrieving the hook, the hook |
| 316 | /// function must be _sound_ to call even if no memory allocations were attempted. |
| 317 | /// |
| 318 | /// # The default hook |
| 319 | /// |
| 320 | /// The default hook, used if [`set_alloc_error_hook`] is never called, prints a message to |
| 321 | /// standard error (and then returns, causing the runtime to abort the process). |
| 322 | /// Compiler options may cause it to panic instead, and the default behavior may be changed |
| 323 | /// to panicking in future versions of Rust. |
| 324 | /// |
| 325 | /// # Examples |
| 326 | /// |
| 327 | /// ``` |
| 328 | /// #![feature(alloc_error_hook)] |
| 329 | /// |
| 330 | /// use std::alloc::{Layout, set_alloc_error_hook}; |
| 331 | /// |
| 332 | /// fn custom_alloc_error_hook(layout: Layout) { |
| 333 | /// panic!("memory allocation of {} bytes failed" , layout.size()); |
| 334 | /// } |
| 335 | /// |
| 336 | /// set_alloc_error_hook(custom_alloc_error_hook); |
| 337 | /// ``` |
| 338 | #[unstable (feature = "alloc_error_hook" , issue = "51245" )] |
| 339 | pub fn set_alloc_error_hook(hook: fn(Layout)) { |
| 340 | HOOK.store(hook as *mut (), Ordering::Release); |
| 341 | } |
| 342 | |
| 343 | /// Unregisters the current allocation error hook, returning it. |
| 344 | /// |
| 345 | /// *See also the function [`set_alloc_error_hook`].* |
| 346 | /// |
| 347 | /// If no custom hook is registered, the default hook will be returned. |
| 348 | #[unstable (feature = "alloc_error_hook" , issue = "51245" )] |
| 349 | pub fn take_alloc_error_hook() -> fn(Layout) { |
| 350 | let hook = HOOK.swap(ptr::null_mut(), Ordering::Acquire); |
| 351 | if hook.is_null() { default_alloc_error_hook } else { unsafe { mem::transmute(hook) } } |
| 352 | } |
| 353 | |
| 354 | #[optimize (size)] |
| 355 | fn default_alloc_error_hook(layout: Layout) { |
| 356 | if cfg!(panic = "immediate-abort" ) { |
| 357 | return; |
| 358 | } |
| 359 | |
| 360 | // This is the default path taken on OOM, and the only path taken on stable with std. |
| 361 | // Crucially, it does *not* call any user-defined code, and therefore users do not have to |
| 362 | // worry about allocation failure causing reentrancy issues. That makes it different from |
| 363 | // the default `__rdl_alloc_error_handler` defined in alloc (i.e., the default alloc error |
| 364 | // handler that is called when there is no `#[alloc_error_handler]`), which triggers a |
| 365 | // regular panic and thus can invoke a user-defined panic hook, executing arbitrary |
| 366 | // user-defined code. |
| 367 | |
| 368 | static PREV_ALLOC_FAILURE: AtomicBool = AtomicBool::new(false); |
| 369 | if PREV_ALLOC_FAILURE.swap(true, Ordering::Relaxed) { |
| 370 | // Don't try to print a backtrace if a previous alloc error happened. This likely means |
| 371 | // there is not enough memory to print a backtrace, although it could also mean that two |
| 372 | // threads concurrently run out of memory. |
| 373 | rtprintpanic!( |
| 374 | "memory allocation of {} bytes failed \nskipping backtrace printing to avoid potential recursion \n" , |
| 375 | layout.size() |
| 376 | ); |
| 377 | return; |
| 378 | } else { |
| 379 | rtprintpanic!("memory allocation of {} bytes failed \n" , layout.size()); |
| 380 | } |
| 381 | |
| 382 | let Some(mut out) = crate::sys::stdio::panic_output() else { |
| 383 | return; |
| 384 | }; |
| 385 | |
| 386 | // Use a lock to prevent mixed output in multithreading context. |
| 387 | // Some platforms also require it when printing a backtrace, like `SymFromAddr` on Windows. |
| 388 | // Make sure to not take this lock until after checking PREV_ALLOC_FAILURE to avoid deadlocks |
| 389 | // when there is too little memory to print a backtrace. |
| 390 | let mut lock = crate::sys::backtrace::lock(); |
| 391 | |
| 392 | match crate::panic::get_backtrace_style() { |
| 393 | Some(crate::panic::BacktraceStyle::Short) => { |
| 394 | drop(lock.print(&mut out, crate::backtrace_rs::PrintFmt::Short)) |
| 395 | } |
| 396 | Some(crate::panic::BacktraceStyle::Full) => { |
| 397 | drop(lock.print(&mut out, crate::backtrace_rs::PrintFmt::Full)) |
| 398 | } |
| 399 | Some(crate::panic::BacktraceStyle::Off) => { |
| 400 | use crate::io::Write; |
| 401 | let _ = writeln!( |
| 402 | out, |
| 403 | "note: run with `RUST_BACKTRACE=1` environment variable to display a \ |
| 404 | backtrace" |
| 405 | ); |
| 406 | if cfg!(miri) { |
| 407 | let _ = writeln!( |
| 408 | out, |
| 409 | "note: in Miri, you may have to set `MIRIFLAGS=-Zmiri-env-forward=RUST_BACKTRACE` \ |
| 410 | for the environment variable to have an effect" |
| 411 | ); |
| 412 | } |
| 413 | } |
| 414 | // If backtraces aren't supported or are forced-off, do nothing. |
| 415 | None => {} |
| 416 | } |
| 417 | } |
| 418 | |
// `rust_oom` is the standard library's `#[alloc_error_handler]`: it runs when
// an infallible allocation (e.g. inside `Vec::push`) fails, dispatches to the
// registered (or default) allocation error hook, and then aborts the process.
#[cfg (not(test))]
#[doc (hidden)]
#[alloc_error_handler]
#[unstable (feature = "alloc_internals" , issue = "none" )]
pub fn rust_oom(layout: Layout) -> ! {
    crate::sys::backtrace::__rust_end_short_backtrace(|| {
        // Load the registered hook; null means no custom hook was set.
        let hook = HOOK.load(Ordering::Acquire);
        // SAFETY(review): non-null values stored in `HOOK` always originate
        // from `set_alloc_error_hook`, i.e. they are type-erased `fn(Layout)`
        // pointers, so transmuting back is sound.
        let hook: fn(Layout) =
            if hook.is_null() { default_alloc_error_hook } else { unsafe { mem::transmute(hook) } };
        hook(layout);
        // The hook is allowed to return normally; the contract of
        // `handle_alloc_error` requires us to abort in that case.
        crate::process::abort()
    })
}
| 432 | |
#[cfg (not(test))]
#[doc (hidden)]
#[allow (unused_attributes)]
#[unstable (feature = "alloc_internals" , issue = "none" )]
pub mod __default_lib_allocator {
    use super::{GlobalAlloc, Layout, System};
    // These magic symbol names are used as a fallback for implementing the
    // `__rust_alloc` etc symbols (see `src/liballoc/alloc.rs`) when there is
    // no `#[global_allocator]` attribute.

    // for symbol names src/librustc_ast/expand/allocator.rs
    // for signatures src/librustc_allocator/lib.rs

    // linkage directives are provided as part of the current compiler allocator
    // ABI

    /// Fallback for `__rust_alloc`: builds a `Layout` from the raw
    /// size/align pair and forwards to `System`.
    ///
    /// # Safety
    /// Callers must uphold the requirements of `Layout::from_size_align`
    /// (valid, non-overflowing size/align) and `GlobalAlloc::alloc`.
    #[rustc_std_internal_symbol ]
    pub unsafe extern "C" fn __rdl_alloc(size: usize, align: usize) -> *mut u8 {
        // SAFETY: see the guarantees expected by `Layout::from_size_align` and
        // `GlobalAlloc::alloc`.
        unsafe {
            let layout = Layout::from_size_align_unchecked(size, align);
            System.alloc(layout)
        }
    }

    /// Fallback for `__rust_dealloc`: frees a block previously returned by
    /// one of these shims with the same size/align.
    ///
    /// # Safety
    /// Callers must uphold the requirements of `Layout::from_size_align`
    /// and `GlobalAlloc::dealloc`.
    #[rustc_std_internal_symbol ]
    pub unsafe extern "C" fn __rdl_dealloc(ptr: *mut u8, size: usize, align: usize) {
        // SAFETY: see the guarantees expected by `Layout::from_size_align` and
        // `GlobalAlloc::dealloc`.
        unsafe { System.dealloc(ptr, Layout::from_size_align_unchecked(size, align)) }
    }

    /// Fallback for `__rust_realloc`: resizes a block allocated with
    /// `old_size`/`align` to `new_size` bytes (alignment is unchanged).
    ///
    /// # Safety
    /// Callers must uphold the requirements of `Layout::from_size_align`
    /// and `GlobalAlloc::realloc`.
    #[rustc_std_internal_symbol ]
    pub unsafe extern "C" fn __rdl_realloc(
        ptr: *mut u8,
        old_size: usize,
        align: usize,
        new_size: usize,
    ) -> *mut u8 {
        // SAFETY: see the guarantees expected by `Layout::from_size_align` and
        // `GlobalAlloc::realloc`.
        unsafe {
            let old_layout = Layout::from_size_align_unchecked(old_size, align);
            System.realloc(ptr, old_layout, new_size)
        }
    }

    /// Fallback for `__rust_alloc_zeroed`: like `__rdl_alloc` but the
    /// returned memory is zero-initialized.
    ///
    /// # Safety
    /// Callers must uphold the requirements of `Layout::from_size_align`
    /// and `GlobalAlloc::alloc_zeroed`.
    #[rustc_std_internal_symbol ]
    pub unsafe extern "C" fn __rdl_alloc_zeroed(size: usize, align: usize) -> *mut u8 {
        // SAFETY: see the guarantees expected by `Layout::from_size_align` and
        // `GlobalAlloc::alloc_zeroed`.
        unsafe {
            let layout = Layout::from_size_align_unchecked(size, align);
            System.alloc_zeroed(layout)
        }
    }
}
| 491 | |