use crate::alloc::{GlobalAlloc, Layout, System};
use crate::cmp;
use crate::ptr;

// The minimum alignment guaranteed by the architecture. This value is used to
// add fast paths for low alignment values.
#[cfg(any(
    target_arch = "x86",
    target_arch = "arm",
    target_arch = "m68k",
    target_arch = "csky",
    target_arch = "mips",
    target_arch = "mips32r6",
    target_arch = "powerpc",
    target_arch = "powerpc64",
    target_arch = "sparc",
    target_arch = "wasm32",
    target_arch = "hexagon",
    all(target_arch = "riscv32", not(any(target_os = "espidf", target_os = "zkvm"))),
    all(target_arch = "xtensa", not(target_os = "espidf")),
))]
pub const MIN_ALIGN: usize = 8;
#[cfg(any(
    target_arch = "x86_64",
    target_arch = "aarch64",
    target_arch = "arm64ec",
    target_arch = "loongarch64",
    target_arch = "mips64",
    target_arch = "mips64r6",
    target_arch = "s390x",
    target_arch = "sparc64",
    target_arch = "riscv64",
    target_arch = "wasm64",
))]
pub const MIN_ALIGN: usize = 16;
// The allocators on the esp-idf and zkvm platforms guarantee 4-byte alignment.
#[cfg(any(
    all(target_arch = "riscv32", any(target_os = "espidf", target_os = "zkvm")),
    all(target_arch = "xtensa", target_os = "espidf"),
))]
pub const MIN_ALIGN: usize = 4;
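
// Illustrative sketch only (not called by the platform allocators themselves):
// the kind of fast-path check that MIN_ALIGN enables. When the requested
// alignment is already covered by the guaranteed minimum, a `System` allocator
// impl can forward the request straight to a plain `malloc`-style call instead
// of an aligned allocation routine. The helper name below is hypothetical and
// exists purely for illustration.
#[allow(dead_code)]
fn fits_unaligned_fast_path(layout: Layout) -> bool {
    // The size check guards the degenerate case where the allocation is
    // smaller than its alignment, for which a plain allocation call may not
    // honor the requested alignment.
    layout.align() <= MIN_ALIGN && layout.align() <= layout.size()
}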

// Fallback used when the platform allocator cannot resize in place: allocate a
// new block with the same alignment, copy the smaller of the old and new
// sizes, then free the old block.
pub unsafe fn realloc_fallback(
    alloc: &System,
    ptr: *mut u8,
    old_layout: Layout,
    new_size: usize,
) -> *mut u8 {
    // Docs for GlobalAlloc::realloc require this to be valid:
    let new_layout = Layout::from_size_align_unchecked(new_size, old_layout.align());

    let new_ptr = GlobalAlloc::alloc(alloc, new_layout);
    if !new_ptr.is_null() {
        let size = cmp::min(old_layout.size(), new_size);
        ptr::copy_nonoverlapping(ptr, new_ptr, size);
        GlobalAlloc::dealloc(alloc, ptr, old_layout);
    }
    new_ptr
}
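
// Usage sketch (Unix-flavored; the exact platform impls differ): a platform
// `GlobalAlloc::realloc` for `System` typically tries the native `realloc`
// when the fast-path alignment check passes and otherwise falls back to
// `realloc_fallback` above, along the lines of:
//
//     unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
//         if layout.align() <= MIN_ALIGN && layout.align() <= new_size {
//             libc::realloc(ptr as *mut libc::c_void, new_size) as *mut u8
//         } else {
//             realloc_fallback(self, ptr, layout, new_size)
//         }
//     }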