// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Bindings for jemalloc as an allocator
//!
//! This crate provides bindings to jemalloc as a memory allocator for Rust.
//! It mainly exports one type, `Jemalloc`, which implements the
//! `GlobalAlloc` trait and optionally the `Alloc` trait,
//! and is suitable both as a memory allocator and as a global allocator.
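//!
//! A minimal usage sketch, installing `Jemalloc` as the global allocator (the
//! crate name `tikv_jemallocator` is an assumption based on the
//! `tikv_jemalloc_sys` dependency used below):
//!
//! ```
//! use tikv_jemallocator::Jemalloc;
//!
//! // Route all heap allocations of the program through jemalloc.
//! #[global_allocator]
//! static GLOBAL: Jemalloc = Jemalloc;
//!
//! fn main() {}
//! ```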

#![cfg_attr(feature = "alloc_trait", feature(allocator_api))]
// TODO: rename the following lint on next minor bump
#![allow(renamed_and_removed_lints)]
#![deny(missing_docs, broken_intra_doc_links)]
#![no_std]

#[cfg(feature = "alloc_trait")]
use core::alloc::{Alloc, AllocErr, CannotReallocInPlace, Excess};
use core::alloc::{GlobalAlloc, Layout};
#[cfg(feature = "alloc_trait")]
use core::ptr::NonNull;

use libc::{c_int, c_void};

// This constant equals _Alignof(max_align_t) and is platform-specific. It
// contains the _maximum_ alignment that the memory allocations returned by the
// C standard library memory allocation APIs (e.g. `malloc`) are guaranteed to
// have.
//
// The memory allocation APIs are required to return memory that can fit any
// object whose fundamental alignment is <= _Alignof(max_align_t).
//
// In C, there are no ZSTs, and the size of all types is a multiple of their
// alignment (size >= align). So for allocations with size <=
// _Alignof(max_align_t), the malloc APIs return memory whose alignment is
// either the requested size if it's a power-of-two, or the next smaller
// power-of-two.
#[cfg(any(
    target_arch = "arm",
    target_arch = "mips",
    target_arch = "mipsel",
    target_arch = "powerpc"
))]
const ALIGNOF_MAX_ALIGN_T: usize = 8;
#[cfg(any(
    target_arch = "x86",
    target_arch = "x86_64",
    target_arch = "aarch64",
    target_arch = "powerpc64",
    target_arch = "powerpc64le",
    target_arch = "loongarch64",
    target_arch = "mips64",
    target_arch = "riscv64",
    target_arch = "s390x",
    target_arch = "sparc64"
))]
const ALIGNOF_MAX_ALIGN_T: usize = 16;

/// If `align` is less than `_Alignof(max_align_t)`, and if the requested
/// allocation `size` is larger than the alignment, we are guaranteed to get a
/// suitably aligned allocation by default, without passing extra flags, and
/// this function returns `0`.
///
/// Otherwise, it returns the alignment flag to pass to the jemalloc APIs.
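///
/// For example (illustrative values only, assuming an x86_64 target where
/// `ALIGNOF_MAX_ALIGN_T == 16`): `layout_to_flags(8, 64)` returns `0`, while
/// `layout_to_flags(64, 64)` returns `ffi::MALLOCX_ALIGN(64)`.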
fn layout_to_flags(align: usize, size: usize) -> c_int {
    if align <= ALIGNOF_MAX_ALIGN_T && align <= size {
        0
    } else {
        ffi::MALLOCX_ALIGN(align)
    }
}

// Assumes a condition that always must hold.
macro_rules! assume {
    ($e:expr) => {
        debug_assert!($e);
        if !($e) {
            core::hint::unreachable_unchecked();
        }
    };
}

/// Handle to the jemalloc allocator
///
/// This type implements the `GlobalAlloc` trait, allowing its use as a global allocator.
///
/// When the `alloc_trait` feature of this crate is enabled, it also implements the `Alloc` trait,
/// allowing usage in collections.
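///
/// A minimal sketch of calling the `GlobalAlloc` methods directly (the crate
/// name `tikv_jemallocator` is assumed here):
///
/// ```
/// use core::alloc::{GlobalAlloc, Layout};
/// use tikv_jemallocator::Jemalloc;
///
/// unsafe {
///     // Allocate 64 bytes with 8-byte alignment, then free them again.
///     let layout = Layout::from_size_align(64, 8).unwrap();
///     let ptr = Jemalloc.alloc(layout);
///     assert!(!ptr.is_null());
///     Jemalloc.dealloc(ptr, layout);
/// }
/// ```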
#[derive(Copy, Clone, Default, Debug)]
pub struct Jemalloc;

unsafe impl GlobalAlloc for Jemalloc {
    #[inline]
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        assume!(layout.size() != 0);
        let flags = layout_to_flags(layout.align(), layout.size());
        let ptr = if flags == 0 {
            ffi::malloc(layout.size())
        } else {
            ffi::mallocx(layout.size(), flags)
        };
        ptr as *mut u8
    }

    #[inline]
    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
        assume!(layout.size() != 0);
        let flags = layout_to_flags(layout.align(), layout.size());
        let ptr = if flags == 0 {
            ffi::calloc(1, layout.size())
        } else {
            ffi::mallocx(layout.size(), flags | ffi::MALLOCX_ZERO)
        };
        ptr as *mut u8
    }

    #[inline]
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        assume!(!ptr.is_null());
        assume!(layout.size() != 0);
        let flags = layout_to_flags(layout.align(), layout.size());
        ffi::sdallocx(ptr as *mut c_void, layout.size(), flags)
    }

    #[inline]
    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
        assume!(layout.size() != 0);
        assume!(new_size != 0);
        let flags = layout_to_flags(layout.align(), new_size);
        let ptr = if flags == 0 {
            ffi::realloc(ptr as *mut c_void, new_size)
        } else {
            ffi::rallocx(ptr as *mut c_void, new_size, flags)
        };
        ptr as *mut u8
    }
}

#[cfg(feature = "alloc_trait")]
unsafe impl Alloc for Jemalloc {
    #[inline]
    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
        NonNull::new(GlobalAlloc::alloc(self, layout)).ok_or(AllocErr)
    }

    #[inline]
    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
        NonNull::new(GlobalAlloc::alloc_zeroed(self, layout)).ok_or(AllocErr)
    }

    #[inline]
    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
        GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
    }

    #[inline]
    unsafe fn realloc(
        &mut self,
        ptr: NonNull<u8>,
        layout: Layout,
        new_size: usize,
    ) -> Result<NonNull<u8>, AllocErr> {
        NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)
    }

    #[inline]
    unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
        let flags = layout_to_flags(layout.align(), layout.size());
        let ptr = ffi::mallocx(layout.size(), flags);
        if let Some(nonnull) = NonNull::new(ptr as *mut u8) {
            let excess = ffi::nallocx(layout.size(), flags);
            Ok(Excess(nonnull, excess))
        } else {
            Err(AllocErr)
        }
    }

    #[inline]
    unsafe fn realloc_excess(
        &mut self,
        ptr: NonNull<u8>,
        layout: Layout,
        new_size: usize,
    ) -> Result<Excess, AllocErr> {
        let flags = layout_to_flags(layout.align(), new_size);
        let ptr = ffi::rallocx(ptr.cast().as_ptr(), new_size, flags);
        if let Some(nonnull) = NonNull::new(ptr as *mut u8) {
            let excess = ffi::nallocx(new_size, flags);
            Ok(Excess(nonnull, excess))
        } else {
            Err(AllocErr)
        }
    }

    #[inline]
    fn usable_size(&self, layout: &Layout) -> (usize, usize) {
        let flags = layout_to_flags(layout.align(), layout.size());
        unsafe {
            let max = ffi::nallocx(layout.size(), flags);
            (layout.size(), max)
        }
    }

    #[inline]
    unsafe fn grow_in_place(
        &mut self,
        ptr: NonNull<u8>,
        layout: Layout,
        new_size: usize,
    ) -> Result<(), CannotReallocInPlace> {
        let flags = layout_to_flags(layout.align(), new_size);
        let usable_size = ffi::xallocx(ptr.cast().as_ptr(), new_size, 0, flags);
        if usable_size >= new_size {
            Ok(())
        } else {
            // `xallocx` returns a size smaller than the requested one to
            // indicate that the allocation could not be grown in place.
            //
            // The old allocation remains unaltered.
            Err(CannotReallocInPlace)
        }
    }

    #[inline]
    unsafe fn shrink_in_place(
        &mut self,
        ptr: NonNull<u8>,
        layout: Layout,
        new_size: usize,
    ) -> Result<(), CannotReallocInPlace> {
        if new_size == layout.size() {
            return Ok(());
        }
        let flags = layout_to_flags(layout.align(), new_size);
        let usable_size = ffi::xallocx(ptr.cast().as_ptr(), new_size, 0, flags);

        if usable_size < layout.size() {
            // If `usable_size` is smaller than the original size, the
            // size-class of the allocation was shrunk to the size-class of
            // `new_size`, and it is safe to deallocate the allocation with
            // `new_size`:
            Ok(())
        } else if usable_size == ffi::nallocx(new_size, flags) {
            // If the allocation was not shrunk and the size-class of `new_size`
            // is the same as the size-class of `layout.size()`, then the
            // allocation can be properly deallocated using `new_size` (and also
            // using `layout.size()`, because the allocation did not change).

            // Note: when the allocation is not shrunk, `xallocx` returns the
            // usable size of the original allocation, which in this case matches
            // that of the requested allocation:
            debug_assert_eq!(
                ffi::nallocx(new_size, flags),
                ffi::nallocx(layout.size(), flags)
            );
            Ok(())
        } else {
            // If the allocation was not shrunk, but the size-class of
            // `new_size` is not the same as that of the original allocation,
            // then shrinking the allocation failed:
            Err(CannotReallocInPlace)
        }
    }
}

/// Return the usable size of the allocation pointed to by `ptr`.
///
/// The return value may be larger than the size that was requested during allocation.
/// This function is not a mechanism for in-place `realloc()`;
/// rather, it is provided solely as a tool for introspection purposes.
/// Any discrepancy between the requested allocation size
/// and the size reported by this function should not be depended on,
/// since such behavior is entirely implementation-dependent.
///
/// # Safety
///
/// `ptr` must have been allocated by `Jemalloc` and must not have been freed yet.
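///
/// # Example
///
/// A minimal sketch (this assumes `Jemalloc` has been installed as the global
/// allocator and that the crate is named `tikv_jemallocator`):
///
/// ```
/// use tikv_jemallocator::{usable_size, Jemalloc};
///
/// #[global_allocator]
/// static GLOBAL: Jemalloc = Jemalloc;
///
/// fn main() {
///     // The box below is allocated by the global allocator, i.e. by jemalloc.
///     let boxed = Box::new([0u8; 100]);
///     let usable = unsafe { usable_size(&*boxed) };
///     // jemalloc rounds the request up to a size class, so the usable size
///     // is at least as large as the requested size.
///     assert!(usable >= 100);
/// }
/// ```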
pub unsafe fn usable_size<T>(ptr: *const T) -> usize {
    ffi::malloc_usable_size(ptr as *const c_void)
}

/// Raw bindings to jemalloc
mod ffi {
    pub use tikv_jemalloc_sys::*;
}