#![cfg(feature = "extern_crate_alloc")]
#![allow(clippy::duplicated_attributes)]

//! Stuff to boost things in the `alloc` crate.
//!
//! * You must enable the `extern_crate_alloc` feature of `bytemuck` or you
//!   will not be able to use this module! This is generally done by adding the
//!   feature to the dependency in Cargo.toml like so:
//!
//!   `bytemuck = { version = "VERSION_YOU_ARE_USING", features =
//!   ["extern_crate_alloc"]}`

use super::*;
#[cfg(target_has_atomic = "ptr")]
use alloc::sync::Arc;
use alloc::{
  alloc::{alloc_zeroed, Layout},
  boxed::Box,
  rc::Rc,
  vec,
  vec::Vec,
};
use core::{
  mem::{size_of_val, ManuallyDrop},
  ops::{Deref, DerefMut},
};
|
/// As [`try_cast_box`], but unwraps for you.
#[inline]
pub fn cast_box<A: NoUninit, B: AnyBitPattern>(input: Box<A>) -> Box<B> {
  try_cast_box(input).map_err(|(e, _v)| e).unwrap()
}
|
/// Attempts to cast the content type of a [`Box`].
///
/// On failure you get back an error along with the starting `Box`.
///
/// ## Failure
///
/// * The start and end content type of the `Box` must have the exact same
///   alignment.
/// * The start and end content type of the `Box` must have the exact same
///   size.
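///
/// ## Examples
///
/// An illustrative sketch: `u32` and `f32` share a size and alignment, so the
/// cast succeeds, while `[u32; 2]` is too large (any pair of `Pod` types with
/// matching layout would do):
///
/// ```rust
/// # use bytemuck::*;
/// let boxed: Box<u32> = Box::new(0x3F80_0000);
/// let cast: Box<f32> = cast_box(boxed);
/// assert_eq!(*cast, 1.0);
///
/// // A size mismatch hands back the error together with the original box.
/// let boxed: Box<u32> = Box::new(5);
/// let (err, boxed) = try_cast_box::<u32, [u32; 2]>(boxed).unwrap_err();
/// assert_eq!(err, PodCastError::SizeMismatch);
/// assert_eq!(*boxed, 5);
/// ```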
|
#[inline]
pub fn try_cast_box<A: NoUninit, B: AnyBitPattern>(
  input: Box<A>,
) -> Result<Box<B>, (PodCastError, Box<A>)> {
  if align_of::<A>() != align_of::<B>() {
    Err((PodCastError::AlignmentMismatch, input))
  } else if size_of::<A>() != size_of::<B>() {
    Err((PodCastError::SizeMismatch, input))
  } else {
    // Note(Lokathor): This is much simpler than with the Vec casting!
    let ptr: *mut B = Box::into_raw(input) as *mut B;
    Ok(unsafe { Box::from_raw(ptr) })
  }
}
|
/// Allocates a `Box<T>` with all of the contents being zeroed out.
///
/// This uses the global allocator to create a zeroed allocation and _then_
/// turns it into a Box. In other words, it's 100% assured that the zeroed data
/// won't be put temporarily on the stack. You can make a box of any size
/// without fear of a stack overflow.
///
/// ## Failure
///
/// This fails if the allocation fails.
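///
/// ## Examples
///
/// A small sketch; the nested array type here is just an arbitrary `Zeroable`
/// payload:
///
/// ```rust
/// # use bytemuck::*;
/// let grid: Box<[[u32; 32]; 32]> = try_zeroed_box().unwrap();
/// assert!(grid.iter().flatten().all(|&x| x == 0));
/// ```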
|
#[inline]
pub fn try_zeroed_box<T: Zeroable>() -> Result<Box<T>, ()> {
  if size_of::<T>() == 0 {
    // This will not allocate but simply create an arbitrary non-null
    // aligned pointer, valid for Box for a zero-sized pointee.
    let ptr: *mut T = core::ptr::NonNull::dangling().as_ptr();
    return Ok(unsafe { Box::from_raw(ptr) });
  }
  let layout: Layout = Layout::new::<T>();
  let ptr: *mut u8 = unsafe { alloc_zeroed(layout) };
  if ptr.is_null() {
    // we don't know what the error is because `alloc_zeroed` is a dumb API
    Err(())
  } else {
    Ok(unsafe { Box::<T>::from_raw(ptr as *mut T) })
  }
}
|
/// As [`try_zeroed_box`], but unwraps for you.
#[inline]
pub fn zeroed_box<T: Zeroable>() -> Box<T> {
  try_zeroed_box().unwrap()
}
|
/// Allocates a `Vec<T>` of length and capacity exactly equal to `length` and
/// all elements zeroed.
///
/// ## Failure
///
/// This fails if the allocation fails, or if a layout cannot be calculated for
/// the allocation.
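///
/// ## Examples
///
/// A quick sketch with a primitive element type:
///
/// ```rust
/// # use bytemuck::*;
/// let zeroes: Vec<u16> = try_zeroed_vec(4).unwrap();
/// assert_eq!(zeroes, [0, 0, 0, 0]);
/// assert_eq!(zeroes.capacity(), 4);
/// ```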
|
pub fn try_zeroed_vec<T: Zeroable>(length: usize) -> Result<Vec<T>, ()> {
  if length == 0 {
    Ok(Vec::new())
  } else {
    let boxed_slice: Box<[T]> = try_zeroed_slice_box(length)?;
    Ok(boxed_slice.into_vec())
  }
}
|
/// As [`try_zeroed_vec`], but unwraps for you.
pub fn zeroed_vec<T: Zeroable>(length: usize) -> Vec<T> {
  try_zeroed_vec(length).unwrap()
}
|
/// Allocates a `Box<[T]>` with all contents being zeroed out.
///
/// This uses the global allocator to create a zeroed allocation and _then_
/// turns it into a Box. In other words, it's 100% assured that the zeroed data
/// won't be put temporarily on the stack. You can make a box of any size
/// without fear of a stack overflow.
///
/// ## Failure
///
/// This fails if the allocation fails, or if a layout cannot be calculated for
/// the allocation.
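///
/// ## Examples
///
/// A quick sketch with a primitive element type:
///
/// ```rust
/// # use bytemuck::*;
/// let slice: Box<[u64]> = try_zeroed_slice_box(16).unwrap();
/// assert_eq!(slice.len(), 16);
/// assert!(slice.iter().all(|&x| x == 0));
/// ```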
|
#[inline]
pub fn try_zeroed_slice_box<T: Zeroable>(
  length: usize,
) -> Result<Box<[T]>, ()> {
  if size_of::<T>() == 0 || length == 0 {
    // This will not allocate but simply create an arbitrary non-null aligned
    // slice pointer, valid for Box for a zero-sized pointee.
    let ptr: *mut T = core::ptr::NonNull::dangling().as_ptr();
    let slice_ptr: *mut [T] = core::ptr::slice_from_raw_parts_mut(ptr, length);
    return Ok(unsafe { Box::from_raw(slice_ptr) });
  }
  let layout: Layout = core::alloc::Layout::array::<T>(length).map_err(|_| ())?;
  let ptr: *mut u8 = unsafe { alloc_zeroed(layout) };
  if ptr.is_null() {
    // we don't know what the error is because `alloc_zeroed` is a dumb API
    Err(())
  } else {
    let slice: &mut [T] =
      unsafe { core::slice::from_raw_parts_mut(ptr as *mut T, length) };
    Ok(unsafe { Box::<[T]>::from_raw(slice) })
  }
}
|
/// As [`try_zeroed_slice_box`], but unwraps for you.
pub fn zeroed_slice_box<T: Zeroable>(length: usize) -> Box<[T]> {
  try_zeroed_slice_box(length).unwrap()
}
|
/// Allocates an `Arc<T>` with all contents being zeroed out.
#[cfg(all(feature = "alloc_uninit", target_has_atomic = "ptr"))]
pub fn zeroed_arc<T: Zeroable>() -> Arc<T> {
  let mut arc = Arc::new_uninit();
  crate::write_zeroes(Arc::get_mut(&mut arc).unwrap()); // unwrap never fails for a newly allocated Arc
  unsafe { arc.assume_init() }
}
|
/// Allocates an `Arc<[T]>` with all contents being zeroed out.
#[cfg(all(feature = "alloc_uninit", target_has_atomic = "ptr"))]
pub fn zeroed_arc_slice<T: Zeroable>(length: usize) -> Arc<[T]> {
  let mut arc = Arc::new_uninit_slice(length);
  crate::fill_zeroes(Arc::get_mut(&mut arc).unwrap()); // unwrap never fails for a newly allocated Arc
  unsafe { arc.assume_init() }
}
|
/// Allocates an `Rc<T>` with all contents being zeroed out.
#[cfg(feature = "alloc_uninit")]
pub fn zeroed_rc<T: Zeroable>() -> Rc<T> {
  let mut rc = Rc::new_uninit();
  crate::write_zeroes(Rc::get_mut(&mut rc).unwrap()); // unwrap never fails for a newly allocated Rc
  unsafe { rc.assume_init() }
}
|
/// Allocates an `Rc<[T]>` with all contents being zeroed out.
#[cfg(feature = "alloc_uninit")]
pub fn zeroed_rc_slice<T: Zeroable>(length: usize) -> Rc<[T]> {
  let mut rc = Rc::new_uninit_slice(length);
  crate::fill_zeroes(Rc::get_mut(&mut rc).unwrap()); // unwrap never fails for a newly allocated Rc
  unsafe { rc.assume_init() }
}
|
/// As [`try_cast_slice_box`], but unwraps for you.
#[inline]
pub fn cast_slice_box<A: NoUninit, B: AnyBitPattern>(
  input: Box<[A]>,
) -> Box<[B]> {
  try_cast_slice_box(input).map_err(|(e, _v)| e).unwrap()
}
|
/// Attempts to cast the content type of a `Box<[T]>`.
///
/// On failure you get back an error along with the starting `Box<[T]>`.
///
/// ## Failure
///
/// * The start and end content type of the `Box<[T]>` must have the exact same
///   alignment.
/// * The start and end content size in bytes of the `Box<[T]>` must be the
///   exact same.
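///
/// ## Examples
///
/// A sketch that regroups bytes into pairs: `u8` and `[u8; 2]` share an
/// alignment of 1, and the total byte count divides evenly by 2:
///
/// ```rust
/// # use bytemuck::*;
/// let bytes: Box<[u8]> = vec![1, 2, 3, 4].into_boxed_slice();
/// let pairs: Box<[[u8; 2]]> = try_cast_slice_box(bytes).unwrap();
/// assert_eq!(&*pairs, &[[1u8, 2], [3, 4]]);
/// ```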
|
#[inline]
pub fn try_cast_slice_box<A: NoUninit, B: AnyBitPattern>(
  input: Box<[A]>,
) -> Result<Box<[B]>, (PodCastError, Box<[A]>)> {
  if align_of::<A>() != align_of::<B>() {
    Err((PodCastError::AlignmentMismatch, input))
  } else if size_of::<A>() != size_of::<B>() {
    let input_bytes = size_of_val::<[A]>(&*input);
    if (size_of::<B>() == 0 && input_bytes != 0)
      || (size_of::<B>() != 0 && input_bytes % size_of::<B>() != 0)
    {
      // If the size in bytes of the underlying buffer does not match an exact
      // multiple of the size of B, we cannot cast between them.
      Err((PodCastError::OutputSliceWouldHaveSlop, input))
    } else {
      // Because the size is an exact multiple, we can now change the length
      // of the slice and recreate the Box.
      // NOTE: This is a valid operation because according to the docs of
      // std::alloc::GlobalAlloc::dealloc(), the Layout that was used to alloc
      // the block must be the same Layout that is used to dealloc the block.
      // Luckily, Layout only stores two things, the alignment, and the size in
      // bytes. So as long as both of those stay the same, the Layout will
      // remain a valid input to dealloc.
      let length =
        if size_of::<B>() != 0 { input_bytes / size_of::<B>() } else { 0 };
      let box_ptr: *mut A = Box::into_raw(input) as *mut A;
      let ptr: *mut [B] =
        unsafe { core::slice::from_raw_parts_mut(box_ptr as *mut B, length) };
      Ok(unsafe { Box::<[B]>::from_raw(ptr) })
    }
  } else {
    let box_ptr: *mut [A] = Box::into_raw(input);
    let ptr: *mut [B] = box_ptr as *mut [B];
    Ok(unsafe { Box::<[B]>::from_raw(ptr) })
  }
}
|
/// As [`try_cast_vec`], but unwraps for you.
#[inline]
pub fn cast_vec<A: NoUninit, B: AnyBitPattern>(input: Vec<A>) -> Vec<B> {
  try_cast_vec(input).map_err(|(e, _v)| e).unwrap()
}
|
/// Attempts to cast the content type of a [`Vec`].
///
/// On failure you get back an error along with the starting `Vec`.
///
/// ## Failure
///
/// * The start and end content type of the `Vec` must have the exact same
///   alignment.
/// * The start and end content size in bytes of the `Vec` must be the exact
///   same.
/// * The start and end capacity in bytes of the `Vec` must be the exact same.
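///
/// ## Examples
///
/// A sketch between two `Pod` types with the same size and alignment, so both
/// length and capacity carry over unchanged:
///
/// ```rust
/// # use bytemuck::*;
/// let ints: Vec<u32> = vec![0x3F80_0000; 3];
/// let floats: Vec<f32> = try_cast_vec(ints).unwrap();
/// assert_eq!(floats, [1.0, 1.0, 1.0]);
/// ```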
|
#[inline]
pub fn try_cast_vec<A: NoUninit, B: AnyBitPattern>(
  input: Vec<A>,
) -> Result<Vec<B>, (PodCastError, Vec<A>)> {
  if align_of::<A>() != align_of::<B>() {
    Err((PodCastError::AlignmentMismatch, input))
  } else if size_of::<A>() != size_of::<B>() {
    let input_size = size_of_val::<[A]>(&*input);
    let input_capacity = input.capacity() * size_of::<A>();
    if (size_of::<B>() == 0 && input_capacity != 0)
      || (size_of::<B>() != 0
        && (input_size % size_of::<B>() != 0
          || input_capacity % size_of::<B>() != 0))
    {
      // If the size in bytes of the underlying buffer does not match an exact
      // multiple of the size of B, we cannot cast between them.
      // Note that we have to pay special attention to make sure that both
      // length and capacity are valid under B, as we do not want to
      // change which bytes are considered part of the initialized slice
      // of the Vec.
      Err((PodCastError::OutputSliceWouldHaveSlop, input))
    } else {
      // Because the size is an exact multiple, we can now change the length
      // and capacity and recreate the Vec.
      // NOTE: This is a valid operation because according to the docs of
      // std::alloc::GlobalAlloc::dealloc(), the Layout that was used to alloc
      // the block must be the same Layout that is used to dealloc the block.
      // Luckily, Layout only stores two things, the alignment, and the size in
      // bytes. So as long as both of those stay the same, the Layout will
      // remain a valid input to dealloc.

      // Note(Lokathor): First we record the length and capacity, which don't
      // have any secret provenance metadata.
      let length: usize =
        if size_of::<B>() != 0 { input_size / size_of::<B>() } else { 0 };
      let capacity: usize =
        if size_of::<B>() != 0 { input_capacity / size_of::<B>() } else { 0 };
      // Note(Lokathor): Next we "pre-forget" the old Vec by wrapping with
      // ManuallyDrop, because if we used `core::mem::forget` after taking the
      // pointer then that would invalidate our pointer. In nightly there's a
      // "into raw parts" method, which we can switch this to eventually.
      let mut manual_drop_vec = ManuallyDrop::new(input);
      let vec_ptr: *mut A = manual_drop_vec.as_mut_ptr();
      let ptr: *mut B = vec_ptr as *mut B;
      Ok(unsafe { Vec::from_raw_parts(ptr, length, capacity) })
    }
  } else {
    // Note(Lokathor): First we record the length and capacity, which don't
    // have any secret provenance metadata.
    let length: usize = input.len();
    let capacity: usize = input.capacity();
    // Note(Lokathor): Next we "pre-forget" the old Vec by wrapping with
    // ManuallyDrop, because if we used `core::mem::forget` after taking the
    // pointer then that would invalidate our pointer. In nightly there's a
    // "into raw parts" method, which we can switch this to eventually.
    let mut manual_drop_vec = ManuallyDrop::new(input);
    let vec_ptr: *mut A = manual_drop_vec.as_mut_ptr();
    let ptr: *mut B = vec_ptr as *mut B;
    Ok(unsafe { Vec::from_raw_parts(ptr, length, capacity) })
  }
}
|
/// This "collects" a slice of pod data into a vec of a different pod type.
///
/// Unlike with [`cast_slice`] and [`cast_slice_mut`], this will always work.
///
/// The output vec will be of a minimal size/capacity to hold the slice given.
///
/// ```rust
/// # use bytemuck::*;
/// let halfwords: [u16; 4] = [5, 6, 7, 8];
/// let vec_of_words: Vec<u32> = pod_collect_to_vec(&halfwords);
/// if cfg!(target_endian = "little") {
///   assert_eq!(&vec_of_words[..], &[0x0006_0005, 0x0008_0007][..])
/// } else {
///   assert_eq!(&vec_of_words[..], &[0x0005_0006, 0x0007_0008][..])
/// }
/// ```
pub fn pod_collect_to_vec<A: NoUninit, B: NoUninit + AnyBitPattern>(
  src: &[A],
) -> Vec<B> {
  let src_size: usize = core::mem::size_of_val(src);
  // Note(Lokathor): dst_count is rounded up so that the dest will always be at
  // least as many bytes as the src.
  let dst_count: usize = src_size / size_of::<B>()
    + if src_size % size_of::<B>() != 0 { 1 } else { 0 };
  let mut dst = vec![B::zeroed(); dst_count];

  let src_bytes: &[u8] = cast_slice(src);
  let dst_bytes: &mut [u8] = cast_slice_mut(&mut dst[..]);
  dst_bytes[..src_size].copy_from_slice(src_bytes);
  dst
}
|
/// As [`try_cast_rc`], but unwraps for you.
#[inline]
pub fn cast_rc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>(
  input: Rc<A>,
) -> Rc<B> {
  try_cast_rc(input).map_err(|(e, _v)| e).unwrap()
}
|
/// Attempts to cast the content type of an [`Rc`].
///
/// On failure you get back an error along with the starting `Rc`.
///
/// The bounds on this function are the same as [`cast_mut`], because a user
/// could call `Rc::get_mut_unchecked` on the output, which could be observable
/// in the input.
///
/// ## Failure
///
/// * The start and end content type of the `Rc` must have the exact same
///   alignment.
/// * The start and end content type of the `Rc` must have the exact same size.
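///
/// ## Examples
///
/// A minimal sketch between two `Pod` types of identical layout:
///
/// ```rust
/// # use bytemuck::*;
/// # use std::rc::Rc;
/// let rc: Rc<u32> = Rc::new(0x3F80_0000);
/// let rc: Rc<f32> = try_cast_rc(rc).unwrap();
/// assert_eq!(*rc, 1.0);
/// ```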
|
#[inline]
pub fn try_cast_rc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>(
  input: Rc<A>,
) -> Result<Rc<B>, (PodCastError, Rc<A>)> {
  if align_of::<A>() != align_of::<B>() {
    Err((PodCastError::AlignmentMismatch, input))
  } else if size_of::<A>() != size_of::<B>() {
    Err((PodCastError::SizeMismatch, input))
  } else {
    // Safety: Rc::from_raw requires size and alignment match, which is met.
    let ptr: *const B = Rc::into_raw(input) as *const B;
    Ok(unsafe { Rc::from_raw(ptr) })
  }
}
|
/// As [`try_cast_arc`], but unwraps for you.
#[inline]
#[cfg(target_has_atomic = "ptr")]
pub fn cast_arc<A: NoUninit + AnyBitPattern, B: NoUninit + AnyBitPattern>(
  input: Arc<A>,
) -> Arc<B> {
  try_cast_arc(input).map_err(|(e, _v)| e).unwrap()
}
|
/// Attempts to cast the content type of an [`Arc`].
///
/// On failure you get back an error along with the starting `Arc`.
///
/// The bounds on this function are the same as [`cast_mut`], because a user
/// could call `Arc::get_mut_unchecked` on the output, which could be
/// observable in the input.
///
/// ## Failure
///
/// * The start and end content type of the `Arc` must have the exact same
///   alignment.
/// * The start and end content type of the `Arc` must have the exact same
///   size.
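///
/// ## Examples
///
/// A minimal sketch between two `Pod` types of identical layout:
///
/// ```rust
/// # use bytemuck::*;
/// # use std::sync::Arc;
/// let arc: Arc<u32> = Arc::new(0x3F80_0000);
/// let arc: Arc<f32> = try_cast_arc(arc).unwrap();
/// assert_eq!(*arc, 1.0);
/// ```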
|
#[inline]
#[cfg(target_has_atomic = "ptr")]
pub fn try_cast_arc<
  A: NoUninit + AnyBitPattern,
  B: NoUninit + AnyBitPattern,
>(
  input: Arc<A>,
) -> Result<Arc<B>, (PodCastError, Arc<A>)> {
  if align_of::<A>() != align_of::<B>() {
    Err((PodCastError::AlignmentMismatch, input))
  } else if size_of::<A>() != size_of::<B>() {
    Err((PodCastError::SizeMismatch, input))
  } else {
    // Safety: Arc::from_raw requires size and alignment match, which is met.
    let ptr: *const B = Arc::into_raw(input) as *const B;
    Ok(unsafe { Arc::from_raw(ptr) })
  }
}
|
/// As [`try_cast_slice_rc`], but unwraps for you.
#[inline]
pub fn cast_slice_rc<
  A: NoUninit + AnyBitPattern,
  B: NoUninit + AnyBitPattern,
>(
  input: Rc<[A]>,
) -> Rc<[B]> {
  try_cast_slice_rc(input).map_err(|(e, _v)| e).unwrap()
}
|
/// Attempts to cast the content type of an `Rc<[T]>`.
///
/// On failure you get back an error along with the starting `Rc<[T]>`.
///
/// The bounds on this function are the same as [`cast_mut`], because a user
/// could call `Rc::get_mut_unchecked` on the output, which could be observable
/// in the input.
///
/// ## Failure
///
/// * The start and end content type of the `Rc<[T]>` must have the exact same
///   alignment.
/// * The start and end content size in bytes of the `Rc<[T]>` must be the
///   exact same.
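///
/// ## Examples
///
/// A sketch that regroups shared bytes into pairs (`u8` and `[u8; 2]` have the
/// same alignment):
///
/// ```rust
/// # use bytemuck::*;
/// # use std::rc::Rc;
/// let rc: Rc<[u8]> = Rc::from(vec![1u8, 2, 3, 4]);
/// let pairs: Rc<[[u8; 2]]> = try_cast_slice_rc(rc).unwrap();
/// assert_eq!(&*pairs, &[[1u8, 2], [3, 4]]);
/// ```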
|
#[inline]
pub fn try_cast_slice_rc<
  A: NoUninit + AnyBitPattern,
  B: NoUninit + AnyBitPattern,
>(
  input: Rc<[A]>,
) -> Result<Rc<[B]>, (PodCastError, Rc<[A]>)> {
  if align_of::<A>() != align_of::<B>() {
    Err((PodCastError::AlignmentMismatch, input))
  } else if size_of::<A>() != size_of::<B>() {
    let input_bytes = size_of_val::<[A]>(&*input);
    if (size_of::<B>() == 0 && input_bytes != 0)
      || (size_of::<B>() != 0 && input_bytes % size_of::<B>() != 0)
    {
      // If the size in bytes of the underlying buffer does not match an exact
      // multiple of the size of B, we cannot cast between them.
      Err((PodCastError::OutputSliceWouldHaveSlop, input))
    } else {
      // Because the size is an exact multiple, we can now change the length
      // of the slice and recreate the Rc.
      // NOTE: This is a valid operation because according to the docs of
      // std::rc::Rc::from_raw(), the type U that was in the original Rc<U>
      // acquired from Rc::into_raw() must have the same size and alignment
      // as the type T in the new Rc<T>. So as long as both the size and
      // alignment stay the same, the Rc will remain a valid Rc.
      let length =
        if size_of::<B>() != 0 { input_bytes / size_of::<B>() } else { 0 };
      let rc_ptr: *const A = Rc::into_raw(input) as *const A;
      // Must use ptr::slice_from_raw_parts, because we cannot make an
      // intermediate const reference, because it has mutable provenance,
      // nor an intermediate mutable reference, because it could be aliased.
      let ptr = core::ptr::slice_from_raw_parts(rc_ptr as *const B, length);
      Ok(unsafe { Rc::<[B]>::from_raw(ptr) })
    }
  } else {
    let rc_ptr: *const [A] = Rc::into_raw(input);
    let ptr: *const [B] = rc_ptr as *const [B];
    Ok(unsafe { Rc::<[B]>::from_raw(ptr) })
  }
}
|
/// As [`try_cast_slice_arc`], but unwraps for you.
#[inline]
#[cfg(target_has_atomic = "ptr")]
pub fn cast_slice_arc<
  A: NoUninit + AnyBitPattern,
  B: NoUninit + AnyBitPattern,
>(
  input: Arc<[A]>,
) -> Arc<[B]> {
  try_cast_slice_arc(input).map_err(|(e, _v)| e).unwrap()
}
|
/// Attempts to cast the content type of an `Arc<[T]>`.
///
/// On failure you get back an error along with the starting `Arc<[T]>`.
///
/// The bounds on this function are the same as [`cast_mut`], because a user
/// could call `Arc::get_mut_unchecked` on the output, which could be
/// observable in the input.
///
/// ## Failure
///
/// * The start and end content type of the `Arc<[T]>` must have the exact same
///   alignment.
/// * The start and end content size in bytes of the `Arc<[T]>` must be the
///   exact same.
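///
/// ## Examples
///
/// A sketch that regroups shared bytes into pairs (`u8` and `[u8; 2]` have the
/// same alignment):
///
/// ```rust
/// # use bytemuck::*;
/// # use std::sync::Arc;
/// let arc: Arc<[u8]> = Arc::from(vec![1u8, 2, 3, 4]);
/// let pairs: Arc<[[u8; 2]]> = try_cast_slice_arc(arc).unwrap();
/// assert_eq!(&*pairs, &[[1u8, 2], [3, 4]]);
/// ```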
|
#[inline]
#[cfg(target_has_atomic = "ptr")]
pub fn try_cast_slice_arc<
  A: NoUninit + AnyBitPattern,
  B: NoUninit + AnyBitPattern,
>(
  input: Arc<[A]>,
) -> Result<Arc<[B]>, (PodCastError, Arc<[A]>)> {
  if align_of::<A>() != align_of::<B>() {
    Err((PodCastError::AlignmentMismatch, input))
  } else if size_of::<A>() != size_of::<B>() {
    let input_bytes = size_of_val::<[A]>(&*input);
    if (size_of::<B>() == 0 && input_bytes != 0)
      || (size_of::<B>() != 0 && input_bytes % size_of::<B>() != 0)
    {
      // If the size in bytes of the underlying buffer does not match an exact
      // multiple of the size of B, we cannot cast between them.
      Err((PodCastError::OutputSliceWouldHaveSlop, input))
    } else {
      // Because the size is an exact multiple, we can now change the length
      // of the slice and recreate the Arc.
      // NOTE: This is a valid operation because according to the docs of
      // std::sync::Arc::from_raw(), the type U that was in the original Arc<U>
      // acquired from Arc::into_raw() must have the same size and alignment
      // as the type T in the new Arc<T>. So as long as both the size and
      // alignment stay the same, the Arc will remain a valid Arc.
      let length =
        if size_of::<B>() != 0 { input_bytes / size_of::<B>() } else { 0 };
      let arc_ptr: *const A = Arc::into_raw(input) as *const A;
      // Must use ptr::slice_from_raw_parts, because we cannot make an
      // intermediate const reference, because it has mutable provenance,
      // nor an intermediate mutable reference, because it could be aliased.
      let ptr = core::ptr::slice_from_raw_parts(arc_ptr as *const B, length);
      Ok(unsafe { Arc::<[B]>::from_raw(ptr) })
    }
  } else {
    let arc_ptr: *const [A] = Arc::into_raw(input);
    let ptr: *const [B] = arc_ptr as *const [B];
    Ok(unsafe { Arc::<[B]>::from_raw(ptr) })
  }
}
|
/// An extension trait for `TransparentWrapper` and alloc types.
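///
/// ## Examples
///
/// A small sketch, assuming this crate's `TransparentWrapper` impl for
/// [`core::num::Wrapping`]:
///
/// ```rust
/// # use bytemuck::TransparentWrapperAlloc;
/// use core::num::Wrapping;
///
/// let raw: Vec<u8> = vec![1, 2, 3];
/// let wrapped: Vec<Wrapping<u8>> = TransparentWrapperAlloc::wrap_vec(raw);
/// assert_eq!(wrapped, [Wrapping(1), Wrapping(2), Wrapping(3)]);
///
/// let peeled: Vec<u8> = TransparentWrapperAlloc::peel_vec(wrapped);
/// assert_eq!(peeled, [1, 2, 3]);
/// ```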
|
pub trait TransparentWrapperAlloc<Inner: ?Sized>:
  TransparentWrapper<Inner>
{
  /// Convert a vec of the inner type into a vec of the wrapper type.
  fn wrap_vec(s: Vec<Inner>) -> Vec<Self>
  where
    Self: Sized,
    Inner: Sized,
  {
    let mut s = ManuallyDrop::new(s);

    let length = s.len();
    let capacity = s.capacity();
    let ptr = s.as_mut_ptr();

    unsafe {
      // SAFETY:
      // * ptr comes from Vec (and will not be double-dropped)
      // * the two types have the identical representation
      // * the len and capacity fields are valid
      Vec::from_raw_parts(ptr as *mut Self, length, capacity)
    }
  }

  /// Convert a box to the inner type into a box to the wrapper
  /// type.
  #[inline]
  fn wrap_box(s: Box<Inner>) -> Box<Self> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the sizes are unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations
      // * Box is guaranteed to have representation identical to a (non-null)
      //   pointer
      // * The pointer comes from a box (and thus satisfies all safety
      //   requirements of Box)
      let inner_ptr: *mut Inner = Box::into_raw(s);
      let wrapper_ptr: *mut Self = transmute!(inner_ptr);
      Box::from_raw(wrapper_ptr)
    }
  }

  /// Convert an [`Rc`] to the inner type into an `Rc` to the wrapper type.
  #[inline]
  fn wrap_rc(s: Rc<Inner>) -> Rc<Self> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the layout of Rc is unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations, and that the size and alignment of Inner
      //   and Self are the same, which meets the safety requirements of
      //   Rc::from_raw
      let inner_ptr: *const Inner = Rc::into_raw(s);
      let wrapper_ptr: *const Self = transmute!(inner_ptr);
      Rc::from_raw(wrapper_ptr)
    }
  }

  /// Convert an [`Arc`] to the inner type into an `Arc` to the wrapper type.
  #[inline]
  #[cfg(target_has_atomic = "ptr")]
  fn wrap_arc(s: Arc<Inner>) -> Arc<Self> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the layout of Arc is unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations, and that the size and alignment of Inner
      //   and Self are the same, which meets the safety requirements of
      //   Arc::from_raw
      let inner_ptr: *const Inner = Arc::into_raw(s);
      let wrapper_ptr: *const Self = transmute!(inner_ptr);
      Arc::from_raw(wrapper_ptr)
    }
  }

  /// Convert a vec of the wrapper type into a vec of the inner type.
  fn peel_vec(s: Vec<Self>) -> Vec<Inner>
  where
    Self: Sized,
    Inner: Sized,
  {
    let mut s = ManuallyDrop::new(s);

    let length = s.len();
    let capacity = s.capacity();
    let ptr = s.as_mut_ptr();

    unsafe {
      // SAFETY:
      // * ptr comes from Vec (and will not be double-dropped)
      // * the two types have the identical representation
      // * the len and capacity fields are valid
      Vec::from_raw_parts(ptr as *mut Inner, length, capacity)
    }
  }

  /// Convert a box to the wrapper type into a box to the inner
  /// type.
  #[inline]
  fn peel_box(s: Box<Self>) -> Box<Inner> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the sizes are unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations
      // * Box is guaranteed to have representation identical to a (non-null)
      //   pointer
      // * The pointer comes from a box (and thus satisfies all safety
      //   requirements of Box)
      let wrapper_ptr: *mut Self = Box::into_raw(s);
      let inner_ptr: *mut Inner = transmute!(wrapper_ptr);
      Box::from_raw(inner_ptr)
    }
  }

  /// Convert an [`Rc`] to the wrapper type into an `Rc` to the inner type.
  #[inline]
  fn peel_rc(s: Rc<Self>) -> Rc<Inner> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the layout of Rc is unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations, and that the size and alignment of Inner
      //   and Self are the same, which meets the safety requirements of
      //   Rc::from_raw
      let wrapper_ptr: *const Self = Rc::into_raw(s);
      let inner_ptr: *const Inner = transmute!(wrapper_ptr);
      Rc::from_raw(inner_ptr)
    }
  }

  /// Convert an [`Arc`] to the wrapper type into an `Arc` to the inner type.
  #[inline]
  #[cfg(target_has_atomic = "ptr")]
  fn peel_arc(s: Arc<Self>) -> Arc<Inner> {
    // The unsafe contract requires that these two have
    // identical representations, and thus identical pointer metadata.
    // Assert that Self and Inner have the same pointer size,
    // which is the best we can do to assert their metadata is the same type
    // on stable.
    assert!(size_of::<*mut Inner>() == size_of::<*mut Self>());

    unsafe {
      // A pointer cast doesn't work here because rustc can't tell that
      // the vtables match (because of the `?Sized` restriction relaxation).
      // A `transmute` doesn't work because the layout of Arc is unspecified.
      //
      // SAFETY:
      // * The unsafe contract requires that pointers to Inner and Self have
      //   identical representations, and that the size and alignment of Inner
      //   and Self are the same, which meets the safety requirements of
      //   Arc::from_raw
      let wrapper_ptr: *const Self = Arc::into_raw(s);
      let inner_ptr: *const Inner = transmute!(wrapper_ptr);
      Arc::from_raw(inner_ptr)
    }
  }
}
|
impl<I: ?Sized, T: ?Sized + TransparentWrapper<I>> TransparentWrapperAlloc<I>
  for T
{
}
|
/// As `Box<[u8]>`, but remembers the original alignment.
pub struct BoxBytes {
  // SAFETY: `ptr` is aligned to `layout.align()`, points to
  // `layout.size()` initialized bytes, and, if `layout.size() > 0`,
  // is owned and was allocated with the global allocator with `layout`.
  ptr: NonNull<u8>,
  layout: Layout,
}
|
// SAFETY: `BoxBytes` is semantically a `Box<[u8], Global>` with a different
// allocation alignment, `Box<[u8], Global>` is `Send + Sync`, and changing the
// allocation alignment has no thread-safety implications.
unsafe impl Send for BoxBytes {}
// SAFETY: See `Send` impl
unsafe impl Sync for BoxBytes {}
|
impl Deref for BoxBytes {
  type Target = [u8];

  fn deref(&self) -> &Self::Target {
    // SAFETY: See type invariant.
    unsafe {
      core::slice::from_raw_parts(self.ptr.as_ptr(), self.layout.size())
    }
  }
}
|
impl DerefMut for BoxBytes {
  fn deref_mut(&mut self) -> &mut Self::Target {
    // SAFETY: See type invariant.
    unsafe {
      core::slice::from_raw_parts_mut(self.ptr.as_ptr(), self.layout.size())
    }
  }
}
|
impl Drop for BoxBytes {
  fn drop(&mut self) {
    if self.layout.size() != 0 {
      // SAFETY: See type invariant: if `self.layout.size() != 0`, then
      // `self.ptr` is owned and was allocated with `self.layout`.
      unsafe { alloc::alloc::dealloc(self.ptr.as_ptr(), self.layout) };
    }
  }
}
|
impl<T: ?Sized + sealed::BoxBytesOf> From<Box<T>> for BoxBytes {
  fn from(value: Box<T>) -> Self {
    value.box_bytes_of()
  }
}
|
mod sealed {
  use crate::{BoxBytes, PodCastError};
  use alloc::boxed::Box;

  pub trait BoxBytesOf {
    fn box_bytes_of(self: Box<Self>) -> BoxBytes;
  }

  pub trait FromBoxBytes {
    fn try_from_box_bytes(
      bytes: BoxBytes,
    ) -> Result<Box<Self>, (PodCastError, BoxBytes)>;
  }
}
|
impl<T: NoUninit> sealed::BoxBytesOf for T {
  fn box_bytes_of(self: Box<Self>) -> BoxBytes {
    let layout: Layout = Layout::new::<T>();
    let ptr: *mut u8 = Box::into_raw(self) as *mut u8;
    // SAFETY: Box::into_raw() returns a non-null pointer.
    let ptr = unsafe { NonNull::new_unchecked(ptr) };
    BoxBytes { ptr, layout }
  }
}
|
impl<T: NoUninit> sealed::BoxBytesOf for [T] {
  fn box_bytes_of(self: Box<Self>) -> BoxBytes {
    let layout: Layout = Layout::for_value::<[T]>(&self);
    let ptr: *mut u8 = Box::into_raw(self) as *mut u8;
    // SAFETY: Box::into_raw() returns a non-null pointer.
    let ptr = unsafe { NonNull::new_unchecked(ptr) };
    BoxBytes { ptr, layout }
  }
}
|
impl sealed::BoxBytesOf for str {
  fn box_bytes_of(self: Box<Self>) -> BoxBytes {
    self.into_boxed_bytes().box_bytes_of()
  }
}
|
impl<T: AnyBitPattern> sealed::FromBoxBytes for T {
  fn try_from_box_bytes(
    bytes: BoxBytes,
  ) -> Result<Box<Self>, (PodCastError, BoxBytes)> {
    let layout: Layout = Layout::new::<T>();
    if bytes.layout.align() != layout.align() {
      Err((PodCastError::AlignmentMismatch, bytes))
    } else if bytes.layout.size() != layout.size() {
      Err((PodCastError::SizeMismatch, bytes))
    } else {
      let (ptr, _) = bytes.into_raw_parts();
      // SAFETY: See BoxBytes type invariant.
      Ok(unsafe { Box::from_raw(ptr.as_ptr() as *mut T) })
    }
  }
}
|
impl<T: AnyBitPattern> sealed::FromBoxBytes for [T] {
  fn try_from_box_bytes(
    bytes: BoxBytes,
  ) -> Result<Box<Self>, (PodCastError, BoxBytes)> {
    let single_layout = Layout::new::<T>();
    if bytes.layout.align() != single_layout.align() {
      Err((PodCastError::AlignmentMismatch, bytes))
    } else if (single_layout.size() == 0 && bytes.layout.size() != 0)
      || (single_layout.size() != 0
        && bytes.layout.size() % single_layout.size() != 0)
    {
      Err((PodCastError::OutputSliceWouldHaveSlop, bytes))
    } else {
      let (ptr, layout) = bytes.into_raw_parts();
      let length = if single_layout.size() != 0 {
        layout.size() / single_layout.size()
      } else {
        0
      };
      let ptr =
        core::ptr::slice_from_raw_parts_mut(ptr.as_ptr() as *mut T, length);
      // SAFETY: See BoxBytes type invariant.
      Ok(unsafe { Box::from_raw(ptr) })
    }
  }
}
|
/// Re-interprets `Box<T>` as `BoxBytes`.
///
/// `T` must be either [`Sized`] and [`NoUninit`],
/// [`[U]`](slice) where `U: NoUninit`, or [`str`].
#[inline]
pub fn box_bytes_of<T: sealed::BoxBytesOf + ?Sized>(input: Box<T>) -> BoxBytes {
  input.box_bytes_of()
}
|
/// Re-interprets `BoxBytes` as `Box<T>`.
///
/// `T` must be either [`Sized`] + [`AnyBitPattern`], or
/// [`[U]`](slice) where `U: AnyBitPattern`.
///
/// ## Panics
///
/// This is [`try_from_box_bytes`] but will panic on error and the input will
/// be dropped.
#[inline]
#[cfg_attr(feature = "track_caller", track_caller)]
pub fn from_box_bytes<T: sealed::FromBoxBytes + ?Sized>(
  input: BoxBytes,
) -> Box<T> {
  try_from_box_bytes(input).map_err(|(error, _)| error).unwrap()
}
|
/// Re-interprets `BoxBytes` as `Box<T>`.
///
/// `T` must be either [`Sized`] + [`AnyBitPattern`], or
/// [`[U]`](slice) where `U: AnyBitPattern`.
///
/// Returns `Err`:
/// * If the input isn't aligned for `T`.
/// * If `T: Sized` and the input's length isn't exactly the size of `T`.
/// * If `T = [U]` and the input's length isn't exactly a multiple of the size
///   of `U`.
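///
/// ## Examples
///
/// A round trip through [`box_bytes_of`], keeping the same alignment so the
/// cast succeeds (sketch only):
///
/// ```rust
/// # use bytemuck::*;
/// let bytes: BoxBytes = box_bytes_of(Box::new([1u32, 2, 3]));
/// let back: Box<[u32]> =
///   try_from_box_bytes(bytes).map_err(|(e, _)| e).unwrap();
/// assert_eq!(&*back, &[1u32, 2, 3]);
/// ```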
|
#[inline]
pub fn try_from_box_bytes<T: sealed::FromBoxBytes + ?Sized>(
  input: BoxBytes,
) -> Result<Box<T>, (PodCastError, BoxBytes)> {
  T::try_from_box_bytes(input)
}
|
impl BoxBytes {
  /// Constructs a `BoxBytes` from its raw parts.
  ///
  /// # Safety
  ///
  /// The pointer is owned, has been allocated with the provided layout, and
  /// points to `layout.size()` initialized bytes.
  pub unsafe fn from_raw_parts(ptr: NonNull<u8>, layout: Layout) -> Self {
    BoxBytes { ptr, layout }
  }

  /// Deconstructs a `BoxBytes` into its raw parts.
  ///
  /// The pointer is owned, has been allocated with the provided layout, and
  /// points to `layout.size()` initialized bytes.
  pub fn into_raw_parts(self) -> (NonNull<u8>, Layout) {
    let me = ManuallyDrop::new(self);
    (me.ptr, me.layout)
  }

  /// Returns the original layout.
  pub fn layout(&self) -> Layout {
    self.layout
  }
}
|