1// Copyright 2023 The Fuchsia Authors
2//
3// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0
4// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT
5// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option.
6// This file may not be copied, modified, or distributed except according to
7// those terms.
8
9#[path = "third_party/rust/layout.rs"]
10pub(crate) mod core_layout;
11
12use core::{mem, num::NonZeroUsize};
13
14pub(crate) mod ptr {
15 use core::{
16 fmt::{Debug, Formatter},
17 marker::PhantomData,
18 ptr::NonNull,
19 };
20
21 use crate::{util::AsAddress, KnownLayout, _CastType};
22
23 /// A raw pointer with more restrictions.
24 ///
25 /// `Ptr<T>` is similar to `NonNull<T>`, but it is more restrictive in the
26 /// following ways:
27 /// - It must derive from a valid allocation
28 /// - It must reference a byte range which is contained inside the
29 /// allocation from which it derives
30 /// - As a consequence, the byte range it references must have a size
31 /// which does not overflow `isize`
32 /// - It must satisfy `T`'s alignment requirement
33 ///
34 /// Thanks to these restrictions, it is easier to prove the soundness of
35 /// some operations using `Ptr`s.
36 ///
37 /// `Ptr<'a, T>` is [covariant] in `'a` and `T`.
38 ///
39 /// [covariant]: https://doc.rust-lang.org/reference/subtyping.html
    pub struct Ptr<'a, T: 'a + ?Sized> {
        // INVARIANTS:
        // - `ptr` is derived from some valid Rust allocation, `A`
        // - `ptr` has the same provenance as `A`
        // - `ptr` addresses a byte range which is entirely contained in `A`
        // - `ptr` addresses a byte range whose length fits in an `isize`
        // - `ptr` addresses a byte range which does not wrap around the address
        //   space
        // - `ptr` is validly-aligned for `T`
        // - `A` is guaranteed to live for at least `'a`
        // - `T: 'a`
        ptr: NonNull<T>,
        // `PhantomData<&'a ()>` ties the pointer to the lifetime `'a` and
        // makes `Ptr` covariant in `'a` without storing an actual reference.
        _lifetime: PhantomData<&'a ()>,
    }
54
    // `Ptr` is a thin wrapper around a `NonNull` plus a marker, so it is
    // freely copyable; `Clone` simply performs the bitwise copy.
    impl<'a, T: ?Sized> Copy for Ptr<'a, T> {}
    impl<'a, T: ?Sized> Clone for Ptr<'a, T> {
        #[inline]
        fn clone(&self) -> Self {
            // `Ptr: Copy`, so cloning is just dereferencing.
            *self
        }
    }
62
    impl<'a, T: ?Sized> Ptr<'a, T> {
        /// Returns a shared reference to the value.
        ///
        /// # Safety
        ///
        /// For the duration of `'a`:
        /// - The referenced memory must contain a validly-initialized `T` for
        ///   the duration of `'a`.
        /// - The referenced memory must not also be referenced by any mutable
        ///   references.
        /// - The referenced memory must not be mutated, even via an
        ///   [`UnsafeCell`].
        /// - There must not exist any references to the same memory region
        ///   which contain `UnsafeCell`s at byte ranges which are not identical
        ///   to the byte ranges at which `T` contains `UnsafeCell`s.
        ///
        /// [`UnsafeCell`]: core::cell::UnsafeCell
        // TODO(#429): The safety requirements are likely overly-restrictive.
        // Notably, mutation via `UnsafeCell`s is probably fine. Once the rules
        // are more clearly defined, we should relax the safety requirements.
        // For an example of why this is subtle, see:
        // https://github.com/rust-lang/unsafe-code-guidelines/issues/463#issuecomment-1736771593
        #[allow(unused)]
        pub(crate) unsafe fn as_ref(&self) -> &'a T {
            // SAFETY:
            // - By invariant, `self.ptr` is properly-aligned for `T`.
            // - By invariant, `self.ptr` is "dereferenceable" in that it points
            //   to a single allocation.
            // - By invariant, the allocation is live for `'a`.
            // - The caller promises that no mutable references exist to this
            //   region during `'a`.
            // - The caller promises that `UnsafeCell`s match exactly.
            // - The caller promises that no mutation will happen during `'a`,
            //   even via `UnsafeCell`s.
            // - The caller promises that the memory region contains a
            //   validly-initialized `T`.
            unsafe { self.ptr.as_ref() }
        }

        /// Casts to a different (unsized) target type.
        ///
        /// # Safety
        ///
        /// The caller promises that
        /// - `cast(p)` is implemented exactly as follows: `|p: *mut T| p as
        ///   *mut U`.
        /// - The size of the object referenced by the resulting pointer is less
        ///   than or equal to the size of the object referenced by `self`.
        /// - The alignment of `U` is less than or equal to the alignment of
        ///   `T`.
        pub(crate) unsafe fn cast_unsized<U: 'a + ?Sized, F: FnOnce(*mut T) -> *mut U>(
            self,
            cast: F,
        ) -> Ptr<'a, U> {
            let ptr = cast(self.ptr.as_ptr());
            // SAFETY: Caller promises that `cast` is just an `as` cast. We call
            // `cast` on `self.ptr.as_ptr()`, which is non-null by construction.
            let ptr = unsafe { NonNull::new_unchecked(ptr) };
            // SAFETY: We prove each invariant of the returned `Ptr` in turn:
            // - By invariant, `self.ptr` is derived from some valid Rust
            //   allocation, and since `ptr` is just `self.ptr as *mut U`, so is
            //   `ptr`.
            // - By invariant, `self.ptr` has the same provenance as `A`, and so
            //   the same is true of `ptr`.
            // - By invariant, `self.ptr` addresses a byte range which is
            //   entirely contained in `A`, and so the same is true of `ptr`.
            // - By invariant, `self.ptr` addresses a byte range whose length
            //   fits in an `isize`, and so the same is true of `ptr`.
            // - By invariant, `self.ptr` addresses a byte range which does not
            //   wrap around the address space, and so the same is true of
            //   `ptr`.
            // - By invariant, `self.ptr` is validly-aligned for `T`. Since
            //   `ptr` has the same address, and since the caller promises that
            //   the alignment of `U` is less than or equal to the alignment of
            //   `T`, `ptr` is validly-aligned for `U`.
            // - By invariant, `A` is guaranteed to live for at least `'a`.
            // - `U: 'a`
            Ptr { ptr, _lifetime: PhantomData }
        }
    }
143
    impl<'a> Ptr<'a, [u8]> {
        /// Attempts to cast `self` to a `U` using the given cast type.
        ///
        /// Returns `None` if the resulting `U` would be invalidly-aligned or if
        /// no `U` can fit in `self`. On success, returns a pointer to the
        /// largest-possible `U` which fits in `self`.
        ///
        /// # Safety
        ///
        /// The caller may assume that this implementation is correct, and may
        /// rely on that assumption for the soundness of their code. In
        /// particular, the caller may assume that, if `try_cast_into` returns
        /// `Some((ptr, split_at))`, then:
        /// - If this is a prefix cast, `ptr` refers to the byte range `[0,
        ///   split_at)` in `self`.
        /// - If this is a suffix cast, `ptr` refers to the byte range
        ///   `[split_at, self.len())` in `self`.
        ///
        /// # Panics
        ///
        /// Panics if `U` is a DST whose trailing slice element is zero-sized.
        pub(crate) fn try_cast_into<U: 'a + ?Sized + KnownLayout>(
            &self,
            cast_type: _CastType,
        ) -> Option<(Ptr<'a, U>, usize)> {
            // PANICS: By invariant, the byte range addressed by `self.ptr` does
            // not wrap around the address space. This implies that the sum of
            // the address (represented as a `usize`) and length do not overflow
            // `usize`, as required by `validate_cast_and_convert_metadata`.
            // Thus, this call to `validate_cast_and_convert_metadata` won't
            // panic.
            let (elems, split_at) = U::LAYOUT.validate_cast_and_convert_metadata(
                AsAddress::addr(self.ptr.as_ptr()),
                self.len(),
                cast_type,
            )?;
            // For a prefix cast the `U` starts at offset 0; for a suffix cast
            // it starts at `split_at`.
            let offset = match cast_type {
                _CastType::_Prefix => 0,
                _CastType::_Suffix => split_at,
            };

            let ptr = self.ptr.cast::<u8>().as_ptr();
            // SAFETY: `offset` is either `0` or `split_at`.
            // `validate_cast_and_convert_metadata` promises that `split_at` is
            // in the range `[0, self.len()]`. Thus, in both cases, `offset` is
            // in `[0, self.len()]`. Thus:
            // - The resulting pointer is in or one byte past the end of the
            //   same byte range as `self.ptr`. Since, by invariant, `self.ptr`
            //   addresses a byte range entirely contained within a single
            //   allocation, the pointer resulting from this operation is within
            //   or one byte past the end of that same allocation.
            // - By invariant, `self.len() <= isize::MAX`. Since `offset <=
            //   self.len()`, `offset <= isize::MAX`.
            // - By invariant, `self.ptr` addresses a byte range which does not
            //   wrap around the address space. This means that the base pointer
            //   plus the `self.len()` does not overflow `usize`. Since `offset
            //   <= self.len()`, this addition does not overflow `usize`.
            let base = unsafe { ptr.add(offset) };
            // SAFETY: Since `add` is not allowed to wrap around, the preceding
            // line produces a pointer whose address is greater than or equal to
            // that of `ptr`. Since `ptr` is a `NonNull`, `base` is also
            // non-null.
            let base = unsafe { NonNull::new_unchecked(base) };
            let ptr = U::raw_from_ptr_len(base, elems);
            // SAFETY:
            // - By invariant, `self.ptr` is derived from some valid Rust
            //   allocation, `A`, and has the same provenance as `A`. All
            //   operations performed on `self.ptr` and values derived from it
            //   in this method preserve provenance, so:
            //   - `ptr` is derived from a valid Rust allocation, `A`.
            //   - `ptr` has the same provenance as `A`.
            // - `validate_cast_and_convert_metadata` promises that the object
            //   described by `elems` and `split_at` lives at a byte range which
            //   is a subset of the input byte range. Thus:
            //   - Since, by invariant, `self.ptr` addresses a byte range
            //     entirely contained in `A`, so does `ptr`.
            //   - Since, by invariant, `self.ptr` addresses a range whose
            //     length is not longer than `isize::MAX` bytes, so does `ptr`.
            //   - Since, by invariant, `self.ptr` addresses a range which does
            //     not wrap around the address space, so does `ptr`.
            // - `validate_cast_and_convert_metadata` promises that the object
            //   described by `split_at` is validly-aligned for `U`.
            // - By invariant on `self`, `A` is guaranteed to live for at least
            //   `'a`.
            // - `U: 'a` by trait bound.
            Some((Ptr { ptr, _lifetime: PhantomData }, split_at))
        }

        /// Attempts to cast `self` into a `U`, failing if all of the bytes of
        /// `self` cannot be treated as a `U`.
        ///
        /// In particular, this method fails if `self` is not validly-aligned
        /// for `U` or if `self`'s size is not a valid size for `U`.
        ///
        /// # Safety
        ///
        /// On success, the caller may assume that the returned pointer
        /// references the same byte range as `self`.
        #[allow(unused)]
        #[inline(always)]
        pub(crate) fn try_cast_into_no_leftover<U: 'a + ?Sized + KnownLayout>(
            &self,
        ) -> Option<Ptr<'a, U>> {
            // TODO(#67): Remove this allow. See the `NonNullExt` polyfill in
            // `crate::util::polyfills` for more details.
            #[allow(unstable_name_collisions)]
            match self.try_cast_into(_CastType::_Prefix) {
                Some((slf, split_at)) if split_at == self.len() => Some(slf),
                Some(_) | None => None,
            }
        }
    }
255
    impl<'a, T> Ptr<'a, [T]> {
        /// The number of slice elements referenced by `self`.
        fn len(&self) -> usize {
            // Casting to `*const [()]` lets us read the element count from the
            // pointer metadata without making any claim about the (possibly
            // uninitialized) pointee bytes, since `[()]` is zero-sized.
            #[allow(clippy::as_conversions)]
            let slc = self.ptr.as_ptr() as *const [()];
            // SAFETY:
            // - `()` has alignment 1, so `slc` is trivially aligned.
            // - `slc` was derived from a non-null pointer.
            // - The size is 0 regardless of the length, so it is sound to
            //   materialize a reference regardless of location.
            // - By invariant, `self.ptr` has valid provenance.
            let slc = unsafe { &*slc };
            // This is correct because the preceding `as` cast preserves the
            // number of slice elements. Per
            // https://doc.rust-lang.org/nightly/reference/expressions/operator-expr.html#slice-dst-pointer-to-pointer-cast:
            //
            //   For slice types like `[T]` and `[U]`, the raw pointer types
            //   `*const [T]`, `*mut [T]`, `*const [U]`, and `*mut [U]` encode
            //   the number of elements in this slice. Casts between these raw
            //   pointer types preserve the number of elements. Note that, as a
            //   consequence, such casts do *not* necessarily preserve the size
            //   of the pointer's referent (e.g., casting `*const [u16]` to
            //   `*const [u8]` will result in a raw pointer which refers to an
            //   object of half the size of the original). The same holds for
            //   `str` and any compound type whose unsized tail is a slice type,
            //   such as struct `Foo(i32, [u8])` or `(u64, Foo)`.
            //
            // TODO(#429),
            // TODO(https://github.com/rust-lang/reference/pull/1417): Once this
            // text is available on the Stable docs, cite those instead of the
            // Nightly docs.
            slc.len()
        }
    }
290
    impl<'a, T: 'a + ?Sized> From<&'a T> for Ptr<'a, T> {
        /// Constructs a `Ptr` from a shared reference; this is the only safe
        /// way to create a `Ptr`, since a valid `&'a T` witnesses every `Ptr`
        /// invariant.
        #[inline(always)]
        fn from(t: &'a T) -> Ptr<'a, T> {
            // SAFETY: `t` points to a valid Rust allocation, `A`, by
            // construction. Thus:
            // - `ptr` is derived from `A`
            // - Since we use `NonNull::from`, which preserves provenance, `ptr`
            //   has the same provenance as `A`
            // - Since `NonNull::from` creates a pointer which addresses the
            //   same bytes as `t`, `ptr` addresses a byte range entirely
            //   contained in (in this case, identical to) `A`
            // - Since `t: &T`, it addresses no more than `isize::MAX` bytes [1]
            // - Since `t: &T`, it addresses a byte range which does not wrap
            //   around the address space [2]
            // - Since it is constructed from a valid `&T`, `ptr` is
            //   validly-aligned for `T`
            // - Since `t: &'a T`, the allocation `A` is guaranteed to live for
            //   at least `'a`
            // - `T: 'a` by trait bound
            //
            // TODO(#429),
            // TODO(https://github.com/rust-lang/rust/issues/116181): Once it's
            // documented, reference the guarantee that `NonNull::from`
            // preserves provenance.
            //
            // TODO(#429),
            // TODO(https://github.com/rust-lang/unsafe-code-guidelines/issues/465):
            // - [1] Where does the reference document that allocations fit in
            //   `isize`?
            // - [2] Where does the reference document that allocations don't
            //   wrap around the address space?
            Ptr { ptr: NonNull::from(t), _lifetime: PhantomData }
        }
    }
325
326 impl<'a, T: 'a + ?Sized> Debug for Ptr<'a, T> {
327 #[inline]
328 fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
329 self.ptr.fmt(f)
330 }
331 }
332
    #[cfg(test)]
    mod tests {
        use core::mem::{self, MaybeUninit};

        use super::*;
        use crate::{util::testutil::AU64, FromBytes};

        #[test]
        fn test_ptrtry_cast_into_soundness() {
            // This test is designed so that if `Ptr::try_cast_into_xxx` are
            // buggy, it will manifest as unsoundness that Miri can detect.

            // Exercises every sub-range `[start, end)` of an `N`-byte buffer,
            // where `N` is chosen by the caller as:
            // - If `size_of::<T>() == 0`, `N == 4`
            // - Else, `N == 4 * size_of::<T>()`
            fn test<const N: usize, T: ?Sized + KnownLayout + FromBytes>() {
                let mut bytes = [MaybeUninit::<u8>::uninit(); N];
                let initialized = [MaybeUninit::new(0u8); N];
                for start in 0..=bytes.len() {
                    for end in start..=bytes.len() {
                        // Set all bytes to uninitialized other than those in
                        // the range we're going to pass to `try_cast_from`.
                        // This allows Miri to detect out-of-bounds reads
                        // because they read uninitialized memory. Without this,
                        // some out-of-bounds reads would still be in-bounds of
                        // `bytes`, and so might spuriously be accepted.
                        bytes = [MaybeUninit::<u8>::uninit(); N];
                        let bytes = &mut bytes[start..end];
                        // Initialize only the byte range we're going to pass to
                        // `try_cast_from`.
                        bytes.copy_from_slice(&initialized[start..end]);

                        let bytes = {
                            let bytes: *const [MaybeUninit<u8>] = bytes;
                            #[allow(clippy::as_conversions)]
                            let bytes = bytes as *const [u8];
                            // SAFETY: We just initialized these bytes to valid
                            // `u8`s.
                            unsafe { &*bytes }
                        };

                        /// Validates that `slf` refers to an all-zeroes `T`
                        /// and returns its size in bytes.
                        ///
                        /// # Safety
                        ///
                        /// - `slf` must reference a byte range which is
                        ///   entirely initialized.
                        /// - `slf` must reference a byte range which is only
                        ///   referenced by shared references which do not
                        ///   contain `UnsafeCell`s during its lifetime.
                        unsafe fn validate_and_get_len<T: ?Sized + KnownLayout + FromBytes>(
                            slf: Ptr<'_, T>,
                        ) -> usize {
                            // SAFETY:
                            // - Since all bytes in `slf` are initialized and
                            //   `T: FromBytes`, `slf` contains a valid `T`.
                            // - The caller promises that the referenced memory
                            //   is not also referenced by any mutable
                            //   references.
                            // - The caller promises that the referenced memory
                            //   is not also referenced as a type which contains
                            //   `UnsafeCell`s.
                            let t = unsafe { slf.as_ref() };

                            let bytes = {
                                let len = mem::size_of_val(t);
                                let t: *const T = t;
                                // SAFETY:
                                // - We know `t`'s bytes are all initialized
                                //   because we just read it from `slf`, which
                                //   points to an initialized range of bytes. If
                                //   there's a bug and this doesn't hold, then
                                //   that's exactly what we're hoping Miri will
                                //   catch!
                                // - Since `T: FromBytes`, `T` doesn't contain
                                //   any `UnsafeCell`s, so it's okay for `t: T`
                                //   and a `&[u8]` to the same memory to be
                                //   alive concurrently.
                                unsafe { core::slice::from_raw_parts(t.cast::<u8>(), len) }
                            };

                            // This assertion ensures that `t`'s bytes are read
                            // and compared to another value, which in turn
                            // ensures that Miri gets a chance to notice if any
                            // of `t`'s bytes are uninitialized, which they
                            // shouldn't be (see the comment above).
                            assert_eq!(bytes, vec![0u8; bytes.len()]);

                            mem::size_of_val(t)
                        }

                        for cast_type in [_CastType::_Prefix, _CastType::_Suffix] {
                            if let Some((slf, split_at)) =
                                Ptr::from(bytes).try_cast_into::<T>(cast_type)
                            {
                                // SAFETY: All bytes in `bytes` have been
                                // initialized.
                                let len = unsafe { validate_and_get_len(slf) };
                                // The returned `split_at` must be consistent
                                // with the size of the referenced `T`.
                                match cast_type {
                                    _CastType::_Prefix => assert_eq!(split_at, len),
                                    _CastType::_Suffix => assert_eq!(split_at, bytes.len() - len),
                                }
                            }
                        }

                        if let Some(slf) = Ptr::from(bytes).try_cast_into_no_leftover::<T>() {
                            // SAFETY: All bytes in `bytes` have been
                            // initialized.
                            let len = unsafe { validate_and_get_len(slf) };
                            assert_eq!(len, bytes.len());
                        }
                    }
                }
            }

            // Invokes `test` for each listed type, both as a sized `T` and
            // (when `T` is not zero-sized) as the slice type `[T]`.
            macro_rules! test {
                ($($ty:ty),*) => {
                    $({
                        const S: usize = core::mem::size_of::<$ty>();
                        const N: usize = if S == 0 { 4 } else { S * 4 };
                        test::<N, $ty>();
                        // We don't support casting into DSTs whose trailing slice
                        // element is a ZST.
                        if S > 0 {
                            test::<N, [$ty]>();
                        }
                        // TODO: Test with a slice DST once we have any that
                        // implement `KnownLayout + FromBytes`.
                    })*
                };
            }

            test!(());
            test!(u8, u16, u32, u64, u128, usize, AU64);
            test!(i8, i16, i32, i64, i128, isize);
            test!(f32, f64);
        }
    }
467 }
468}
469
/// A type whose value can be converted to a raw address.
///
/// Implemented below for shared and mutable references and for `*const`/`*mut`
/// raw pointers, including pointers to unsized types.
pub(crate) trait AsAddress {
    /// Returns the address of `self` as a `usize`.
    fn addr(self) -> usize;
}
473
474impl<'a, T: ?Sized> AsAddress for &'a T {
475 #[inline(always)]
476 fn addr(self) -> usize {
477 let ptr: *const T = self;
478 AsAddress::addr(self:ptr)
479 }
480}
481
482impl<'a, T: ?Sized> AsAddress for &'a mut T {
483 #[inline(always)]
484 fn addr(self) -> usize {
485 let ptr: *const T = self;
486 AsAddress::addr(self:ptr)
487 }
488}
489
impl<T: ?Sized> AsAddress for *const T {
    /// Returns the address of the pointer as a `usize`.
    #[inline(always)]
    fn addr(self) -> usize {
        // TODO(#181), TODO(https://github.com/rust-lang/rust/issues/95228): Use
        // `.addr()` instead of `as usize` once it's stable, and get rid of this
        // `allow`. Currently, `as usize` is the only way to accomplish this.
        //
        // `cast::<()>()` discards any pointer metadata (relevant when `T` is
        // unsized), leaving a thin pointer which can be converted with `as`.
        #[allow(clippy::as_conversions)]
        #[cfg_attr(__INTERNAL_USE_ONLY_NIGHLTY_FEATURES_IN_TESTS, allow(lossy_provenance_casts))]
        return self.cast::<()>() as usize;
    }
}
501
502impl<T: ?Sized> AsAddress for *mut T {
503 #[inline(always)]
504 fn addr(self) -> usize {
505 let ptr: *const T = self;
506 AsAddress::addr(self:ptr)
507 }
508}
509
510/// Is `t` aligned to `mem::align_of::<U>()`?
511#[inline(always)]
512pub(crate) fn aligned_to<T: AsAddress, U>(t: T) -> bool {
513 // `mem::align_of::<U>()` is guaranteed to return a non-zero value, which in
514 // turn guarantees that this mod operation will not panic.
515 #[allow(clippy::arithmetic_side_effects)]
516 let remainder: usize = t.addr() % mem::align_of::<U>();
517 remainder == 0
518}
519
/// Round `n` down to the largest value `m` such that `m <= n` and `m % align ==
/// 0`.
///
/// # Panics
///
/// May panic if `align` is not a power of two. Even if it doesn't panic in this
/// case, it will produce nonsense results.
#[inline(always)]
pub(crate) const fn round_down_to_next_multiple_of_alignment(
    n: usize,
    align: NonZeroUsize,
) -> usize {
    let alignment = align.get();
    debug_assert!(alignment.is_power_of_two());

    // Since `alignment` is a power of two, `!(alignment - 1)` is a mask with
    // all bits above the alignment bit set; AND-ing clears the low bits,
    // rounding down. The subtraction cannot underflow because `alignment >= 1`.
    #[allow(clippy::arithmetic_side_effects)]
    return n & !(alignment - 1);
}
540
/// Returns the larger of `a` and `b`; on a tie, returns `a`.
///
/// A `const` stand-in for `Ord::max` on `NonZeroUsize`.
pub(crate) const fn max(a: NonZeroUsize, b: NonZeroUsize) -> NonZeroUsize {
    if b.get() > a.get() {
        b
    } else {
        a
    }
}
548
/// Returns the smaller of `a` and `b`; on a tie, returns `a`.
///
/// A `const` stand-in for `Ord::min` on `NonZeroUsize`.
pub(crate) const fn min(a: NonZeroUsize, b: NonZeroUsize) -> NonZeroUsize {
    if b.get() < a.get() {
        b
    } else {
        a
    }
}
556
/// Since we support multiple versions of Rust, there are often features which
/// have been stabilized in the most recent stable release which do not yet
/// exist (stably) on our MSRV. This module provides polyfills for those
/// features so that we can write more "modern" code, and just remove the
/// polyfill once our MSRV supports the corresponding feature. Without this,
/// we'd have to write worse/more verbose code and leave TODO comments sprinkled
/// throughout the codebase to update to the new pattern once it's stabilized.
///
/// Each trait is imported as `_` at the crate root; each polyfill should "just
/// work" at usage sites.
pub(crate) mod polyfills {
    use core::ptr::{self, NonNull};

    // A polyfill for `NonNull::slice_from_raw_parts` that we can use before our
    // MSRV is 1.70, when that function was stabilized.
    //
    // TODO(#67): Once our MSRV is 1.70, remove this.
    pub(crate) trait NonNullExt<T> {
        /// Forms a non-null raw slice pointer from a thin element pointer and
        /// a length.
        fn slice_from_raw_parts(data: Self, len: usize) -> NonNull<[T]>;
    }

    impl<T> NonNullExt<T> for NonNull<T> {
        #[inline(always)]
        fn slice_from_raw_parts(data: Self, len: usize) -> NonNull<[T]> {
            let ptr: *mut [T] = ptr::slice_from_raw_parts_mut(data.as_ptr(), len);
            // SAFETY: `ptr` is converted from `data`, which is non-null.
            unsafe { NonNull::new_unchecked(ptr) }
        }
    }
}
587
#[cfg(test)]
pub(crate) mod testutil {
    use core::fmt::{self, Display, Formatter};

    use crate::*;

    /// A `T` which is aligned to at least `align_of::<A>()`.
    #[derive(Default)]
    pub(crate) struct Align<T, A> {
        pub(crate) t: T,
        // A zero-length array of `A` contributes `A`'s alignment to the
        // struct without adding any size.
        _a: [A; 0],
    }

    impl<T: Default, A> Align<T, A> {
        /// Resets `self.t` to `T::default()`.
        pub(crate) fn set_default(&mut self) {
            self.t = T::default();
        }
    }

    impl<T, A> Align<T, A> {
        /// Constructs a new `Align` wrapping `t`.
        pub(crate) const fn new(t: T) -> Align<T, A> {
            Align { t, _a: [] }
        }
    }

    // A `u64` with alignment 8.
    //
    // Though `u64` has alignment 8 on some platforms, it's not guaranteed.
    // By contrast, `AU64` is guaranteed to have alignment 8 thanks to the
    // `repr(align(8))` attribute below.
    #[derive(
        KnownLayout,
        FromZeroes,
        FromBytes,
        AsBytes,
        Eq,
        PartialEq,
        Ord,
        PartialOrd,
        Default,
        Debug,
        Copy,
        Clone,
    )]
    #[repr(C, align(8))]
    pub(crate) struct AU64(pub(crate) u64);

    impl AU64 {
        // Converts this `AU64` to bytes using this platform's endianness.
        pub(crate) fn to_bytes(self) -> [u8; 8] {
            crate::transmute!(self)
        }
    }

    impl Display for AU64 {
        fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
            Display::fmt(&self.0, f)
        }
    }

    // A two-field `repr(C)` struct whose trailing field may be unsized
    // (`U: ?Sized`).
    #[derive(
        FromZeroes, FromBytes, Eq, PartialEq, Ord, PartialOrd, Default, Debug, Copy, Clone,
    )]
    #[repr(C)]
    pub(crate) struct Nested<T, U: ?Sized> {
        _t: T,
        _u: U,
    }
}
656
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_round_down_to_next_multiple_of_alignment() {
        // Reference implementation: integer divide-then-multiply rounds `n`
        // down to a multiple of `align`.
        fn alt_impl(n: usize, align: NonZeroUsize) -> usize {
            (n / align.get()) * align.get()
        }

        // Exhaustively compare the bit-masking implementation against the
        // reference for all small inputs and power-of-two alignments.
        for align in [1usize, 2, 4, 8, 16] {
            let align = NonZeroUsize::new(align).unwrap();
            for n in 0..256 {
                let want = alt_impl(n, align);
                let got = round_down_to_next_multiple_of_alignment(n, align);
                assert_eq!(got, want, "round_down_to_next_multiple_of_alignment({n}, {align})");
            }
        }
    }
}
678
#[cfg(kani)]
mod proofs {
    use super::*;

    #[kani::proof]
    fn prove_round_down_to_next_multiple_of_alignment() {
        // Model implementation: divide-then-multiply rounds `n` down to a
        // multiple of `align`.
        fn model_impl(n: usize, align: NonZeroUsize) -> usize {
            assert!(align.get().is_power_of_two());
            let mul = n / align.get();
            mul * align.get()
        }

        let align: NonZeroUsize = kani::any();
        kani::assume(align.get().is_power_of_two());
        let n: usize = kani::any();

        let expected = model_impl(n, align);
        let actual = round_down_to_next_multiple_of_alignment(n, align);
        assert_eq!(expected, actual, "round_down_to_next_multiple_of_alignment({n}, {align})");
    }

    // Restricted to nightly since we use the unstable `usize::next_multiple_of`
    // in our model implementation.
    #[cfg(__INTERNAL_USE_ONLY_NIGHLTY_FEATURES_IN_TESTS)]
    #[kani::proof]
    fn prove_padding_needed_for() {
        // Model implementation: pad `len` up to the next multiple of `align`
        // and report how many bytes of padding that required.
        fn model_impl(len: usize, align: NonZeroUsize) -> usize {
            let padded = len.next_multiple_of(align.get());
            let padding = padded - len;
            padding
        }

        let align: NonZeroUsize = kani::any();
        kani::assume(align.get().is_power_of_two());
        let len: usize = kani::any();
        // Constrain `len` to valid Rust lengths, since our model implementation
        // isn't robust to overflow.
        kani::assume(len <= isize::MAX as usize);
        kani::assume(align.get() < 1 << 29);

        let expected = model_impl(len, align);
        let actual = core_layout::padding_needed_for(len, align);
        assert_eq!(expected, actual, "padding_needed_for({len}, {align})");

        // Sanity checks: the padded length is a multiple of `align` and
        // padding never shrinks the length.
        let padded_len = actual + len;
        assert_eq!(padded_len % align, 0);
        assert!(padded_len / align >= len / align);
    }
}
728