1 | use super::sealed::Sealed; |
2 | use crate::simd::{cmp::SimdPartialEq, num::SimdUint, LaneCount, Mask, Simd, SupportedLaneCount}; |
3 | |
4 | /// Operations on SIMD vectors of mutable pointers. |
5 | pub trait SimdMutPtr: Copy + Sealed { |
6 | /// Vector of `usize` with the same number of elements. |
7 | type Usize; |
8 | |
9 | /// Vector of `isize` with the same number of elements. |
10 | type Isize; |
11 | |
12 | /// Vector of const pointers with the same number of elements. |
13 | type CastPtr<T>; |
14 | |
15 | /// Vector of constant pointers to the same type. |
16 | type ConstPtr; |
17 | |
18 | /// Mask type used for manipulating this SIMD vector type. |
19 | type Mask; |
20 | |
21 | /// Returns `true` for each element that is null. |
22 | fn is_null(self) -> Self::Mask; |
23 | |
24 | /// Casts to a pointer of another type. |
25 | /// |
26 | /// Equivalent to calling [`pointer::cast`] on each element. |
27 | fn cast<T>(self) -> Self::CastPtr<T>; |
28 | |
29 | /// Changes constness without changing the type. |
30 | /// |
31 | /// Equivalent to calling [`pointer::cast_const`] on each element. |
32 | fn cast_const(self) -> Self::ConstPtr; |
33 | |
34 | /// Gets the "address" portion of the pointer. |
35 | /// |
36 | /// This method discards pointer semantic metadata, so the result cannot be |
37 | /// directly cast into a valid pointer. |
38 | /// |
39 | /// Equivalent to calling [`pointer::addr`] on each element. |
40 | fn addr(self) -> Self::Usize; |
41 | |
42 | /// Creates a new pointer with the given address. |
43 | /// |
44 | /// This performs the same operation as a cast, but copies the *address-space* and |
45 | /// *provenance* of `self` to the new pointer. |
46 | /// |
47 | /// Equivalent to calling [`pointer::with_addr`] on each element. |
48 | fn with_addr(self, addr: Self::Usize) -> Self; |
49 | |
50 | /// Exposes the "provenance" part of the pointer for future use in |
51 | /// [`Self::with_exposed_provenance`] and returns the "address" portion. |
52 | fn expose_provenance(self) -> Self::Usize; |
53 | |
54 | /// Convert an address back to a pointer, picking up a previously "exposed" provenance. |
55 | /// |
56 | /// Equivalent to calling [`core::ptr::with_exposed_provenance_mut`] on each element. |
57 | fn with_exposed_provenance(addr: Self::Usize) -> Self; |
58 | |
59 | /// Calculates the offset from a pointer using wrapping arithmetic. |
60 | /// |
61 | /// Equivalent to calling [`pointer::wrapping_offset`] on each element. |
62 | fn wrapping_offset(self, offset: Self::Isize) -> Self; |
63 | |
64 | /// Calculates the offset from a pointer using wrapping arithmetic. |
65 | /// |
66 | /// Equivalent to calling [`pointer::wrapping_add`] on each element. |
67 | fn wrapping_add(self, count: Self::Usize) -> Self; |
68 | |
69 | /// Calculates the offset from a pointer using wrapping arithmetic. |
70 | /// |
71 | /// Equivalent to calling [`pointer::wrapping_sub`] on each element. |
72 | fn wrapping_sub(self, count: Self::Usize) -> Self; |
73 | } |
74 | |
// Marker impl: admits mutable-pointer vectors into the sealed `SimdMutPtr` trait,
// preventing implementations outside this crate.
impl<T, const N: usize> Sealed for Simd<*mut T, N> where LaneCount<N>: SupportedLaneCount {}
76 | |
77 | impl<T, const N: usize> SimdMutPtr for Simd<*mut T, N> |
78 | where |
79 | LaneCount<N>: SupportedLaneCount, |
80 | { |
81 | type Usize = Simd<usize, N>; |
82 | type Isize = Simd<isize, N>; |
83 | type CastPtr<U> = Simd<*mut U, N>; |
84 | type ConstPtr = Simd<*const T, N>; |
85 | type Mask = Mask<isize, N>; |
86 | |
87 | #[inline ] |
88 | fn is_null(self) -> Self::Mask { |
89 | Simd::splat(core::ptr::null_mut()).simd_eq(self) |
90 | } |
91 | |
92 | #[inline ] |
93 | fn cast<U>(self) -> Self::CastPtr<U> { |
94 | // SimdElement currently requires zero-sized metadata, so this should never fail. |
95 | // If this ever changes, `simd_cast_ptr` should produce a post-mono error. |
96 | use core::{mem::size_of, ptr::Pointee}; |
97 | assert_eq!(size_of::<<T as Pointee>::Metadata>(), 0); |
98 | assert_eq!(size_of::<<U as Pointee>::Metadata>(), 0); |
99 | |
100 | // Safety: pointers can be cast |
101 | unsafe { core::intrinsics::simd::simd_cast_ptr(self) } |
102 | } |
103 | |
104 | #[inline ] |
105 | fn cast_const(self) -> Self::ConstPtr { |
106 | // Safety: pointers can be cast |
107 | unsafe { core::intrinsics::simd::simd_cast_ptr(self) } |
108 | } |
109 | |
110 | #[inline ] |
111 | fn addr(self) -> Self::Usize { |
112 | // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic. |
113 | // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the |
114 | // provenance). |
115 | unsafe { core::mem::transmute_copy(&self) } |
116 | } |
117 | |
118 | #[inline ] |
119 | fn with_addr(self, addr: Self::Usize) -> Self { |
120 | // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic. |
121 | // |
122 | // In the mean-time, this operation is defined to be "as if" it was |
123 | // a wrapping_offset, so we can emulate it as such. This should properly |
124 | // restore pointer provenance even under today's compiler. |
125 | self.cast::<u8>() |
126 | .wrapping_offset(addr.cast::<isize>() - self.addr().cast::<isize>()) |
127 | .cast() |
128 | } |
129 | |
130 | #[inline ] |
131 | fn expose_provenance(self) -> Self::Usize { |
132 | // Safety: `self` is a pointer vector |
133 | unsafe { core::intrinsics::simd::simd_expose_provenance(self) } |
134 | } |
135 | |
136 | #[inline ] |
137 | fn with_exposed_provenance(addr: Self::Usize) -> Self { |
138 | // Safety: `self` is a pointer vector |
139 | unsafe { core::intrinsics::simd::simd_with_exposed_provenance(addr) } |
140 | } |
141 | |
142 | #[inline ] |
143 | fn wrapping_offset(self, count: Self::Isize) -> Self { |
144 | // Safety: simd_arith_offset takes a vector of pointers and a vector of offsets |
145 | unsafe { core::intrinsics::simd::simd_arith_offset(self, count) } |
146 | } |
147 | |
148 | #[inline ] |
149 | fn wrapping_add(self, count: Self::Usize) -> Self { |
150 | self.wrapping_offset(count.cast()) |
151 | } |
152 | |
153 | #[inline ] |
154 | fn wrapping_sub(self, count: Self::Usize) -> Self { |
155 | self.wrapping_offset(-count.cast::<isize>()) |
156 | } |
157 | } |
158 | |