use super::*;
use crate::cmp::Ordering::{Equal, Greater, Less};
use crate::intrinsics::const_eval_select;
use crate::mem::SizedTypeProperties;
use crate::slice::{self, SliceIndex};

impl<T: ?Sized> *mut T {
    /// Returns `true` if the pointer is null.
    ///
    /// Note that unsized types have many possible null pointers, as only the
    /// raw data pointer is considered, not their length, vtable, etc.
    /// Therefore, two pointers that are null may still not compare equal to
    /// each other.
    ///
    /// ## Behavior during const evaluation
    ///
    /// When this function is used during const evaluation, it may return `false` for pointers
    /// that turn out to be null at runtime. Specifically, when a pointer to some memory
    /// is offset beyond its bounds in such a way that the resulting pointer is null,
    /// the function will still return `false`. There is no way for CTFE to know
    /// the absolute position of that memory, so we cannot tell if the pointer is
    /// null or not.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    /// assert!(!ptr.is_null());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_unstable(feature = "const_ptr_is_null", issue = "74939")]
    #[rustc_diagnostic_item = "ptr_is_null"]
    #[inline]
    pub const fn is_null(self) -> bool {
        #[inline]
        fn runtime_impl(ptr: *mut u8) -> bool {
            ptr.addr() == 0
        }

        #[inline]
        const fn const_impl(ptr: *mut u8) -> bool {
            // Compare via a cast to a thin pointer, so fat pointers only
            // consider their "data" part for null-ness.
            match (ptr).guaranteed_eq(null_mut()) {
                None => false,
                Some(res) => res,
            }
        }

        // SAFETY: The two versions are equivalent at runtime.
        unsafe { const_eval_select((self as *mut u8,), const_impl, runtime_impl) }
    }

    /// Casts to a pointer of another type.
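    ///
    /// # Examples
    ///
    /// A small illustrative sketch: casting a pointer to an array into a
    /// pointer to its first element's type.
    ///
    /// ```
    /// let mut x = [1u8, 2, 3];
    /// let arr_ptr: *mut [u8; 3] = &mut x;
    /// // The array starts at the address of its first element.
    /// let first: *mut u8 = arr_ptr.cast();
    /// unsafe { assert_eq!(*first, 1) };
    /// ```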
    #[stable(feature = "ptr_cast", since = "1.38.0")]
    #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
    #[rustc_diagnostic_item = "ptr_cast"]
    #[inline(always)]
    pub const fn cast<U>(self) -> *mut U {
        self as _
    }

    /// Use the pointer value in a new pointer of another type.
    ///
    /// In case `meta` is a (fat) pointer to an unsized type, this operation
    /// will ignore the pointer part, whereas for (thin) pointers to sized
    /// types, this has the same effect as a simple cast.
    ///
    /// The resulting pointer will have provenance of `self`, i.e., for a fat
    /// pointer, this operation is semantically the same as creating a new
    /// fat pointer with the data pointer value of `self` but the metadata of
    /// `meta`.
    ///
    /// # Examples
    ///
    /// This function is primarily useful for allowing byte-wise pointer
    /// arithmetic on potentially fat pointers:
    ///
    /// ```
    /// #![feature(set_ptr_value)]
    /// # use core::fmt::Debug;
    /// let mut arr: [i32; 3] = [1, 2, 3];
    /// let mut ptr = arr.as_mut_ptr() as *mut dyn Debug;
    /// let thin = ptr as *mut u8;
    /// unsafe {
    ///     ptr = thin.add(8).with_metadata_of(ptr);
    ///     # assert_eq!(*(ptr as *mut i32), 3);
    ///     println!("{:?}", &*ptr); // will print "3"
    /// }
    /// ```
    #[unstable(feature = "set_ptr_value", issue = "75091")]
    #[rustc_const_unstable(feature = "set_ptr_value", issue = "75091")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline]
    pub const fn with_metadata_of<U>(self, meta: *const U) -> *mut U
    where
        U: ?Sized,
    {
        from_raw_parts_mut::<U>(self as *mut (), metadata(meta))
    }

    /// Changes constness without changing the type.
    ///
    /// This is a bit safer than `as` because it wouldn't silently change the type if the code is
    /// refactored.
    ///
    /// While not strictly required (`*mut T` coerces to `*const T`), this is provided for symmetry
    /// with [`cast_mut`] on `*const T` and may have documentation value if used instead of implicit
    /// coercion.
    ///
    /// [`cast_mut`]: pointer::cast_mut
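    ///
    /// # Examples
    ///
    /// A brief sketch of dropping mutability from a raw pointer:
    ///
    /// ```
    /// let mut x = 5;
    /// let ptr: *mut i32 = &mut x;
    /// let const_ptr: *const i32 = ptr.cast_const();
    /// unsafe { assert_eq!(*const_ptr, 5) };
    /// ```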
    #[stable(feature = "ptr_const_cast", since = "1.65.0")]
    #[rustc_const_stable(feature = "ptr_const_cast", since = "1.65.0")]
    #[rustc_diagnostic_item = "ptr_cast_const"]
    #[inline(always)]
    pub const fn cast_const(self) -> *const T {
        self as _
    }

    /// Casts a pointer to its raw bits.
    ///
    /// This is equivalent to `as usize`, but is more specific to enhance readability.
    /// The inverse method is [`from_bits`](pointer#method.from_bits-1).
    ///
    /// In particular, `*p as usize` and `p as usize` will both compile for
    /// pointers to numeric types but do very different things, so using this
    /// helps emphasize that reading the bits was intentional.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_to_from_bits)]
    /// # #[cfg(not(miri))] { // doctest does not work with strict provenance
    /// let mut array = [13, 42];
    /// let mut it = array.iter_mut();
    /// let p0: *mut i32 = it.next().unwrap();
    /// assert_eq!(<*mut _>::from_bits(p0.to_bits()), p0);
    /// let p1: *mut i32 = it.next().unwrap();
    /// assert_eq!(p1.to_bits() - p0.to_bits(), 4);
    /// # }
    /// ```
    #[unstable(feature = "ptr_to_from_bits", issue = "91126")]
    #[deprecated(
        since = "1.67.0",
        note = "replaced by the `expose_addr` method, or update your code \
            to follow the strict provenance rules using its APIs"
    )]
    #[inline(always)]
    pub fn to_bits(self) -> usize
    where
        T: Sized,
    {
        self as usize
    }

    /// Creates a pointer from its raw bits.
    ///
    /// This is equivalent to `as *mut T`, but is more specific to enhance readability.
    /// The inverse method is [`to_bits`](pointer#method.to_bits-1).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_to_from_bits)]
    /// # #[cfg(not(miri))] { // doctest does not work with strict provenance
    /// use std::ptr::NonNull;
    /// let dangling: *mut u8 = NonNull::dangling().as_ptr();
    /// assert_eq!(<*mut u8>::from_bits(1), dangling);
    /// # }
    /// ```
    #[unstable(feature = "ptr_to_from_bits", issue = "91126")]
    #[deprecated(
        since = "1.67.0",
        note = "replaced by the `ptr::from_exposed_addr_mut` function, or \
            update your code to follow the strict provenance rules using its APIs"
    )]
    #[allow(fuzzy_provenance_casts)] // this is an unstable and semi-deprecated cast function
    #[inline(always)]
    pub fn from_bits(bits: usize) -> Self
    where
        T: Sized,
    {
        bits as Self
    }

    /// Gets the "address" portion of the pointer.
    ///
    /// This is similar to `self as usize`, which semantically discards *provenance* and
    /// *address-space* information. However, unlike `self as usize`, casting the returned address
    /// back to a pointer yields [`invalid`][], which is undefined behavior to dereference. To
    /// properly restore the lost information and obtain a dereferenceable pointer, use
    /// [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr].
    ///
    /// If using those APIs is not possible because there is no way to preserve a pointer with the
    /// required provenance, then Strict Provenance might not be for you. Use pointer-integer casts
    /// or [`expose_addr`][pointer::expose_addr] and [`from_exposed_addr`][from_exposed_addr]
    /// instead. However, note that this makes your code less portable and less amenable to tools
    /// that check for compliance with the Rust memory model.
    ///
    /// On most platforms this will produce a value with the same bytes as the original
    /// pointer, because all the bytes are dedicated to describing the address.
    /// Platforms which need to store additional information in the pointer may
    /// perform a change of representation to produce a value containing only the address
    /// portion of the pointer. What that means is up to the platform to define.
    ///
    /// This API and its claimed semantics are part of the Strict Provenance experiment, and as such
    /// might change in the future (including possibly weakening this so it becomes wholly
    /// equivalent to `self as usize`). See the [module documentation][crate::ptr] for details.
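    ///
    /// # Examples
    ///
    /// A small sketch (assuming the unstable `strict_provenance` feature):
    ///
    /// ```
    /// #![feature(strict_provenance)]
    /// let mut x = 5u32;
    /// let ptr: *mut u32 = &mut x;
    /// // The address alone carries no provenance, but it can be recombined
    /// // with the original pointer's provenance via `with_addr`.
    /// let a = ptr.addr();
    /// assert_eq!(ptr.with_addr(a), ptr);
    /// ```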
    #[must_use]
    #[inline(always)]
    #[unstable(feature = "strict_provenance", issue = "95228")]
    pub fn addr(self) -> usize {
        // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
        // provenance).
        unsafe { mem::transmute(self.cast::<()>()) }
    }

    /// Gets the "address" portion of the pointer, and 'exposes' the "provenance" part for future
    /// use in [`from_exposed_addr_mut`][].
    ///
    /// This is equivalent to `self as usize`, which semantically discards *provenance* and
    /// *address-space* information. Furthermore, this (like the `as` cast) has the implicit
    /// side-effect of marking the provenance as 'exposed', so on platforms that support it you can
    /// later call [`from_exposed_addr_mut`][] to reconstitute the original pointer including its
    /// provenance. (Reconstructing address space information, if required, is your responsibility.)
    ///
    /// Using this method means that code is *not* following [Strict
    /// Provenance][super#strict-provenance] rules. Supporting
    /// [`from_exposed_addr_mut`][] complicates specification and reasoning and may not be supported
    /// by tools that help you to stay conformant with the Rust memory model, so it is recommended
    /// to use [`addr`][pointer::addr] wherever possible.
    ///
    /// On most platforms this will produce a value with the same bytes as the original pointer,
    /// because all the bytes are dedicated to describing the address. Platforms which need to store
    /// additional information in the pointer may not support this operation, since the 'expose'
    /// side-effect which is required for [`from_exposed_addr_mut`][] to work is typically not
    /// available.
    ///
    /// It is unclear whether this method can be given a satisfying unambiguous specification. This
    /// API and its claimed semantics are part of [Exposed Provenance][super#exposed-provenance].
    ///
    /// [`from_exposed_addr_mut`]: from_exposed_addr_mut
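    ///
    /// # Examples
    ///
    /// A small sketch (assuming the unstable `exposed_provenance` feature):
    ///
    /// ```
    /// #![feature(exposed_provenance)]
    /// use core::ptr;
    ///
    /// let mut x = 5u32;
    /// let p: *mut u32 = &mut x;
    /// let a = p.expose_addr();
    /// // Because the provenance was exposed, the address can later be
    /// // turned back into a usable pointer.
    /// let q = ptr::from_exposed_addr_mut::<u32>(a);
    /// unsafe { *q = 7 };
    /// assert_eq!(x, 7);
    /// ```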
    #[must_use]
    #[inline(always)]
    #[unstable(feature = "exposed_provenance", issue = "95228")]
    pub fn expose_addr(self) -> usize {
        // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
        self.cast::<()>() as usize
    }

    /// Creates a new pointer with the given address.
    ///
    /// This performs the same operation as an `addr as ptr` cast, but copies
    /// the *address-space* and *provenance* of `self` to the new pointer.
    /// This allows us to dynamically preserve and propagate this important
    /// information in a way that is otherwise impossible with a unary cast.
    ///
    /// This is equivalent to using [`wrapping_offset`][pointer::wrapping_offset] to offset
    /// `self` to the given address, and therefore has all the same capabilities and restrictions.
    ///
    /// This API and its claimed semantics are an extension to the Strict Provenance experiment,
    /// see the [module documentation][crate::ptr] for details.
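    ///
    /// # Examples
    ///
    /// A small sketch (assuming the unstable `strict_provenance` feature):
    ///
    /// ```
    /// #![feature(strict_provenance)]
    /// let mut data = [1u32, 2];
    /// let p0: *mut u32 = data.as_mut_ptr();
    /// // Rebuild a pointer to the second element from a raw address while
    /// // keeping the provenance of `p0`.
    /// let p1 = p0.with_addr(p0.addr() + core::mem::size_of::<u32>());
    /// unsafe { assert_eq!(*p1, 2) };
    /// ```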
    #[must_use]
    #[inline]
    #[unstable(feature = "strict_provenance", issue = "95228")]
    pub fn with_addr(self, addr: usize) -> Self {
        // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
        //
        // In the meantime, this operation is defined to be "as if" it were
        // a wrapping_offset, so we can emulate it as such. This should properly
        // restore pointer provenance even under today's compiler.
        let self_addr = self.addr() as isize;
        let dest_addr = addr as isize;
        let offset = dest_addr.wrapping_sub(self_addr);

        // This is the canonical desugaring of this operation
        self.wrapping_byte_offset(offset)
    }

    /// Creates a new pointer by mapping `self`'s address to a new one.
    ///
    /// This is a convenience for [`with_addr`][pointer::with_addr], see that method for details.
    ///
    /// This API and its claimed semantics are part of the Strict Provenance experiment,
    /// see the [module documentation][crate::ptr] for details.
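    ///
    /// # Examples
    ///
    /// A small sketch (assuming the unstable `strict_provenance` feature):
    ///
    /// ```
    /// #![feature(strict_provenance)]
    /// let mut v = 0u32;
    /// let ptr: *mut u32 = &mut v;
    /// // Round the address down to a multiple of 4 without losing provenance.
    /// let aligned = ptr.map_addr(|a| a & !0b11);
    /// assert_eq!(aligned.addr() % 4, 0);
    /// ```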
    #[must_use]
    #[inline]
    #[unstable(feature = "strict_provenance", issue = "95228")]
    pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
        self.with_addr(f(self.addr()))
    }

    /// Decompose a (possibly wide) pointer into its data pointer and metadata components.
    ///
    /// The pointer can be later reconstructed with [`from_raw_parts_mut`].
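    ///
    /// # Examples
    ///
    /// A small sketch (assuming the unstable `ptr_metadata` feature); for a
    /// slice, the metadata is its length:
    ///
    /// ```
    /// #![feature(ptr_metadata)]
    /// let mut arr = [1, 2, 3];
    /// let slice_ptr: *mut [i32] = &mut arr;
    /// let (data, len) = slice_ptr.to_raw_parts();
    /// assert_eq!(len, 3);
    /// let rebuilt: *mut [i32] = core::ptr::from_raw_parts_mut(data, len);
    /// assert_eq!(rebuilt, slice_ptr);
    /// ```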
    #[unstable(feature = "ptr_metadata", issue = "81513")]
    #[rustc_const_unstable(feature = "ptr_metadata", issue = "81513")]
    #[inline]
    pub const fn to_raw_parts(self) -> (*mut (), <T as super::Pointee>::Metadata) {
        (self.cast(), super::metadata(self))
    }

    /// Returns `None` if the pointer is null, or else returns a shared reference to
    /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`]
    /// must be used instead.
    ///
    /// For the mutable counterpart see [`as_mut`].
    ///
    /// [`as_uninit_ref`]: pointer#method.as_uninit_ref-1
    /// [`as_mut`]: #method.as_mut
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be properly aligned.
    ///
    /// * It must be "dereferenceable" in the sense defined in [the module documentation].
    ///
    /// * The pointer must point to an initialized instance of `T`.
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get mutated (except inside `UnsafeCell`).
    ///
    /// This applies even if the result of this method is unused!
    /// (The part about being initialized is not yet fully decided, but until
    /// it is, the only safe approach is to ensure the value is indeed initialized.)
    ///
    /// [the module documentation]: crate::ptr#safety
    ///
    /// # Examples
    ///
    /// ```
    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_ref() {
    ///         println!("We got back the value: {val_back}!");
    ///     }
    /// }
    /// ```
    ///
    /// # Null-unchecked version
    ///
    /// If you are sure the pointer can never be null and are looking for some kind of
    /// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can
    /// dereference the pointer directly.
    ///
    /// ```
    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
    ///
    /// unsafe {
    ///     let val_back = &*ptr;
    ///     println!("We got back the value: {val_back}!");
    /// }
    /// ```
    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
    #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
    #[inline]
    pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> {
        // SAFETY: the caller must guarantee that `self` is valid for a
        // reference if it isn't null.
        if self.is_null() { None } else { unsafe { Some(&*self) } }
    }

    /// Returns `None` if the pointer is null, or else returns a shared reference to
    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
    /// that the value has to be initialized.
    ///
    /// For the mutable counterpart see [`as_uninit_mut`].
    ///
    /// [`as_ref`]: pointer#method.as_ref-1
    /// [`as_uninit_mut`]: #method.as_uninit_mut
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be properly aligned.
    ///
    /// * It must be "dereferenceable" in the sense defined in [the module documentation].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get mutated (except inside `UnsafeCell`).
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// [the module documentation]: crate::ptr#safety
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_uninit_ref() {
    ///         println!("We got back the value: {}!", val_back.assume_init());
    ///     }
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
    pub const unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
    where
        T: Sized,
    {
        // SAFETY: the caller must guarantee that `self` meets all the
        // requirements for a reference.
        if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
    }

    /// Calculates the offset from a pointer.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and resulting pointer must be either in bounds or one
    ///   byte past the end of the same [allocated object].
    ///
    /// * The computed offset, **in bytes**, cannot overflow an `isize`.
    ///
    /// * The offset being in bounds cannot rely on "wrapping around" the address
    ///   space. That is, the infinite-precision sum, **in bytes**, must fit in a `usize`.
    ///
    /// The compiler and standard library generally try to ensure allocations
    /// never reach a size where an offset is a concern. For instance, `Vec`
    /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
    /// `vec.as_ptr().add(vec.len())` is always safe.
    ///
    /// Most platforms fundamentally can't even construct such an allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    ///
    /// Consider using [`wrapping_offset`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_offset`]: #method.wrapping_offset
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    ///
    /// unsafe {
    ///     println!("{}", *ptr.offset(1));
    ///     println!("{}", *ptr.offset(2));
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset(self, count: isize) -> *mut T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        // The obtained pointer is valid for writes since the caller must
        // guarantee that it points to the same allocated object as `self`.
        unsafe { intrinsics::offset(self, count) }
    }

    /// Calculates the offset from a pointer in bytes.
    ///
    /// `count` is in units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [offset][pointer::offset] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
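    ///
    /// # Examples
    ///
    /// A small illustrative sketch:
    ///
    /// ```
    /// let mut arr = [1u16, 2, 3];
    /// let ptr: *mut u16 = unsafe { arr.as_mut_ptr().add(1) };
    /// unsafe {
    ///     // Each `u16` is two bytes wide.
    ///     assert_eq!(*ptr.byte_offset(2), 3);
    ///     assert_eq!(*ptr.byte_offset(-2), 1);
    /// }
    /// ```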
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[rustc_allow_const_fn_unstable(set_ptr_value)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_offset(self, count: isize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { self.cast::<u8>().offset(count).with_metadata_of(self) }
    }

    /// Calculates the offset from a pointer using wrapping arithmetic.
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
    /// be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`offset`], this method basically delays the requirement of staying within the
    /// same allocated object: [`offset`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
    /// words, leaving the allocated object and then re-entering it later is permitted.
    ///
    /// [`offset`]: #method.offset
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements
    /// let mut data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *mut u8 = data.as_mut_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_offset(6);
    ///
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         *ptr = 0;
    ///     }
    ///     ptr = ptr.wrapping_offset(step);
    /// }
    /// assert_eq!(&data, &[0, 2, 0, 4, 0]);
    /// ```
    #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_offset(self, count: isize) -> *mut T
    where
        T: Sized,
    {
        // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
        unsafe { intrinsics::arith_offset(self, count) as *mut T }
    }

    /// Calculates the offset from a pointer in bytes using wrapping arithmetic.
    ///
    /// `count` is in units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_offset][pointer::wrapping_offset] on it. See that method
    /// for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[rustc_allow_const_fn_unstable(set_ptr_value)]
    pub const fn wrapping_byte_offset(self, count: isize) -> Self {
        self.cast::<u8>().wrapping_offset(count).with_metadata_of(self)
    }

    /// Masks out bits of the pointer according to a mask.
    ///
    /// This is a convenience for `ptr.map_addr(|a| a & mask)`.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    ///
    /// ## Examples
    ///
    /// ```
    /// #![feature(ptr_mask, strict_provenance)]
    /// let mut v = 17_u32;
    /// let ptr: *mut u32 = &mut v;
    ///
    /// // `u32` is 4-byte aligned,
    /// // which means that the lower 2 bits are always 0.
    /// let tag_mask = 0b11;
    /// let ptr_mask = !tag_mask;
    ///
    /// // We can store something in these lower bits
    /// let tagged_ptr = ptr.map_addr(|a| a | 0b10);
    ///
    /// // Get the "tag" back
    /// let tag = tagged_ptr.addr() & tag_mask;
    /// assert_eq!(tag, 0b10);
    ///
    /// // Note that `tagged_ptr` is unaligned; it's UB to read from/write to it.
    /// // To get the original pointer, `mask` can be used:
    /// let masked_ptr = tagged_ptr.mask(ptr_mask);
    /// assert_eq!(unsafe { *masked_ptr }, 17);
    ///
    /// unsafe { *masked_ptr = 0 };
    /// assert_eq!(v, 0);
    /// ```
    #[unstable(feature = "ptr_mask", issue = "98290")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline(always)]
    pub fn mask(self, mask: usize) -> *mut T {
        intrinsics::ptr_mask(self.cast::<()>(), mask).cast_mut().with_metadata_of(self)
    }

    /// Returns `None` if the pointer is null, or else returns a unique reference to
    /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_mut`]
    /// must be used instead.
    ///
    /// For the shared counterpart see [`as_ref`].
    ///
    /// [`as_uninit_mut`]: #method.as_uninit_mut
    /// [`as_ref`]: pointer#method.as_ref-1
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be properly aligned.
    ///
    /// * It must be "dereferenceable" in the sense defined in [the module documentation].
    ///
    /// * The pointer must point to an initialized instance of `T`.
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get accessed (read or written) through any other pointer.
    ///
    /// This applies even if the result of this method is unused!
    /// (The part about being initialized is not yet fully decided, but until
    /// it is, the only safe approach is to ensure the value is indeed initialized.)
    ///
    /// [the module documentation]: crate::ptr#safety
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    /// let first_value = unsafe { ptr.as_mut().unwrap() };
    /// *first_value = 4;
    /// # assert_eq!(s, [4, 2, 3]);
    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
    /// ```
    ///
    /// # Null-unchecked version
    ///
    /// If you are sure the pointer can never be null and are looking for some kind of
    /// `as_mut_unchecked` that returns the `&mut T` instead of `Option<&mut T>`, know that
    /// you can dereference the pointer directly.
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    /// let first_value = unsafe { &mut *ptr };
    /// *first_value = 4;
    /// # assert_eq!(s, [4, 2, 3]);
    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
    /// ```
    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
    #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
    #[inline]
    pub const unsafe fn as_mut<'a>(self) -> Option<&'a mut T> {
        // SAFETY: the caller must guarantee that `self` is valid for
        // a mutable reference if it isn't null.
        if self.is_null() { None } else { unsafe { Some(&mut *self) } }
    }

    /// Returns `None` if the pointer is null, or else returns a unique reference to
    /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
    /// that the value has to be initialized.
    ///
    /// For the shared counterpart see [`as_uninit_ref`].
    ///
    /// [`as_mut`]: #method.as_mut
    /// [`as_uninit_ref`]: pointer#method.as_uninit_ref-1
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be properly aligned.
    ///
    /// * It must be "dereferenceable" in the sense defined in [the module documentation].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get accessed (read or written) through any other pointer.
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// [the module documentation]: crate::ptr#safety
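    ///
    /// # Examples
    ///
    /// A small sketch (assuming the unstable `ptr_as_uninit` feature),
    /// patterned on the [`as_uninit_ref`] example:
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// let mut x = 0u8;
    /// let ptr: *mut u8 = &mut x;
    ///
    /// unsafe {
    ///     if let Some(val) = ptr.as_uninit_mut() {
    ///         val.write(10);
    ///     }
    /// }
    /// assert_eq!(x, 10);
    /// ```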
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
    pub const unsafe fn as_uninit_mut<'a>(self) -> Option<&'a mut MaybeUninit<T>>
    where
        T: Sized,
    {
        // SAFETY: the caller must guarantee that `self` meets all the
        // requirements for a reference.
        if self.is_null() { None } else { Some(unsafe { &mut *(self as *mut MaybeUninit<T>) }) }
    }

    /// Returns whether two pointers are guaranteed to be equal.
    ///
    /// At runtime this function behaves like `Some(self == other)`.
    /// However, in some contexts (e.g., compile-time evaluation),
    /// it is not always possible to determine equality of two pointers, so this function may
    /// spuriously return `None` for pointers that later actually turn out to have their equality known.
    /// But when it returns `Some`, the pointers' equality is guaranteed to be known.
    ///
    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
    /// version, and unsafe code must not
    /// rely on the result of this function for soundness. It is suggested to only use this function
    /// for performance optimizations where spurious `None` return values by this function do not
    /// affect the outcome, but just the performance.
    /// The consequences of using this method to make runtime and compile-time code behave
    /// differently have not been explored. This method should not be used to introduce such
    /// differences, and it should also not be stabilized before we have a better understanding
    /// of this issue.
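    ///
    /// # Examples
    ///
    /// A small sketch (assuming the unstable `const_raw_ptr_comparison`
    /// feature); at runtime the result is simply `Some(self == other)`:
    ///
    /// ```
    /// #![feature(const_raw_ptr_comparison)]
    /// let mut x = [0i32; 2];
    /// let a: *mut i32 = &mut x[0];
    /// assert_eq!(a.guaranteed_eq(a), Some(true));
    /// assert_eq!(a.guaranteed_eq(a.wrapping_add(1)), Some(false));
    /// ```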
    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[inline]
    pub const fn guaranteed_eq(self, other: *mut T) -> Option<bool>
    where
        T: Sized,
    {
        (self as *const T).guaranteed_eq(other as _)
    }

    /// Returns whether two pointers are guaranteed to be unequal.
    ///
    /// At runtime this function behaves like `Some(self != other)`.
    /// However, in some contexts (e.g., compile-time evaluation),
    /// it is not always possible to determine inequality of two pointers, so this function may
    /// spuriously return `None` for pointers that later actually turn out to have their inequality known.
    /// But when it returns `Some`, the pointers' inequality is guaranteed to be known.
    ///
    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
    /// version, and unsafe code must not
    /// rely on the result of this function for soundness. It is suggested to only use this function
    /// for performance optimizations where spurious `None` return values by this function do not
    /// affect the outcome, but just the performance.
    /// The consequences of using this method to make runtime and compile-time code behave
    /// differently have not been explored. This method should not be used to introduce such
    /// differences, and it should also not be stabilized before we have a better understanding
    /// of this issue.
    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[inline]
    pub const fn guaranteed_ne(self, other: *mut T) -> Option<bool>
    where
        T: Sized,
    {
        (self as *const T).guaranteed_ne(other as _)
    }

    /// Calculates the distance between two pointers. The returned value is in
    /// units of T: the distance in bytes divided by `mem::size_of::<T>()`.
    ///
    /// This is equivalent to `(self as isize - origin as isize) / (mem::size_of::<T>() as isize)`,
    /// except that it has a lot more opportunities for UB, in exchange for the compiler
    /// better understanding what you are doing.
    ///
    /// The primary motivation of this method is for computing the `len` of an array/slice
    /// of `T` that you are currently representing as a "start" and "end" pointer
    /// (and "end" is "one past the end" of the array).
    /// In that case, `end.offset_from(start)` gets you the length of the array.
    ///
    /// All of the following safety requirements are trivially satisfied for this use case.
    ///
    /// [`offset`]: pointer#method.offset-1
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both `self` and `origin` must be either in bounds or one
    ///   byte past the end of the same [allocated object].
    ///
    /// * Both pointers must be *derived from* a pointer to the same object.
    ///   (See below for an example.)
    ///
    /// * The distance between the pointers, in bytes, must be an exact multiple
    ///   of the size of `T`.
    ///
    /// * The distance between the pointers, **in bytes**, cannot overflow an `isize`.
    ///
    /// * The distance being in bounds cannot rely on "wrapping around" the address space.
    ///
    /// Rust types are never larger than `isize::MAX` and Rust allocations never wrap around the
    /// address space, so two pointers within some value of any Rust type `T` will always satisfy
    /// the last two conditions. The standard library also generally ensures that allocations
    /// never reach a size where an offset is a concern. For instance, `Vec` and `Box` ensure they
    /// never allocate more than `isize::MAX` bytes, so `ptr_into_vec.offset_from(vec.as_ptr())`
    /// always satisfies the last two conditions.
    ///
    /// Most platforms fundamentally can't even construct such a large allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    /// (Note that [`offset`] and [`add`] also have a similar limitation and hence cannot be used on
    /// such large allocations either.)
    ///
    /// The requirement for pointers to be derived from the same allocated object is primarily
    /// needed for `const`-compatibility: the distance between pointers into *different* allocated
    /// objects is not known at compile-time. However, the requirement also exists at
    /// runtime and may be exploited by optimizations. If you wish to compute the difference between
    /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
    /// origin as isize) / mem::size_of::<T>()`.
    // FIXME: recommend `addr()` instead of `as usize` once that is stable.
    ///
    /// [`add`]: #method.add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let mut a = [0; 5];
    /// let ptr1: *mut i32 = &mut a[1];
    /// let ptr2: *mut i32 = &mut a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
    ///     assert_eq!(ptr1.offset(2), ptr2);
    ///     assert_eq!(ptr2.offset(-2), ptr1);
    /// }
    /// ```
    ///
    /// *Incorrect* usage:
    ///
    /// ```rust,no_run
    /// let ptr1 = Box::into_raw(Box::new(0u8));
    /// let ptr2 = Box::into_raw(Box::new(1u8));
    /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
    /// // Make ptr2_other an "alias" of ptr2, but derived from ptr1.
    /// let ptr2_other = (ptr1 as *mut u8).wrapping_offset(diff);
    /// assert_eq!(ptr2 as usize, ptr2_other as usize);
    /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
    /// // computing their offset is undefined behavior, even though
    /// // they point to the same address!
    /// unsafe {
    ///     let zero = ptr2_other.offset_from(ptr2); // Undefined Behavior
    /// }
    /// ```
    #[stable(feature = "ptr_offset_from", since = "1.47.0")]
    #[rustc_const_stable(feature = "const_ptr_offset_from", since = "1.65.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset_from(self, origin: *const T) -> isize
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset_from`.
        unsafe { (self as *const T).offset_from(origin) }
    }

    /// Calculates the distance between two pointers. The returned value is in
    /// units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`offset_from`][pointer::offset_from] on it. See that method for
    /// documentation and safety requirements.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointers,
    /// ignoring the metadata.
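    ///
    /// # Examples
    ///
    /// A small illustrative sketch:
    ///
    /// ```
    /// let mut arr = [0u16; 3];
    /// let p0: *mut u16 = arr.as_mut_ptr();
    /// unsafe {
    ///     let p2 = p0.add(2);
    ///     // Two `u16` elements of two bytes each lie between the pointers.
    ///     assert_eq!(p2.byte_offset_from(p0), 4);
    /// }
    /// ```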
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[rustc_allow_const_fn_unstable(set_ptr_value)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: *const U) -> isize {
        // SAFETY: the caller must uphold the safety contract for `offset_from`.
        unsafe { self.cast::<u8>().offset_from(origin.cast::<u8>()) }
    }

    /// Calculates the distance between two pointers, *where it's known that
    /// `self` is equal to or greater than `origin`*. The returned value is in
    /// units of T: the distance in bytes is divided by `mem::size_of::<T>()`.
    ///
    /// This computes the same value that [`offset_from`](#method.offset_from)
    /// would compute, but with the added precondition that the offset is
    /// guaranteed to be non-negative. This method is equivalent to
    /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
    /// but it provides slightly more information to the optimizer, which can
    /// sometimes allow it to optimize slightly better with some backends.
    ///
    /// This method can be thought of as recovering the `count` that was passed
    /// to [`add`](#method.add) (or, with the parameters in the other order,
    /// to [`sub`](#method.sub)). The following are all equivalent, assuming
    /// that their safety preconditions are met:
    /// ```rust
    /// # #![feature(ptr_sub_ptr)]
    /// # unsafe fn blah(ptr: *mut i32, origin: *mut i32, count: usize) -> bool {
    /// ptr.sub_ptr(origin) == count
    /// # &&
    /// origin.add(count) == ptr
    /// # &&
    /// ptr.sub(count) == origin
    /// # }
    /// ```
    ///
    /// # Safety
    ///
    /// - The distance between the pointers must be non-negative (`self >= origin`)
    ///
    /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
    ///   apply to this method as well; see it for the full details.
    ///
    /// Importantly, despite the return type of this method being able to represent
    /// a larger offset, it's still *not permitted* to pass pointers which differ
    /// by more than `isize::MAX` *bytes*. As such, the result of this method will
    /// always be less than or equal to `isize::MAX as usize`.
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_sub_ptr)]
    ///
    /// let mut a = [0; 5];
    /// let p: *mut i32 = a.as_mut_ptr();
    /// unsafe {
    ///     let ptr1: *mut i32 = p.add(1);
    ///     let ptr2: *mut i32 = p.add(3);
    ///
    ///     assert_eq!(ptr2.sub_ptr(ptr1), 2);
    ///     assert_eq!(ptr1.add(2), ptr2);
    ///     assert_eq!(ptr2.sub(2), ptr1);
    ///     assert_eq!(ptr2.sub_ptr(ptr2), 0);
    /// }
    ///
    /// // This would be incorrect, as the pointers are not correctly ordered:
    /// // ptr1.offset_from(ptr2)
    /// ```
    #[unstable(feature = "ptr_sub_ptr", issue = "95892")]
    #[rustc_const_unstable(feature = "const_ptr_sub_ptr", issue = "95892")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn sub_ptr(self, origin: *const T) -> usize
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `sub_ptr`.
        unsafe { (self as *const T).sub_ptr(origin) }
    }

    /// Calculates the offset from a pointer (convenience for `.offset(count as isize)`).
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and resulting pointer must be either in bounds or one
    ///   byte past the end of the same [allocated object].
    ///
    /// * The computed offset, **in bytes**, cannot overflow an `isize`.
    ///
    /// * The offset being in bounds cannot rely on "wrapping around" the address
    ///   space. That is, the infinite-precision sum must fit in a `usize`.
    ///
    /// The compiler and standard library generally try to ensure allocations
    /// never reach a size where an offset is a concern. For instance, `Vec`
    /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
    /// `vec.as_ptr().add(vec.len())` is always safe.
    ///
    /// Most platforms fundamentally can't even construct such an allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    ///
    /// Consider using [`wrapping_add`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_add`]: #method.wrapping_add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     println!("{}", *ptr.add(1) as char);
    ///     println!("{}", *ptr.add(2) as char);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }

    /// Calculates the offset from a pointer in bytes (convenience for `.byte_offset(count as isize)`).
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [add][pointer::add] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
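    ///
    /// # Examples
    ///
    /// A small illustrative sketch:
    ///
    /// ```
    /// let mut arr = [1u16, 2];
    /// let ptr: *mut u16 = arr.as_mut_ptr();
    /// unsafe {
    ///     // A `u16` is two bytes, so a byte offset of 2 is one element.
    ///     assert_eq!(*ptr.byte_add(2), 2);
    /// }
    /// ```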
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[rustc_allow_const_fn_unstable(set_ptr_value)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_add(self, count: usize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `add`.
        unsafe { self.cast::<u8>().add(count).with_metadata_of(self) }
    }

    /// Calculates the offset from a pointer (convenience for
    /// `.offset((count as isize).wrapping_neg())`).
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and resulting pointer must be either in bounds or one
    ///   byte past the end of the same [allocated object].
    ///
    /// * The computed offset cannot exceed `isize::MAX` **bytes**.
    ///
    /// * The offset being in bounds cannot rely on "wrapping around" the address
    ///   space. That is, the infinite-precision sum must fit in a `usize`.
    ///
    /// The compiler and standard library generally try to ensure allocations
    /// never reach a size where an offset is a concern. For instance, `Vec`
    /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
    /// `vec.as_ptr().add(vec.len()).sub(vec.len())` is always safe.
1092 ///
1093 /// Most platforms fundamentally can't even construct such an allocation.
1094 /// For instance, no known 64-bit platform can ever serve a request
1095 /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
1096 /// However, some 32-bit and 16-bit platforms may successfully serve a request for
1097 /// more than `isize::MAX` bytes with things like Physical Address
1098 /// Extension. As such, memory acquired directly from allocators or memory
1099 /// mapped files *may* be too large to handle with this function.
1100 ///
1101 /// Consider using [`wrapping_sub`] instead if these constraints are
1102 /// difficult to satisfy. The only advantage of this method is that it
1103 /// enables more aggressive compiler optimizations.
1104 ///
1105 /// [`wrapping_sub`]: #method.wrapping_sub
1106 /// [allocated object]: crate::ptr#allocated-object
1107 ///
1108 /// # Examples
1109 ///
1110 /// ```
1111 /// let s: &str = "123";
1112 ///
1113 /// unsafe {
1114 /// let end: *const u8 = s.as_ptr().add(3);
1115 /// println!("{}", *end.sub(1) as char);
1116 /// println!("{}", *end.sub(2) as char);
1117 /// }
1118 /// ```
1119 #[stable(feature = "pointer_methods", since = "1.26.0")]
1120 #[must_use = "returns a new pointer rather than modifying its argument"]
1121 #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
1122 // We could always go back to wrapping if unchecked becomes unacceptable
1123 #[rustc_allow_const_fn_unstable(const_int_unchecked_arith)]
1124 #[inline(always)]
1125 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1126 pub const unsafe fn sub(self, count: usize) -> Self
1127 where
1128 T: Sized,
1129 {
1130 if T::IS_ZST {
1131 // Pointer arithmetic does nothing when the pointee is a ZST.
1132 self
1133 } else {
1134 // SAFETY: the caller must uphold the safety contract for `offset`.
1135 // Because the pointee is *not* a ZST, that means that `count` is
1136 // at most `isize::MAX`, and thus the negation cannot overflow.
1137 unsafe { self.offset(intrinsics::unchecked_sub(0, count as isize)) }
1138 }
1139 }
1140
1141 /// Calculates the offset from a pointer in bytes (convenience for
1142 /// `.byte_offset((count as isize).wrapping_neg())`).
1143 ///
1144 /// `count` is in units of bytes.
1145 ///
1146 /// This is purely a convenience for casting to a `u8` pointer and
1147 /// using [sub][pointer::sub] on it. See that method for documentation
1148 /// and safety requirements.
1149 ///
1150 /// For non-`Sized` pointees this operation changes only the data pointer,
1151 /// leaving the metadata untouched.
1152 #[must_use]
1153 #[inline(always)]
1154 #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1155 #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1156 #[rustc_allow_const_fn_unstable(set_ptr_value)]
1157 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1158 pub const unsafe fn byte_sub(self, count: usize) -> Self {
1159 // SAFETY: the caller must uphold the safety contract for `sub`.
1160 unsafe { self.cast::<u8>().sub(count).with_metadata_of(self) }
1161 }
1162
1163 /// Calculates the offset from a pointer using wrapping arithmetic.
1164 /// (convenience for `.wrapping_offset(count as isize)`)
1165 ///
1166 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
1167 /// offset of `3 * size_of::<T>()` bytes.
1168 ///
1169 /// # Safety
1170 ///
1171 /// This operation itself is always safe, but using the resulting pointer is not.
1172 ///
1173 /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
1174 /// be used to read or write other allocated objects.
1175 ///
1176 /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
1177 /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
1178 /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
1179 /// `x` and `y` point into the same allocated object.
1180 ///
1181 /// Compared to [`add`], this method basically delays the requirement of staying within the
1182 /// same allocated object: [`add`] is immediate Undefined Behavior when crossing object
1183 /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
1184 /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
1185 /// can be optimized better and is thus preferable in performance-sensitive code.
1186 ///
1187 /// The delayed check only considers the value of the pointer that was dereferenced, not the
1188 /// intermediate values used during the computation of the final result. For example,
1189 /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
1190 /// allocated object and then re-entering it later is permitted.
1191 ///
1192 /// [`add`]: #method.add
1193 /// [allocated object]: crate::ptr#allocated-object
1194 ///
1195 /// # Examples
1196 ///
1197 /// ```
1198 /// // Iterate using a raw pointer in increments of two elements
1199 /// let data = [1u8, 2, 3, 4, 5];
1200 /// let mut ptr: *const u8 = data.as_ptr();
1201 /// let step = 2;
1202 /// let end_rounded_up = ptr.wrapping_add(6);
1203 ///
1204 /// // This loop prints "1, 3, 5, "
1205 /// while ptr != end_rounded_up {
1206 /// unsafe {
1207 /// print!("{}, ", *ptr);
1208 /// }
1209 /// ptr = ptr.wrapping_add(step);
1210 /// }
1211 /// ```
1212 #[stable(feature = "pointer_methods", since = "1.26.0")]
1213 #[must_use = "returns a new pointer rather than modifying its argument"]
1214 #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
1215 #[inline(always)]
1216 pub const fn wrapping_add(self, count: usize) -> Self
1217 where
1218 T: Sized,
1219 {
1220 self.wrapping_offset(count as isize)
1221 }
1222
1223 /// Calculates the offset from a pointer in bytes using wrapping arithmetic.
1224 /// (convenience for `.wrapping_byte_offset(count as isize)`)
1225 ///
1226 /// `count` is in units of bytes.
1227 ///
1228 /// This is purely a convenience for casting to a `u8` pointer and
1229 /// using [wrapping_add][pointer::wrapping_add] on it. See that method for documentation.
1230 ///
1231 /// For non-`Sized` pointees this operation changes only the data pointer,
1232 /// leaving the metadata untouched.
1233 #[must_use]
1234 #[inline(always)]
1235 #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1236 #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1237 #[rustc_allow_const_fn_unstable(set_ptr_value)]
1238 pub const fn wrapping_byte_add(self, count: usize) -> Self {
1239 self.cast::<u8>().wrapping_add(count).with_metadata_of(self)
1240 }
1241
    /// Calculates the offset from a pointer using wrapping arithmetic.
    /// (convenience for `.wrapping_offset((count as isize).wrapping_neg())`)
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
    /// be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`sub`], this method basically delays the requirement of staying within the
    /// same allocated object: [`sub`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocated object and then re-entering it later is permitted.
    ///
    /// [`sub`]: #method.sub
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements (backwards)
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let start_rounded_down = ptr.wrapping_sub(2);
    /// ptr = ptr.wrapping_add(4);
    /// let step = 2;
    /// // This loop prints "5, 3, 1, "
    /// while ptr != start_rounded_down {
    ///     unsafe {
    ///         print!("{}, ", *ptr);
    ///     }
    ///     ptr = ptr.wrapping_sub(step);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        self.wrapping_offset((count as isize).wrapping_neg())
    }

    /// Calculates the offset from a pointer in bytes using wrapping arithmetic.
    /// (convenience for `.wrapping_byte_offset((count as isize).wrapping_neg())`)
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_sub][pointer::wrapping_sub] on it. See that method for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
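    ///
    /// # Examples
    ///
    /// A small sketch, again using `u16` so that one element is two bytes:
    ///
    /// ```
    /// let mut arr = [1u16, 2, 3];
    /// let ptr: *mut u16 = arr.as_mut_ptr().wrapping_add(2);
    /// assert_eq!(ptr.wrapping_byte_sub(2), ptr.wrapping_sub(1));
    /// ```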
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[rustc_allow_const_fn_unstable(set_ptr_value)]
    pub const fn wrapping_byte_sub(self, count: usize) -> Self {
        self.cast::<u8>().wrapping_sub(count).with_metadata_of(self)
    }

    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// See [`ptr::read`] for safety concerns and examples.
    ///
    /// [`ptr::read`]: crate::ptr::read()
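    ///
    /// # Examples
    ///
    /// A minimal sketch; the full safety contract is documented at [`ptr::read`]:
    ///
    /// ```
    /// let mut x = 12;
    /// let ptr = &mut x as *mut i32;
    /// // SAFETY: `ptr` is valid, aligned, and points to an initialized `i32`.
    /// assert_eq!(unsafe { ptr.read() }, 12);
    /// ```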
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn read(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read`.
        unsafe { read(self) }
    }

    /// Performs a volatile read of the value from `self` without moving it. This
    /// leaves the memory in `self` unchanged.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::read_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
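    ///
    /// # Examples
    ///
    /// A minimal sketch on ordinary memory (real uses typically involve
    /// memory-mapped I/O registers):
    ///
    /// ```
    /// let mut x = 400u32;
    /// let ptr = &mut x as *mut u32;
    /// // SAFETY: `ptr` is valid, aligned, and points to an initialized `u32`.
    /// assert_eq!(unsafe { ptr.read_volatile() }, 400);
    /// ```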
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub unsafe fn read_volatile(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_volatile`.
        unsafe { read_volatile(self) }
    }

    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// Unlike `read`, the pointer may be unaligned.
    ///
    /// See [`ptr::read_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
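    ///
    /// # Examples
    ///
    /// A minimal sketch reading a `u32` from an arbitrary byte offset:
    ///
    /// ```
    /// let mut bytes = [0x12u8, 0x34, 0x56, 0x78, 0x9a];
    /// // A `u32` pointer one byte into the array is (almost certainly) unaligned.
    /// let unaligned = bytes.as_mut_ptr().wrapping_add(1) as *mut u32;
    /// // SAFETY: the four bytes at `unaligned` are in-bounds and initialized.
    /// let v = unsafe { unaligned.read_unaligned() };
    /// assert_eq!(v, u32::from_ne_bytes([0x34, 0x56, 0x78, 0x9a]));
    /// ```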
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn read_unaligned(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
        unsafe { read_unaligned(self) }
    }

    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: crate::ptr::copy()
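    ///
    /// # Examples
    ///
    /// A minimal sketch copying within one buffer (overlap is allowed):
    ///
    /// ```
    /// let mut v = [1, 2, 3, 0, 0, 0];
    /// let ptr = v.as_mut_ptr();
    /// // SAFETY: both ranges are in-bounds of the same live allocation.
    /// unsafe { ptr.copy_to(ptr.add(3), 3) };
    /// assert_eq!(v, [1, 2, 3, 1, 2, 3]);
    /// ```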
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.63.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy`.
        unsafe { copy(self, dest, count) }
    }

    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
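    ///
    /// # Examples
    ///
    /// A minimal sketch copying between two disjoint buffers:
    ///
    /// ```
    /// let mut src = [1u8, 2, 3];
    /// let mut dst = [0u8; 3];
    /// // SAFETY: the buffers are distinct, live, and large enough for 3 elements.
    /// unsafe { src.as_mut_ptr().copy_to_nonoverlapping(dst.as_mut_ptr(), 3) };
    /// assert_eq!(dst, [1, 2, 3]);
    /// ```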
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.63.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
        unsafe { copy_nonoverlapping(self, dest, count) }
    }

    /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: crate::ptr::copy()
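    ///
    /// # Examples
    ///
    /// A minimal sketch; note that `self` is the *destination* here:
    ///
    /// ```
    /// let src = [7i32, 8, 9];
    /// let mut dst = [0i32; 3];
    /// // SAFETY: both buffers are live and large enough for 3 elements.
    /// unsafe { dst.as_mut_ptr().copy_from(src.as_ptr(), 3) };
    /// assert_eq!(dst, [7, 8, 9]);
    /// ```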
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.63.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_from(self, src: *const T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy`.
        unsafe { copy(src, self, count) }
    }

    /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
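    ///
    /// # Examples
    ///
    /// A minimal sketch with disjoint buffers, with `self` as the destination:
    ///
    /// ```
    /// let src = [b'a', b'b', b'c'];
    /// let mut dst = [0u8; 3];
    /// // SAFETY: the buffers are distinct, live, and large enough for 3 elements.
    /// unsafe { dst.as_mut_ptr().copy_from_nonoverlapping(src.as_ptr(), 3) };
    /// assert_eq!(&dst, b"abc");
    /// ```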
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.63.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_from_nonoverlapping(self, src: *const T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
        unsafe { copy_nonoverlapping(src, self, count) }
    }

    /// Executes the destructor (if any) of the pointed-to value.
    ///
    /// See [`ptr::drop_in_place`] for safety concerns and examples.
    ///
    /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place()
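    ///
    /// # Examples
    ///
    /// A minimal sketch using an `Rc` whose drop we can observe through a weak
    /// reference:
    ///
    /// ```
    /// use std::mem::ManuallyDrop;
    /// use std::rc::Rc;
    ///
    /// let mut last = ManuallyDrop::new(Rc::new(1));
    /// let weak = Rc::downgrade(&last);
    /// // SAFETY: the `Rc` is valid and is never used (or dropped) again afterwards.
    /// unsafe { (&mut *last as *mut Rc<i32>).drop_in_place() };
    /// assert!(weak.upgrade().is_none());
    /// ```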
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    pub unsafe fn drop_in_place(self) {
        // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
        unsafe { drop_in_place(self) }
    }

    /// Overwrites a memory location with the given value without reading or
    /// dropping the old value.
    ///
    /// See [`ptr::write`] for safety concerns and examples.
    ///
    /// [`ptr::write`]: crate::ptr::write()
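    ///
    /// # Examples
    ///
    /// A minimal sketch; the full safety contract is documented at [`ptr::write`]:
    ///
    /// ```
    /// let mut x = 0;
    /// let ptr = &mut x as *mut i32;
    /// // SAFETY: `ptr` is valid and aligned, and `i32` needs no drop.
    /// unsafe { ptr.write(5) };
    /// assert_eq!(x, 5);
    /// ```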
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn write(self, val: T)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `write`.
        unsafe { write(self, val) }
    }

    /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
    /// bytes of memory starting at `self` to `val`.
    ///
    /// See [`ptr::write_bytes`] for safety concerns and examples.
    ///
    /// [`ptr::write_bytes`]: crate::ptr::write_bytes()
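    ///
    /// # Examples
    ///
    /// A minimal sketch zeroing a buffer:
    ///
    /// ```
    /// let mut v = [1u32, 2, 3];
    /// // SAFETY: the pointer is valid for writes of 3 `u32`s, and any byte
    /// // pattern is a valid `u32`.
    /// unsafe { v.as_mut_ptr().write_bytes(0, 3) };
    /// assert_eq!(v, [0, 0, 0]);
    /// ```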
    #[doc(alias = "memset")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn write_bytes(self, val: u8, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `write_bytes`.
        unsafe { write_bytes(self, val, count) }
    }

    /// Performs a volatile write of a memory location with the given value without
    /// reading or dropping the old value.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::write_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::write_volatile`]: crate::ptr::write_volatile()
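    ///
    /// # Examples
    ///
    /// A minimal sketch on ordinary memory (real uses typically involve
    /// memory-mapped I/O registers):
    ///
    /// ```
    /// let mut x = 0u32;
    /// let ptr = &mut x as *mut u32;
    /// // SAFETY: `ptr` is valid and aligned.
    /// unsafe { ptr.write_volatile(42) };
    /// assert_eq!(x, 42);
    /// ```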
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub unsafe fn write_volatile(self, val: T)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `write_volatile`.
        unsafe { write_volatile(self, val) }
    }

    /// Overwrites a memory location with the given value without reading or
    /// dropping the old value.
    ///
    /// Unlike `write`, the pointer may be unaligned.
    ///
    /// See [`ptr::write_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned()
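    ///
    /// # Examples
    ///
    /// A minimal sketch writing a `u32` at an arbitrary byte offset:
    ///
    /// ```
    /// let mut bytes = [0u8; 5];
    /// // A `u32` pointer one byte into the array is (almost certainly) unaligned.
    /// let unaligned = bytes.as_mut_ptr().wrapping_add(1) as *mut u32;
    /// // SAFETY: the four bytes at `unaligned` are in-bounds of the array.
    /// unsafe { unaligned.write_unaligned(u32::from_ne_bytes([1, 2, 3, 4])) };
    /// assert_eq!(bytes, [0, 1, 2, 3, 4]);
    /// ```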
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn write_unaligned(self, val: T)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
        unsafe { write_unaligned(self, val) }
    }

    /// Replaces the value at `self` with `src`, returning the old
    /// value, without dropping either.
    ///
    /// See [`ptr::replace`] for safety concerns and examples.
    ///
    /// [`ptr::replace`]: crate::ptr::replace()
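    ///
    /// # Examples
    ///
    /// A minimal sketch; the full safety contract is documented at [`ptr::replace`]:
    ///
    /// ```
    /// let mut x = 1;
    /// let ptr = &mut x as *mut i32;
    /// // SAFETY: `ptr` is valid, aligned, and points to an initialized `i32`.
    /// let old = unsafe { ptr.replace(2) };
    /// assert_eq!(old, 1);
    /// assert_eq!(x, 2);
    /// ```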
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    pub unsafe fn replace(self, src: T) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `replace`.
        unsafe { replace(self, src) }
    }

    /// Swaps the values at two mutable locations of the same type, without
    /// deinitializing either. They may overlap, unlike `mem::swap` which is
    /// otherwise equivalent.
    ///
    /// See [`ptr::swap`] for safety concerns and examples.
    ///
    /// [`ptr::swap`]: crate::ptr::swap()
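    ///
    /// # Examples
    ///
    /// A minimal sketch; the full safety contract is documented at [`ptr::swap`]:
    ///
    /// ```
    /// let mut a = [0u8, 1];
    /// let ptr = a.as_mut_ptr();
    /// // SAFETY: both pointers are valid, aligned, and point to initialized data.
    /// unsafe { ptr.swap(ptr.add(1)) };
    /// assert_eq!(a, [1, 0]);
    /// ```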
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_unstable(feature = "const_swap", issue = "83163")]
    #[inline(always)]
    pub const unsafe fn swap(self, with: *mut T)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `swap`.
        unsafe { swap(self, with) }
    }

    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
    /// `align`.
    ///
    /// If it is not possible to align the pointer, the implementation returns
    /// `usize::MAX`. It is permissible for the implementation to *always*
    /// return `usize::MAX`. Only your algorithm's performance can depend
    /// on getting a usable offset here, not its correctness.
    ///
    /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
    /// used with the `wrapping_add` method.
    ///
    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
    /// the returned offset is correct in all terms other than alignment.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two.
    ///
    /// # Examples
    ///
    /// Accessing adjacent `u8` as `u16`
    ///
    /// ```
    /// use std::mem::align_of;
    ///
    /// # unsafe {
    /// let mut x = [5_u8, 6, 7, 8, 9];
    /// let ptr = x.as_mut_ptr();
    /// let offset = ptr.align_offset(align_of::<u16>());
    ///
    /// if offset < x.len() - 1 {
    ///     let u16_ptr = ptr.add(offset).cast::<u16>();
    ///     *u16_ptr = 0;
    ///
    ///     assert!(x == [0, 0, 7, 8, 9] || x == [5, 0, 0, 8, 9]);
    /// } else {
    ///     // while the pointer can be aligned via `offset`, it would point
    ///     // outside the allocation
    /// }
    /// # }
    /// ```
    #[must_use]
    #[inline]
    #[stable(feature = "align_offset", since = "1.36.0")]
    #[rustc_const_unstable(feature = "const_align_offset", issue = "90962")]
    pub const fn align_offset(self, align: usize) -> usize
    where
        T: Sized,
    {
        if !align.is_power_of_two() {
            panic!("align_offset: align is not a power-of-two");
        }

        // SAFETY: `align` has been checked to be a power of 2 above
        let ret = unsafe { align_offset(self, align) };

        // Inform Miri that we want to consider the resulting pointer to be suitably aligned.
        #[cfg(miri)]
        if ret != usize::MAX {
            intrinsics::miri_promise_symbolic_alignment(
                self.wrapping_add(ret).cast_const().cast(),
                align,
            );
        }

        ret
    }

    /// Returns whether the pointer is properly aligned for `T`.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(pointer_is_aligned)]
    ///
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let mut data = AlignedI32(42);
    /// let ptr = &mut data as *mut AlignedI32;
    ///
    /// assert!(ptr.is_aligned());
    /// assert!(!ptr.wrapping_byte_add(1).is_aligned());
    /// ```
    ///
    /// # At compiletime
    /// **Note: Alignment at compiletime is experimental and subject to change. See the
    /// [tracking issue] for details.**
    ///
    /// At compiletime, the compiler may not know where a value will end up in memory.
    /// Calling this function on a pointer created from a reference at compiletime will only
    /// return `true` if the pointer is guaranteed to be aligned. This means that the pointer
    /// is never aligned if cast to a type with a stricter alignment than the reference's
    /// underlying allocation.
    ///
    /// ```
    /// #![feature(pointer_is_aligned)]
    /// #![feature(const_pointer_is_aligned)]
    /// #![feature(const_mut_refs)]
    ///
    /// // On some platforms, the alignment of primitives is less than their size.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    /// #[repr(align(8))]
    /// struct AlignedI64(i64);
    ///
    /// const _: () = {
    ///     let mut data = AlignedI32(42);
    ///     let ptr = &mut data as *mut AlignedI32;
    ///     assert!(ptr.is_aligned());
    ///
    ///     // At runtime either `ptr1` or `ptr2` would be aligned, but at compiletime neither is aligned.
    ///     let ptr1 = ptr.cast::<AlignedI64>();
    ///     let ptr2 = ptr.wrapping_add(1).cast::<AlignedI64>();
    ///     assert!(!ptr1.is_aligned());
    ///     assert!(!ptr2.is_aligned());
    /// };
    /// ```
    ///
    /// Due to this behavior, it is possible that a runtime pointer derived from a compiletime
    /// pointer is aligned, even if the compiletime pointer wasn't aligned.
    ///
    /// ```
    /// #![feature(pointer_is_aligned)]
    /// #![feature(const_pointer_is_aligned)]
    ///
    /// // On some platforms, the alignment of primitives is less than their size.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    /// #[repr(align(8))]
    /// struct AlignedI64(i64);
    ///
    /// // At compiletime, neither `COMPTIME_PTR` nor `COMPTIME_PTR + 1` is aligned.
    /// // Also, note that mutable references are not allowed in the final value of constants.
    /// const COMPTIME_PTR: *mut AlignedI32 = (&AlignedI32(42) as *const AlignedI32).cast_mut();
    /// const _: () = assert!(!COMPTIME_PTR.cast::<AlignedI64>().is_aligned());
    /// const _: () = assert!(!COMPTIME_PTR.wrapping_add(1).cast::<AlignedI64>().is_aligned());
    ///
    /// // At runtime, either `runtime_ptr` or `runtime_ptr + 1` is aligned.
    /// let runtime_ptr = COMPTIME_PTR;
    /// assert_ne!(
    ///     runtime_ptr.cast::<AlignedI64>().is_aligned(),
    ///     runtime_ptr.wrapping_add(1).cast::<AlignedI64>().is_aligned(),
    /// );
    /// ```
    ///
    /// If a pointer is created from a fixed address, this function behaves the same during
    /// runtime and compiletime.
    ///
    /// ```
    /// #![feature(pointer_is_aligned)]
    /// #![feature(const_pointer_is_aligned)]
    ///
    /// // On some platforms, the alignment of primitives is less than their size.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    /// #[repr(align(8))]
    /// struct AlignedI64(i64);
    ///
    /// const _: () = {
    ///     let ptr = 40 as *mut AlignedI32;
    ///     assert!(ptr.is_aligned());
    ///
    ///     // For pointers with a known address, runtime and compiletime behavior are identical.
    ///     let ptr1 = ptr.cast::<AlignedI64>();
    ///     let ptr2 = ptr.wrapping_add(1).cast::<AlignedI64>();
    ///     assert!(ptr1.is_aligned());
    ///     assert!(!ptr2.is_aligned());
    /// };
    /// ```
    ///
    /// [tracking issue]: https://github.com/rust-lang/rust/issues/104203
    #[must_use]
    #[inline]
    #[unstable(feature = "pointer_is_aligned", issue = "96284")]
    #[rustc_const_unstable(feature = "const_pointer_is_aligned", issue = "104203")]
    pub const fn is_aligned(self) -> bool
    where
        T: Sized,
    {
        self.is_aligned_to(mem::align_of::<T>())
    }

    /// Returns whether the pointer is aligned to `align`.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointer,
    /// ignoring the metadata.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two (this includes 0).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(pointer_is_aligned)]
    ///
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let mut data = AlignedI32(42);
    /// let ptr = &mut data as *mut AlignedI32;
    ///
    /// assert!(ptr.is_aligned_to(1));
    /// assert!(ptr.is_aligned_to(2));
    /// assert!(ptr.is_aligned_to(4));
    ///
    /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
    /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
    ///
    /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
    /// ```
    ///
    /// # At compiletime
    /// **Note: Alignment at compiletime is experimental and subject to change. See the
    /// [tracking issue] for details.**
    ///
    /// At compiletime, the compiler may not know where a value will end up in memory.
    /// Calling this function on a pointer created from a reference at compiletime will only
    /// return `true` if the pointer is guaranteed to be aligned. This means that the pointer
    /// cannot be aligned more strictly than the reference's underlying allocation.
    ///
    /// ```
    /// #![feature(pointer_is_aligned)]
    /// #![feature(const_pointer_is_aligned)]
    /// #![feature(const_mut_refs)]
    ///
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// const _: () = {
    ///     let mut data = AlignedI32(42);
    ///     let ptr = &mut data as *mut AlignedI32;
    ///
    ///     assert!(ptr.is_aligned_to(1));
    ///     assert!(ptr.is_aligned_to(2));
    ///     assert!(ptr.is_aligned_to(4));
    ///
    ///     // At compiletime, we know for sure that the pointer isn't aligned to 8.
    ///     assert!(!ptr.is_aligned_to(8));
    ///     assert!(!ptr.wrapping_add(1).is_aligned_to(8));
    /// };
    /// ```
    ///
    /// Due to this behavior, it is possible that a runtime pointer derived from a compiletime
    /// pointer is aligned, even if the compiletime pointer wasn't aligned.
    ///
    /// ```
    /// #![feature(pointer_is_aligned)]
    /// #![feature(const_pointer_is_aligned)]
    ///
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// // At compiletime, neither `COMPTIME_PTR` nor `COMPTIME_PTR + 1` is aligned.
    /// // Also, note that mutable references are not allowed in the final value of constants.
    /// const COMPTIME_PTR: *mut AlignedI32 = (&AlignedI32(42) as *const AlignedI32).cast_mut();
    /// const _: () = assert!(!COMPTIME_PTR.is_aligned_to(8));
    /// const _: () = assert!(!COMPTIME_PTR.wrapping_add(1).is_aligned_to(8));
    ///
    /// // At runtime, either `runtime_ptr` or `runtime_ptr + 1` is aligned.
    /// let runtime_ptr = COMPTIME_PTR;
    /// assert_ne!(
    ///     runtime_ptr.is_aligned_to(8),
    ///     runtime_ptr.wrapping_add(1).is_aligned_to(8),
    /// );
    /// ```
    ///
    /// If a pointer is created from a fixed address, this function behaves the same during
    /// runtime and compiletime.
    ///
    /// ```
    /// #![feature(pointer_is_aligned)]
    /// #![feature(const_pointer_is_aligned)]
    ///
    /// const _: () = {
    ///     let ptr = 40 as *mut u8;
    ///     assert!(ptr.is_aligned_to(1));
    ///     assert!(ptr.is_aligned_to(2));
    ///     assert!(ptr.is_aligned_to(4));
    ///     assert!(ptr.is_aligned_to(8));
    ///     assert!(!ptr.is_aligned_to(16));
    /// };
    /// ```
    ///
    /// [tracking issue]: https://github.com/rust-lang/rust/issues/104203
    #[must_use]
    #[inline]
    #[unstable(feature = "pointer_is_aligned", issue = "96284")]
    #[rustc_const_unstable(feature = "const_pointer_is_aligned", issue = "104203")]
    pub const fn is_aligned_to(self, align: usize) -> bool {
        if !align.is_power_of_two() {
            panic!("is_aligned_to: align is not a power-of-two");
        }

        #[inline]
        fn runtime_impl(ptr: *mut (), align: usize) -> bool {
            ptr.addr() & (align - 1) == 0
        }

        #[inline]
        const fn const_impl(ptr: *mut (), align: usize) -> bool {
            // We can't use the address of `self` in a `const fn`, so we use `align_offset` instead.
            // The cast to `()` is used to
            // 1. deal with fat pointers; and
            // 2. ensure that `align_offset` doesn't actually try to compute an offset.
            ptr.align_offset(align) == 0
        }

        // SAFETY: The two versions are equivalent at runtime.
        unsafe { const_eval_select((self.cast::<()>(), align), const_impl, runtime_impl) }
    }
}

impl<T> *mut [T] {
    /// Returns the length of a raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the raw slice cannot be cast to a slice
    /// reference because the pointer is null or unaligned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_len)]
    /// use std::ptr;
    ///
    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[inline(always)]
    #[unstable(feature = "slice_ptr_len", issue = "71146")]
    #[rustc_const_unstable(feature = "const_slice_ptr_len", issue = "71146")]
    pub const fn len(self) -> usize {
        metadata(self)
    }

    /// Returns `true` if the raw slice has a length of 0.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(slice_ptr_len)]
    /// use std::ptr;
    ///
    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
    /// assert!(!slice.is_empty());
    /// ```
    #[inline(always)]
    #[unstable(feature = "slice_ptr_len", issue = "71146")]
    #[rustc_const_unstable(feature = "const_slice_ptr_len", issue = "71146")]
    pub const fn is_empty(self) -> bool {
        self.len() == 0
    }

    /// Divides one mutable raw slice into two at an index.
    ///
    /// The first will contain all indices from `[0, mid)` (excluding
    /// the index `mid` itself) and the second will contain all
    /// indices from `[mid, len)` (excluding the index `len` itself).
    ///
    /// # Panics
    ///
    /// Panics if `mid > len`.
    ///
    /// # Safety
    ///
    /// `mid` must be [in-bounds] of the underlying [allocated object], which
    /// means `self` must be dereferenceable and span a single allocation that
    /// is at least `mid * size_of::<T>()` bytes long. Not upholding these
    /// requirements is *[undefined behavior]* even if the resulting pointers are not used.
    ///
    /// Since `len` being in-bounds is not a safety invariant of `*mut [T]`, the
    /// safety requirements of this method are the same as for [`split_at_mut_unchecked`].
    /// The explicit bounds check is only as useful as `len` is correct.
    ///
    /// [`split_at_mut_unchecked`]: #method.split_at_mut_unchecked
    /// [in-bounds]: #method.add
    /// [allocated object]: crate::ptr#allocated-object
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(raw_slice_split)]
    /// #![feature(slice_ptr_get)]
    ///
    /// let mut v = [1, 0, 3, 0, 5, 6];
    /// let ptr = &mut v as *mut [_];
    /// unsafe {
    ///     let (left, right) = ptr.split_at_mut(2);
    ///     assert_eq!(&*left, [1, 0]);
    ///     assert_eq!(&*right, [3, 0, 5, 6]);
    /// }
    /// ```
    #[inline(always)]
    #[track_caller]
    #[unstable(feature = "raw_slice_split", issue = "95595")]
    pub unsafe fn split_at_mut(self, mid: usize) -> (*mut [T], *mut [T]) {
        assert!(mid <= self.len());
        // SAFETY: The assert above is only a safety-net as long as `self.len()` is correct
        // The actual safety requirements of this function are the same as for `split_at_mut_unchecked`
        unsafe { self.split_at_mut_unchecked(mid) }
    }

    /// Divides one mutable raw slice into two at an index, without doing bounds checking.
    ///
    /// The first will contain all indices from `[0, mid)` (excluding
    /// the index `mid` itself) and the second will contain all
    /// indices from `[mid, len)` (excluding the index `len` itself).
    ///
    /// # Safety
    ///
    /// `mid` must be [in-bounds] of the underlying [allocated object], which
    /// means `self` must be dereferenceable and span a single allocation that
    /// is at least `mid * size_of::<T>()` bytes long. Not upholding these
    /// requirements is *[undefined behavior]* even if the resulting pointers are not used.
    ///
    /// [in-bounds]: #method.add
    /// [allocated object]: crate::ptr#allocated-object
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(raw_slice_split)]
    ///
    /// let mut v = [1, 0, 3, 0, 5, 6];
    /// // scoped to restrict the lifetime of the borrows
    /// unsafe {
    ///     let ptr = &mut v as *mut [_];
    ///     let (left, right) = ptr.split_at_mut_unchecked(2);
    ///     assert_eq!(&*left, [1, 0]);
    ///     assert_eq!(&*right, [3, 0, 5, 6]);
    ///     (&mut *left)[1] = 2;
    ///     (&mut *right)[1] = 4;
    /// }
    /// assert_eq!(v, [1, 2, 3, 4, 5, 6]);
    /// ```
    #[inline(always)]
    #[unstable(feature = "raw_slice_split", issue = "95595")]
    pub unsafe fn split_at_mut_unchecked(self, mid: usize) -> (*mut [T], *mut [T]) {
        let len = self.len();
        let ptr = self.as_mut_ptr();

        // SAFETY: Caller must pass a valid pointer and an index that is in-bounds.
        let tail = unsafe { ptr.add(mid) };
        (
            crate::ptr::slice_from_raw_parts_mut(ptr, mid),
            crate::ptr::slice_from_raw_parts_mut(tail, len - mid),
        )
    }

    /// Returns a raw pointer to the slice's buffer.
    ///
    /// This is equivalent to casting `self` to `*mut T`, but more type-safe.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_get)]
    /// use std::ptr;
    ///
    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
    /// assert_eq!(slice.as_mut_ptr(), ptr::null_mut());
    /// ```
    #[inline(always)]
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[rustc_const_unstable(feature = "slice_ptr_get", issue = "74265")]
    pub const fn as_mut_ptr(self) -> *mut T {
        self as *mut T
    }

    /// Returns a raw pointer to an element or subslice, without doing bounds
    /// checking.
    ///
    /// Calling this method with an [out-of-bounds index] or when `self` is not dereferenceable
    /// is *[undefined behavior]* even if the resulting pointer is not used.
    ///
    /// [out-of-bounds index]: #method.add
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(slice_ptr_get)]
    ///
    /// let x = &mut [1, 2, 4] as *mut [i32];
    ///
    /// unsafe {
    ///     assert_eq!(x.get_unchecked_mut(1), x.as_mut_ptr().add(1));
    /// }
    /// ```
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[inline(always)]
    pub unsafe fn get_unchecked_mut<I>(self, index: I) -> *mut I::Output
    where
        I: SliceIndex<[T]>,
    {
        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
        unsafe { index.get_unchecked_mut(self) }
    }

    /// Returns `None` if the pointer is null, or else returns a shared slice to
    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
    /// that the value be initialized.
    ///
    /// For the mutable counterpart see [`as_uninit_slice_mut`].
    ///
    /// [`as_ref`]: pointer#method.as_ref-1
    /// [`as_uninit_slice_mut`]: #method.as_uninit_slice_mut
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be [valid] for reads for `ptr.len() * mem::size_of::<T>()` many bytes,
    ///   and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single [allocated object]!
    ///       Slices can never span across multiple allocated objects.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get mutated (except inside `UnsafeCell`).
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts`][].
    ///
    /// [valid]: crate::ptr#safety
    /// [allocated object]: crate::ptr#allocated-object
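    ///
    /// # Examples
    ///
    /// A minimal sketch on a fully initialized array:
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// let mut storage = [1u8, 2, 3];
    /// let ptr: *mut [u8] = &mut storage;
    /// // SAFETY: `ptr` is non-null, aligned, and derived from a live allocation.
    /// let uninit = unsafe { ptr.as_uninit_slice() }.unwrap();
    /// assert_eq!(uninit.len(), 3);
    /// ```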
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
    pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
        if self.is_null() {
            None
        } else {
            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
            Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
        }
    }

    /// Returns `None` if the pointer is null, or else returns a unique slice to
    /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
    /// that the value be initialized.
    ///
    /// For the shared counterpart see [`as_uninit_slice`].
    ///
    /// [`as_mut`]: #method.as_mut
    /// [`as_uninit_slice`]: #method.as_uninit_slice-1
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be [valid] for reads and writes for `ptr.len() * mem::size_of::<T>()`
    ///   many bytes, and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single [allocated object]!
    ///       Slices can never span across multiple allocated objects.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get accessed (read or written) through any other pointer.
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts_mut`][].
    ///
    /// [valid]: crate::ptr#safety
    /// [allocated object]: crate::ptr#allocated-object
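    ///
    /// # Examples
    ///
    /// A minimal sketch initializing a buffer element through the returned slice:
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// let mut storage = [0u8; 3];
    /// let ptr: *mut [u8] = &mut storage;
    /// // SAFETY: `ptr` is non-null, aligned, valid for writes, and no other
    /// // pointer accesses the memory while the returned slice is alive.
    /// let slice = unsafe { ptr.as_uninit_slice_mut() }.unwrap();
    /// slice[0].write(42);
    /// assert_eq!(storage[0], 42);
    /// ```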
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
    pub const unsafe fn as_uninit_slice_mut<'a>(self) -> Option<&'a mut [MaybeUninit<T>]> {
        if self.is_null() {
            None
        } else {
            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
            Some(unsafe { slice::from_raw_parts_mut(self as *mut MaybeUninit<T>, self.len()) })
        }
    }
}

// Equality for pointers
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialEq for *mut T {
    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn eq(&self, other: &*mut T) -> bool {
        *self == *other
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Eq for *mut T {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Ord for *mut T {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn cmp(&self, other: &*mut T) -> Ordering {
        if self < other {
            Less
        } else if self == other {
            Equal
        } else {
            Greater
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialOrd for *mut T {
    #[inline(always)]
    fn partial_cmp(&self, other: &*mut T) -> Option<Ordering> {
        Some(self.cmp(other))
    }

    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn lt(&self, other: &*mut T) -> bool {
        *self < *other
    }

    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn le(&self, other: &*mut T) -> bool {
        *self <= *other
    }

    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn gt(&self, other: &*mut T) -> bool {
        *self > *other
    }

    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn ge(&self, other: &*mut T) -> bool {
        *self >= *other
    }
}
