use super::*;
use crate::cmp::Ordering::{Equal, Greater, Less};
use crate::intrinsics::const_eval_select;
use crate::mem::SizedTypeProperties;
use crate::slice::{self, SliceIndex};

impl<T: ?Sized> *mut T {
    /// Returns `true` if the pointer is null.
    ///
    /// Note that unsized types have many possible null pointers, as only the
    /// raw data pointer is considered, not their length, vtable, etc.
    /// Therefore, two pointers that are null may still not compare equal to
    /// each other.
    ///
    /// ## Behavior during const evaluation
    ///
    /// When this function is used during const evaluation, it may return `false` for pointers
    /// that turn out to be null at runtime. Specifically, when a pointer to some memory
    /// is offset beyond its bounds in such a way that the resulting pointer is null,
    /// the function will still return `false`. There is no way for CTFE to know
    /// the absolute position of that memory, so we cannot tell if the pointer is
    /// null or not.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    /// assert!(!ptr.is_null());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_unstable(feature = "const_ptr_is_null", issue = "74939")]
    #[rustc_diagnostic_item = "ptr_is_null"]
    #[inline]
    pub const fn is_null(self) -> bool {
        #[inline]
        fn runtime_impl(ptr: *mut u8) -> bool {
            ptr.addr() == 0
        }

        #[inline]
        const fn const_impl(ptr: *mut u8) -> bool {
            // Compare via a cast to a thin pointer, so fat pointers only have
            // their "data" part considered for null-ness.
            match (ptr).guaranteed_eq(null_mut()) {
                None => false,
                Some(res) => res,
            }
        }

        const_eval_select((self as *mut u8,), const_impl, runtime_impl)
    }

    /// Casts to a pointer of another type.
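    ///
    /// # Examples
    ///
    /// A minimal sketch: casting changes only the pointee type, not the address.
    ///
    /// ```
    /// let mut bytes = [0u8; 4];
    /// let p: *mut [u8; 4] = &mut bytes;
    /// // View the array pointer as a pointer to its first element.
    /// let first: *mut u8 = p.cast();
    /// unsafe { *first = 7 };
    /// assert_eq!(bytes[0], 7);
    /// ```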
55 #[stable(feature = "ptr_cast", since = "1.38.0")]
56 #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
57 #[rustc_diagnostic_item = "ptr_cast"]
58 #[inline(always)]
59 pub const fn cast<U>(self) -> *mut U {
60 self as _
61 }
62
63 /// Use the pointer value in a new pointer of another type.
64 ///
65 /// In case `meta` is a (fat) pointer to an unsized type, this operation
66 /// will ignore the pointer part, whereas for (thin) pointers to sized
67 /// types, this has the same effect as a simple cast.
68 ///
69 /// The resulting pointer will have provenance of `self`, i.e., for a fat
70 /// pointer, this operation is semantically the same as creating a new
71 /// fat pointer with the data pointer value of `self` but the metadata of
72 /// `meta`.
73 ///
74 /// # Examples
75 ///
76 /// This function is primarily useful for allowing byte-wise pointer
77 /// arithmetic on potentially fat pointers:
78 ///
79 /// ```
80 /// #![feature(set_ptr_value)]
81 /// # use core::fmt::Debug;
82 /// let mut arr: [i32; 3] = [1, 2, 3];
83 /// let mut ptr = arr.as_mut_ptr() as *mut dyn Debug;
84 /// let thin = ptr as *mut u8;
85 /// unsafe {
86 /// ptr = thin.add(8).with_metadata_of(ptr);
87 /// # assert_eq!(*(ptr as *mut i32), 3);
88 /// println!("{:?}", &*ptr); // will print "3"
89 /// }
90 /// ```
91 #[unstable(feature = "set_ptr_value", issue = "75091")]
92 #[rustc_const_unstable(feature = "set_ptr_value", issue = "75091")]
93 #[must_use = "returns a new pointer rather than modifying its argument"]
94 #[inline]
95 pub const fn with_metadata_of<U>(self, meta: *const U) -> *mut U
96 where
97 U: ?Sized,
98 {
99 from_raw_parts_mut::<U>(self as *mut (), metadata(meta))
100 }
101
102 /// Changes constness without changing the type.
103 ///
104 /// This is a bit safer than `as` because it wouldn't silently change the type if the code is
105 /// refactored.
106 ///
107 /// While not strictly required (`*mut T` coerces to `*const T`), this is provided for symmetry
108 /// with [`cast_mut`] on `*const T` and may have documentation value if used instead of implicit
109 /// coercion.
110 ///
111 /// [`cast_mut`]: pointer::cast_mut
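    ///
    /// # Examples
    ///
    /// A short sketch:
    ///
    /// ```
    /// let mut v = 5;
    /// let p: *mut i32 = &mut v;
    /// let q: *const i32 = p.cast_const();
    /// assert_eq!(unsafe { *q }, 5);
    /// ```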
112 #[stable(feature = "ptr_const_cast", since = "1.65.0")]
113 #[rustc_const_stable(feature = "ptr_const_cast", since = "1.65.0")]
114 #[rustc_diagnostic_item = "ptr_cast_const"]
115 #[inline(always)]
116 pub const fn cast_const(self) -> *const T {
117 self as _
118 }
119
120 /// Casts a pointer to its raw bits.
121 ///
122 /// This is equivalent to `as usize`, but is more specific to enhance readability.
123 /// The inverse method is [`from_bits`](pointer#method.from_bits-1).
124 ///
125 /// In particular, `*p as usize` and `p as usize` will both compile for
126 /// pointers to numeric types but do very different things, so using this
127 /// helps emphasize that reading the bits was intentional.
128 ///
129 /// # Examples
130 ///
131 /// ```
132 /// #![feature(ptr_to_from_bits)]
133 /// # #[cfg(not(miri))] { // doctest does not work with strict provenance
134 /// let mut array = [13, 42];
135 /// let mut it = array.iter_mut();
136 /// let p0: *mut i32 = it.next().unwrap();
137 /// assert_eq!(<*mut _>::from_bits(p0.to_bits()), p0);
138 /// let p1: *mut i32 = it.next().unwrap();
139 /// assert_eq!(p1.to_bits() - p0.to_bits(), 4);
140 /// }
141 /// ```
142 #[unstable(feature = "ptr_to_from_bits", issue = "91126")]
143 #[deprecated(
144 since = "1.67.0",
145 note = "replaced by the `expose_provenance` method, or update your code \
146 to follow the strict provenance rules using its APIs"
147 )]
148 #[inline(always)]
149 pub fn to_bits(self) -> usize
150 where
151 T: Sized,
152 {
153 self as usize
154 }
155
156 /// Creates a pointer from its raw bits.
157 ///
158 /// This is equivalent to `as *mut T`, but is more specific to enhance readability.
159 /// The inverse method is [`to_bits`](pointer#method.to_bits-1).
160 ///
161 /// # Examples
162 ///
163 /// ```
164 /// #![feature(ptr_to_from_bits)]
165 /// # #[cfg(not(miri))] { // doctest does not work with strict provenance
166 /// use std::ptr::NonNull;
167 /// let dangling: *mut u8 = NonNull::dangling().as_ptr();
168 /// assert_eq!(<*mut u8>::from_bits(1), dangling);
169 /// }
170 /// ```
171 #[unstable(feature = "ptr_to_from_bits", issue = "91126")]
172 #[deprecated(
173 since = "1.67.0",
174 note = "replaced by the `ptr::with_exposed_provenance_mut` function, or \
175 update your code to follow the strict provenance rules using its APIs"
176 )]
177 #[allow(fuzzy_provenance_casts)] // this is an unstable and semi-deprecated cast function
178 #[inline(always)]
179 pub fn from_bits(bits: usize) -> Self
180 where
181 T: Sized,
182 {
183 bits as Self
184 }
185
186 /// Gets the "address" portion of the pointer.
187 ///
188 /// This is similar to `self as usize`, which semantically discards *provenance* and
189 /// *address-space* information. However, unlike `self as usize`, casting the returned address
190 /// back to a pointer yields yields a [pointer without provenance][without_provenance_mut], which is undefined
191 /// behavior to dereference. To properly restore the lost information and obtain a
192 /// dereferenceable pointer, use [`with_addr`][pointer::with_addr] or
193 /// [`map_addr`][pointer::map_addr].
194 ///
195 /// If using those APIs is not possible because there is no way to preserve a pointer with the
196 /// required provenance, then Strict Provenance might not be for you. Use pointer-integer casts
197 /// or [`expose_provenance`][pointer::expose_provenance] and [`with_exposed_provenance`][with_exposed_provenance]
198 /// instead. However, note that this makes your code less portable and less amenable to tools
199 /// that check for compliance with the Rust memory model.
200 ///
201 /// On most platforms this will produce a value with the same bytes as the original
202 /// pointer, because all the bytes are dedicated to describing the address.
203 /// Platforms which need to store additional information in the pointer may
204 /// perform a change of representation to produce a value containing only the address
205 /// portion of the pointer. What that means is up to the platform to define.
206 ///
207 /// This API and its claimed semantics are part of the Strict Provenance experiment, and as such
208 /// might change in the future (including possibly weakening this so it becomes wholly
209 /// equivalent to `self as usize`). See the [module documentation][crate::ptr] for details.
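    ///
    /// # Examples
    ///
    /// A minimal sketch: the returned address is a plain integer, so it can be
    /// inspected (here, for alignment) without touching provenance.
    ///
    /// ```
    /// #![feature(strict_provenance)]
    /// let mut x = 0u32;
    /// let ptr: *mut u32 = &mut x;
    /// assert_eq!(ptr.addr() % core::mem::align_of::<u32>(), 0);
    /// ```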
    #[must_use]
    #[inline(always)]
    #[unstable(feature = "strict_provenance", issue = "95228")]
    pub fn addr(self) -> usize {
        // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
        // provenance).
        unsafe { mem::transmute(self.cast::<()>()) }
    }

    /// Exposes the "provenance" part of the pointer for future use in
    /// [`with_exposed_provenance`][] and returns the "address" portion.
    ///
    /// This is equivalent to `self as usize`, which semantically discards *provenance* and
    /// *address-space* information. Furthermore, this (like the `as` cast) has the implicit
    /// side-effect of marking the provenance as 'exposed', so on platforms that support it you can
    /// later call [`with_exposed_provenance_mut`][] to reconstitute the original pointer including its
    /// provenance. (Reconstructing address space information, if required, is your responsibility.)
    ///
    /// Using this method means that code is *not* following [Strict
    /// Provenance][super#strict-provenance] rules. Supporting
    /// [`with_exposed_provenance_mut`][] complicates specification and reasoning and may not be supported
    /// by tools that help you to stay conformant with the Rust memory model, so it is recommended
    /// to use [`addr`][pointer::addr] wherever possible.
    ///
    /// On most platforms this will produce a value with the same bytes as the original pointer,
    /// because all the bytes are dedicated to describing the address. Platforms which need to store
    /// additional information in the pointer may not support this operation, since the 'expose'
    /// side-effect which is required for [`with_exposed_provenance_mut`][] to work is typically not
    /// available.
    ///
    /// It is unclear whether this method can be given a satisfying unambiguous specification. This
    /// API and its claimed semantics are part of [Exposed Provenance][super#exposed-provenance].
    ///
    /// [`with_exposed_provenance_mut`]: with_exposed_provenance_mut
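    ///
    /// # Examples
    ///
    /// An illustrative round-trip, assuming a platform where the 'expose'
    /// side-effect is supported:
    ///
    /// ```
    /// #![feature(exposed_provenance)]
    /// let mut x = 5u32;
    /// let p: *mut u32 = &mut x;
    /// let addr = p.expose_provenance();
    /// // The exposed address can later be reconstituted into a usable pointer.
    /// let q = std::ptr::with_exposed_provenance_mut::<u32>(addr);
    /// unsafe { assert_eq!(*q, 5) };
    /// ```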
    #[inline(always)]
    #[unstable(feature = "exposed_provenance", issue = "95228")]
    pub fn expose_provenance(self) -> usize {
        // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
        self.cast::<()>() as usize
    }

    /// Creates a new pointer with the given address.
    ///
    /// This performs the same operation as an `addr as ptr` cast, but copies
    /// the *address-space* and *provenance* of `self` to the new pointer.
    /// This allows us to dynamically preserve and propagate this important
    /// information in a way that is otherwise impossible with a unary cast.
    ///
    /// This is equivalent to using [`wrapping_offset`][pointer::wrapping_offset] to offset
    /// `self` to the given address, and therefore has all the same capabilities and restrictions.
    ///
    /// This API and its claimed semantics are an extension to the Strict Provenance experiment,
    /// see the [module documentation][crate::ptr] for details.
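    ///
    /// # Examples
    ///
    /// A small sketch of tagging the low bits of an aligned pointer and clearing
    /// them again, keeping provenance intact throughout:
    ///
    /// ```
    /// #![feature(strict_provenance)]
    /// let mut v = 1u32;
    /// let ptr: *mut u32 = &mut v;
    /// // `u32` is 4-aligned, so the lowest bit of the address is free for a tag.
    /// let tagged = ptr.with_addr(ptr.addr() | 1);
    /// let untagged = tagged.with_addr(tagged.addr() & !1);
    /// unsafe { assert_eq!(*untagged, 1) };
    /// ```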
    #[must_use]
    #[inline]
    #[unstable(feature = "strict_provenance", issue = "95228")]
    pub fn with_addr(self, addr: usize) -> Self {
        // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
        //
        // In the mean-time, this operation is defined to be "as if" it was
        // a wrapping_offset, so we can emulate it as such. This should properly
        // restore pointer provenance even under today's compiler.
        let self_addr = self.addr() as isize;
        let dest_addr = addr as isize;
        let offset = dest_addr.wrapping_sub(self_addr);

        // This is the canonical desugaring of this operation
        self.wrapping_byte_offset(offset)
    }

    /// Creates a new pointer by mapping `self`'s address to a new one.
    ///
    /// This is a convenience for [`with_addr`][pointer::with_addr], see that method for details.
    ///
    /// This API and its claimed semantics are part of the Strict Provenance experiment,
    /// see the [module documentation][crate::ptr] for details.
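    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// #![feature(strict_provenance)]
    /// let mut v = 7u32;
    /// let ptr: *mut u32 = &mut v;
    /// // Round the address down to a multiple of 4 (a no-op for an aligned `u32`).
    /// let aligned = ptr.map_addr(|a| a & !3);
    /// unsafe { assert_eq!(*aligned, 7) };
    /// ```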
    #[must_use]
    #[inline]
    #[unstable(feature = "strict_provenance", issue = "95228")]
    pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
        self.with_addr(f(self.addr()))
    }

    /// Decompose a (possibly wide) pointer into its data pointer and metadata components.
    ///
    /// The pointer can be later reconstructed with [`from_raw_parts_mut`].
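    ///
    /// # Examples
    ///
    /// A minimal sketch of splitting a slice pointer into its parts and
    /// reassembling it:
    ///
    /// ```
    /// #![feature(ptr_metadata)]
    /// let mut arr = [1, 2, 3];
    /// let ptr: *mut [i32] = &mut arr[..];
    /// let (data, len) = ptr.to_raw_parts();
    /// // For slices, the metadata is the element count.
    /// assert_eq!(len, 3);
    /// let rebuilt: *mut [i32] = std::ptr::from_raw_parts_mut(data, len);
    /// unsafe { assert_eq!((*rebuilt)[1], 2) };
    /// ```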
297 #[unstable(feature = "ptr_metadata", issue = "81513")]
298 #[rustc_const_unstable(feature = "ptr_metadata", issue = "81513")]
299 #[inline]
300 pub const fn to_raw_parts(self) -> (*mut (), <T as super::Pointee>::Metadata) {
301 (self.cast(), super::metadata(self))
302 }
303
304 /// Returns `None` if the pointer is null, or else returns a shared reference to
305 /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`]
306 /// must be used instead.
307 ///
308 /// For the mutable counterpart see [`as_mut`].
309 ///
310 /// [`as_uninit_ref`]: pointer#method.as_uninit_ref-1
311 /// [`as_mut`]: #method.as_mut
312 ///
313 /// # Safety
314 ///
315 /// When calling this method, you have to ensure that *either* the pointer is null *or*
316 /// all of the following is true:
317 ///
318 /// * The pointer must be properly aligned.
319 ///
320 /// * It must be "dereferenceable" in the sense defined in [the module documentation].
321 ///
322 /// * The pointer must point to an initialized instance of `T`.
323 ///
324 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
325 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
326 /// In particular, while this reference exists, the memory the pointer points to must
327 /// not get mutated (except inside `UnsafeCell`).
328 ///
329 /// This applies even if the result of this method is unused!
330 /// (The part about being initialized is not yet fully decided, but until
331 /// it is, the only safe approach is to ensure that they are indeed initialized.)
332 ///
333 /// [the module documentation]: crate::ptr#safety
334 ///
335 /// # Examples
336 ///
337 /// ```
338 /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
339 ///
340 /// unsafe {
341 /// if let Some(val_back) = ptr.as_ref() {
342 /// println!("We got back the value: {val_back}!");
343 /// }
344 /// }
345 /// ```
346 ///
347 /// # Null-unchecked version
348 ///
349 /// If you are sure the pointer can never be null and are looking for some kind of
350 /// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can
351 /// dereference the pointer directly.
352 ///
353 /// ```
354 /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
355 ///
356 /// unsafe {
357 /// let val_back = &*ptr;
358 /// println!("We got back the value: {val_back}!");
359 /// }
360 /// ```
361 #[stable(feature = "ptr_as_ref", since = "1.9.0")]
362 #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
363 #[inline]
364 pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> {
365 // SAFETY: the caller must guarantee that `self` is valid for a
366 // reference if it isn't null.
367 if self.is_null() { None } else { unsafe { Some(&*self) } }
368 }
369
370 /// Returns `None` if the pointer is null, or else returns a shared reference to
371 /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
372 /// that the value has to be initialized.
373 ///
374 /// For the mutable counterpart see [`as_uninit_mut`].
375 ///
376 /// [`as_ref`]: pointer#method.as_ref-1
377 /// [`as_uninit_mut`]: #method.as_uninit_mut
378 ///
379 /// # Safety
380 ///
381 /// When calling this method, you have to ensure that *either* the pointer is null *or*
382 /// all of the following is true:
383 ///
384 /// * The pointer must be properly aligned.
385 ///
386 /// * It must be "dereferenceable" in the sense defined in [the module documentation].
387 ///
388 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
389 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
390 /// In particular, while this reference exists, the memory the pointer points to must
391 /// not get mutated (except inside `UnsafeCell`).
392 ///
393 /// This applies even if the result of this method is unused!
394 ///
395 /// [the module documentation]: crate::ptr#safety
396 ///
397 /// # Examples
398 ///
399 /// ```
400 /// #![feature(ptr_as_uninit)]
401 ///
402 /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
403 ///
404 /// unsafe {
405 /// if let Some(val_back) = ptr.as_uninit_ref() {
406 /// println!("We got back the value: {}!", val_back.assume_init());
407 /// }
408 /// }
409 /// ```
410 #[inline]
411 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
412 #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
413 pub const unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
414 where
415 T: Sized,
416 {
417 // SAFETY: the caller must guarantee that `self` meets all the
418 // requirements for a reference.
419 if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
420 }
421
422 /// Calculates the offset from a pointer.
423 ///
424 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
425 /// offset of `3 * size_of::<T>()` bytes.
426 ///
427 /// # Safety
428 ///
429 /// If any of the following conditions are violated, the result is Undefined
430 /// Behavior:
431 ///
432 /// * Both the starting and resulting pointer must be either in bounds or one
433 /// byte past the end of the same [allocated object].
434 ///
435 /// * The computed offset, **in bytes**, cannot overflow an `isize`.
436 ///
437 /// * The offset being in bounds cannot rely on "wrapping around" the address
438 /// space. That is, the infinite-precision sum, **in bytes** must fit in a usize.
439 ///
440 /// The compiler and standard library generally tries to ensure allocations
441 /// never reach a size where an offset is a concern. For instance, `Vec`
442 /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
443 /// `vec.as_ptr().add(vec.len())` is always safe.
444 ///
445 /// Most platforms fundamentally can't even construct such an allocation.
446 /// For instance, no known 64-bit platform can ever serve a request
447 /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
448 /// However, some 32-bit and 16-bit platforms may successfully serve a request for
449 /// more than `isize::MAX` bytes with things like Physical Address
450 /// Extension. As such, memory acquired directly from allocators or memory
451 /// mapped files *may* be too large to handle with this function.
452 ///
453 /// Consider using [`wrapping_offset`] instead if these constraints are
454 /// difficult to satisfy. The only advantage of this method is that it
455 /// enables more aggressive compiler optimizations.
456 ///
457 /// [`wrapping_offset`]: #method.wrapping_offset
458 /// [allocated object]: crate::ptr#allocated-object
459 ///
460 /// # Examples
461 ///
462 /// ```
463 /// let mut s = [1, 2, 3];
464 /// let ptr: *mut u32 = s.as_mut_ptr();
465 ///
466 /// unsafe {
467 /// assert_eq!(2, *ptr.offset(1));
468 /// assert_eq!(3, *ptr.offset(2));
469 /// }
470 /// ```
471 #[stable(feature = "rust1", since = "1.0.0")]
472 #[must_use = "returns a new pointer rather than modifying its argument"]
473 #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
474 #[inline(always)]
475 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
476 pub const unsafe fn offset(self, count: isize) -> *mut T
477 where
478 T: Sized,
479 {
480 // SAFETY: the caller must uphold the safety contract for `offset`.
481 // The obtained pointer is valid for writes since the caller must
482 // guarantee that it points to the same allocated object as `self`.
483 unsafe { intrinsics::offset(self, count) }
484 }
485
486 /// Calculates the offset from a pointer in bytes.
487 ///
488 /// `count` is in units of **bytes**.
489 ///
490 /// This is purely a convenience for casting to a `u8` pointer and
491 /// using [offset][pointer::offset] on it. See that method for documentation
492 /// and safety requirements.
493 ///
494 /// For non-`Sized` pointees this operation changes only the data pointer,
495 /// leaving the metadata untouched.
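    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// let mut arr = [1u16, 2, 3];
    /// let ptr: *mut u16 = arr.as_mut_ptr();
    /// // One `u16` element is two bytes wide.
    /// unsafe { assert_eq!(*ptr.byte_offset(2), 2) };
    /// ```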
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[rustc_allow_const_fn_unstable(set_ptr_value)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_offset(self, count: isize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { self.cast::<u8>().offset(count).with_metadata_of(self) }
    }

    /// Calculates the offset from a pointer using wrapping arithmetic.
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
    /// be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`offset`], this method basically delays the requirement of staying within the
    /// same allocated object: [`offset`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
    /// words, leaving the allocated object and then re-entering it later is permitted.
    ///
    /// [`offset`]: #method.offset
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements
    /// let mut data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *mut u8 = data.as_mut_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_offset(6);
    ///
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         *ptr = 0;
    ///     }
    ///     ptr = ptr.wrapping_offset(step);
    /// }
    /// assert_eq!(&data, &[0, 2, 0, 4, 0]);
    /// ```
    #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_offset(self, count: isize) -> *mut T
    where
        T: Sized,
    {
        // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
        unsafe { intrinsics::arith_offset(self, count) as *mut T }
    }

    /// Calculates the offset from a pointer in bytes using wrapping arithmetic.
    ///
    /// `count` is in units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_offset][pointer::wrapping_offset] on it. See that method
    /// for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[rustc_allow_const_fn_unstable(set_ptr_value)]
    pub const fn wrapping_byte_offset(self, count: isize) -> Self {
        self.cast::<u8>().wrapping_offset(count).with_metadata_of(self)
    }

    /// Masks out bits of the pointer according to a mask.
    ///
    /// This is convenience for `ptr.map_addr(|a| a & mask)`.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    ///
    /// ## Examples
    ///
    /// ```
    /// #![feature(ptr_mask, strict_provenance)]
    /// let mut v = 17_u32;
    /// let ptr: *mut u32 = &mut v;
    ///
    /// // `u32` is 4 bytes aligned,
    /// // which means that lower 2 bits are always 0.
    /// let tag_mask = 0b11;
    /// let ptr_mask = !tag_mask;
    ///
    /// // We can store something in these lower bits
    /// let tagged_ptr = ptr.map_addr(|a| a | 0b10);
    ///
    /// // Get the "tag" back
    /// let tag = tagged_ptr.addr() & tag_mask;
    /// assert_eq!(tag, 0b10);
    ///
    /// // Note that `tagged_ptr` is unaligned, it's UB to read from/write to it.
    /// // To get original pointer `mask` can be used:
    /// let masked_ptr = tagged_ptr.mask(ptr_mask);
    /// assert_eq!(unsafe { *masked_ptr }, 17);
    ///
    /// unsafe { *masked_ptr = 0 };
    /// assert_eq!(v, 0);
    /// ```
    #[unstable(feature = "ptr_mask", issue = "98290")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline(always)]
    pub fn mask(self, mask: usize) -> *mut T {
        intrinsics::ptr_mask(self.cast::<()>(), mask).cast_mut().with_metadata_of(self)
    }

    /// Returns `None` if the pointer is null, or else returns a unique reference to
    /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_mut`]
    /// must be used instead.
    ///
    /// For the shared counterpart see [`as_ref`].
    ///
    /// [`as_uninit_mut`]: #method.as_uninit_mut
    /// [`as_ref`]: pointer#method.as_ref-1
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be properly aligned.
    ///
    /// * It must be "dereferenceable" in the sense defined in [the module documentation].
    ///
    /// * The pointer must point to an initialized instance of `T`.
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get accessed (read or written) through any other pointer.
    ///
    /// This applies even if the result of this method is unused!
    /// (The part about being initialized is not yet fully decided, but until
    /// it is, the only safe approach is to ensure that they are indeed initialized.)
    ///
    /// [the module documentation]: crate::ptr#safety
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    /// let first_value = unsafe { ptr.as_mut().unwrap() };
    /// *first_value = 4;
    /// # assert_eq!(s, [4, 2, 3]);
    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
    /// ```
    ///
    /// # Null-unchecked version
    ///
    /// If you are sure the pointer can never be null and are looking for some kind of
    /// `as_mut_unchecked` that returns the `&mut T` instead of `Option<&mut T>`, know that
    /// you can dereference the pointer directly.
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    /// let first_value = unsafe { &mut *ptr };
    /// *first_value = 4;
    /// # assert_eq!(s, [4, 2, 3]);
    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
    /// ```
    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
    #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
    #[inline]
    pub const unsafe fn as_mut<'a>(self) -> Option<&'a mut T> {
        // SAFETY: the caller must guarantee that `self` is valid for
        // a mutable reference if it isn't null.
        if self.is_null() { None } else { unsafe { Some(&mut *self) } }
    }

    /// Returns `None` if the pointer is null, or else returns a unique reference to
    /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
    /// that the value has to be initialized.
    ///
    /// For the shared counterpart see [`as_uninit_ref`].
    ///
    /// [`as_mut`]: #method.as_mut
    /// [`as_uninit_ref`]: pointer#method.as_uninit_ref-1
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be properly aligned.
    ///
    /// * It must be "dereferenceable" in the sense defined in [the module documentation].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get accessed (read or written) through any other pointer.
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// [the module documentation]: crate::ptr#safety
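    ///
    /// # Examples
    ///
    /// A minimal sketch of initializing a value through the returned reference:
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// let mut val = 0u8;
    /// let ptr: *mut u8 = &mut val;
    /// unsafe {
    ///     if let Some(uninit) = ptr.as_uninit_mut() {
    ///         uninit.write(42);
    ///     }
    /// }
    /// assert_eq!(val, 42);
    /// ```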
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
    pub const unsafe fn as_uninit_mut<'a>(self) -> Option<&'a mut MaybeUninit<T>>
    where
        T: Sized,
    {
        // SAFETY: the caller must guarantee that `self` meets all the
        // requirements for a reference.
        if self.is_null() { None } else { Some(unsafe { &mut *(self as *mut MaybeUninit<T>) }) }
    }

    /// Returns whether two pointers are guaranteed to be equal.
    ///
    /// At runtime this function behaves like `Some(self == other)`.
    /// However, in some contexts (e.g., compile-time evaluation),
    /// it is not always possible to determine equality of two pointers, so this function may
    /// spuriously return `None` for pointers that later actually turn out to have their equality
    /// known. But when it returns `Some`, the pointers' equality is guaranteed to be known.
    ///
    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
    /// version, and unsafe code must not rely on the result of this function for soundness. It is
    /// suggested to only use this function for performance optimizations where spurious `None`
    /// return values by this function do not affect the outcome, but just the performance.
    /// The consequences of using this method to make runtime and compile-time code behave
    /// differently have not been explored. This method should not be used to introduce such
    /// differences, and it should also not be stabilized before we have a better understanding
    /// of this issue.
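    ///
    /// # Examples
    ///
    /// A runtime-only sketch; during const evaluation the same call may return `None`:
    ///
    /// ```
    /// #![feature(const_raw_ptr_comparison)]
    /// let mut x = 0u8;
    /// let p: *mut u8 = &mut x;
    /// assert_eq!(p.guaranteed_eq(p), Some(true));
    /// ```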
746 #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
747 #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
748 #[inline]
749 pub const fn guaranteed_eq(self, other: *mut T) -> Option<bool>
750 where
751 T: Sized,
752 {
753 (self as *const T).guaranteed_eq(other as _)
754 }
755
756 /// Returns whether two pointers are guaranteed to be inequal.
757 ///
758 /// At runtime this function behaves like `Some(self != other)`.
759 /// However, in some contexts (e.g., compile-time evaluation),
760 /// it is not always possible to determine inequality of two pointers, so this function may
761 /// spuriously return `None` for pointers that later actually turn out to have its inequality known.
762 /// But when it returns `Some`, the pointers' inequality is guaranteed to be known.
763 ///
764 /// The return value may change from `Some` to `None` and vice versa depending on the compiler
765 /// version and unsafe code must not
766 /// rely on the result of this function for soundness. It is suggested to only use this function
767 /// for performance optimizations where spurious `None` return values by this function do not
768 /// affect the outcome, but just the performance.
769 /// The consequences of using this method to make runtime and compile-time code behave
770 /// differently have not been explored. This method should not be used to introduce such
771 /// differences, and it should also not be stabilized before we have a better understanding
772 /// of this issue.
773 #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
774 #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
775 #[inline]
776 pub const fn guaranteed_ne(self, other: *mut T) -> Option<bool>
777 where
778 T: Sized,
779 {
780 (self as *const T).guaranteed_ne(other as _)
781 }
782
783 /// Calculates the distance between two pointers. The returned value is in
784 /// units of T: the distance in bytes divided by `mem::size_of::<T>()`.
785 ///
786 /// This is equivalent to `(self as isize - origin as isize) / (mem::size_of::<T>() as isize)`,
787 /// except that it has a lot more opportunities for UB, in exchange for the compiler
788 /// better understanding what you are doing.
789 ///
790 /// The primary motivation of this method is for computing the `len` of an array/slice
791 /// of `T` that you are currently representing as a "start" and "end" pointer
792 /// (and "end" is "one past the end" of the array).
793 /// In that case, `end.offset_from(start)` gets you the length of the array.
794 ///
795 /// All of the following safety requirements are trivially satisfied for this usecase.
796 ///
797 /// [`offset`]: pointer#method.offset-1
798 ///
799 /// # Safety
800 ///
801 /// If any of the following conditions are violated, the result is Undefined
802 /// Behavior:
803 ///
804 /// * Both `self` and `origin` must be either in bounds or one
805 /// byte past the end of the same [allocated object].
806 ///
807 /// * Both pointers must be *derived from* a pointer to the same object.
808 /// (See below for an example.)
809 ///
810 /// * The distance between the pointers, in bytes, must be an exact multiple
811 /// of the size of `T`.
812 ///
813 /// * The distance between the pointers, **in bytes**, cannot overflow an `isize`.
814 ///
815 /// * The distance being in bounds cannot rely on "wrapping around" the address space.
816 ///
817 /// Rust types are never larger than `isize::MAX` and Rust allocations never wrap around the
818 /// address space, so two pointers within some value of any Rust type `T` will always satisfy
819 /// the last two conditions. The standard library also generally ensures that allocations
820 /// never reach a size where an offset is a concern. For instance, `Vec` and `Box` ensure they
821 /// never allocate more than `isize::MAX` bytes, so `ptr_into_vec.offset_from(vec.as_ptr())`
822 /// always satisfies the last two conditions.
823 ///
824 /// Most platforms fundamentally can't even construct such a large allocation.
825 /// For instance, no known 64-bit platform can ever serve a request
826 /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
827 /// However, some 32-bit and 16-bit platforms may successfully serve a request for
828 /// more than `isize::MAX` bytes with things like Physical Address
829 /// Extension. As such, memory acquired directly from allocators or memory
830 /// mapped files *may* be too large to handle with this function.
831 /// (Note that [`offset`] and [`add`] also have a similar limitation and hence cannot be used on
832 /// such large allocations either.)
833 ///
834 /// The requirement for pointers to be derived from the same allocated object is primarily
835 /// needed for `const`-compatibility: the distance between pointers into *different* allocated
836 /// objects is not known at compile-time. However, the requirement also exists at
837 /// runtime and may be exploited by optimizations. If you wish to compute the difference between
838 /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
839 /// origin as isize) / mem::size_of::<T>()`.
840 // FIXME: recommend `addr()` instead of `as usize` once that is stable.
841 ///
842 /// [`add`]: #method.add
843 /// [allocated object]: crate::ptr#allocated-object
844 ///
845 /// # Panics
846 ///
847 /// This function panics if `T` is a Zero-Sized Type ("ZST").
848 ///
849 /// # Examples
850 ///
851 /// Basic usage:
852 ///
853 /// ```
854 /// let mut a = [0; 5];
855 /// let ptr1: *mut i32 = &mut a[1];
856 /// let ptr2: *mut i32 = &mut a[3];
857 /// unsafe {
858 /// assert_eq!(ptr2.offset_from(ptr1), 2);
859 /// assert_eq!(ptr1.offset_from(ptr2), -2);
860 /// assert_eq!(ptr1.offset(2), ptr2);
861 /// assert_eq!(ptr2.offset(-2), ptr1);
862 /// }
863 /// ```
864 ///
865 /// *Incorrect* usage:
866 ///
867 /// ```rust,no_run
868 /// let ptr1 = Box::into_raw(Box::new(0u8));
869 /// let ptr2 = Box::into_raw(Box::new(1u8));
870 /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
871 /// // Make ptr2_other an "alias" of ptr2, but derived from ptr1.
872 /// let ptr2_other = (ptr1 as *mut u8).wrapping_offset(diff);
873 /// assert_eq!(ptr2 as usize, ptr2_other as usize);
874 /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
875 /// // computing their offset is undefined behavior, even though
876 /// // they point to the same address!
877 /// unsafe {
878 /// let zero = ptr2_other.offset_from(ptr2); // Undefined Behavior
879 /// }
880 /// ```
881 #[stable(feature = "ptr_offset_from", since = "1.47.0")]
882 #[rustc_const_stable(feature = "const_ptr_offset_from", since = "1.65.0")]
883 #[inline(always)]
884 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
885 pub const unsafe fn offset_from(self, origin: *const T) -> isize
886 where
887 T: Sized,
888 {
889 // SAFETY: the caller must uphold the safety contract for `offset_from`.
890 unsafe { (self as *const T).offset_from(origin) }
891 }
892
893 /// Calculates the distance between two pointers. The returned value is in
894 /// units of **bytes**.
895 ///
896 /// This is purely a convenience for casting to a `u8` pointer and
897 /// using [`offset_from`][pointer::offset_from] on it. See that method for
898 /// documentation and safety requirements.
899 ///
900 /// For non-`Sized` pointees this operation considers only the data pointers,
901 /// ignoring the metadata.
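    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// let mut arr = [0u32; 4];
    /// let p: *mut u32 = arr.as_mut_ptr();
    /// unsafe {
    ///     // Two `u32` elements apart is eight bytes.
    ///     assert_eq!(p.add(2).byte_offset_from(p), 8);
    /// }
    /// ```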
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[rustc_allow_const_fn_unstable(set_ptr_value)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: *const U) -> isize {
        // SAFETY: the caller must uphold the safety contract for `offset_from`.
        unsafe { self.cast::<u8>().offset_from(origin.cast::<u8>()) }
    }

    /// Calculates the distance between two pointers, *where it's known that
    /// `self` is equal to or greater than `origin`*. The returned value is in
    /// units of T: the distance in bytes divided by `mem::size_of::<T>()`.
    ///
    /// This computes the same value that [`offset_from`](#method.offset_from)
    /// would compute, but with the added precondition that the offset is
    /// guaranteed to be non-negative. This method is equivalent to
    /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
    /// but it provides slightly more information to the optimizer, which can
    /// sometimes allow it to optimize slightly better with some backends.
    ///
    /// This method can be thought of as recovering the `count` that was passed
    /// to [`add`](#method.add) (or, with the parameters in the other order,
    /// to [`sub`](#method.sub)). The following are all equivalent, assuming
    /// that their safety preconditions are met:
    /// ```rust
    /// # #![feature(ptr_sub_ptr)]
    /// # unsafe fn blah(ptr: *mut i32, origin: *mut i32, count: usize) -> bool {
    /// ptr.sub_ptr(origin) == count
    /// # &&
    /// origin.add(count) == ptr
    /// # &&
    /// ptr.sub(count) == origin
    /// # }
    /// ```
    ///
    /// # Safety
    ///
    /// - The distance between the pointers must be non-negative (`self >= origin`)
    ///
    /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
    ///   apply to this method as well; see it for the full details.
    ///
    /// Importantly, despite the return type of this method being able to represent
    /// a larger offset, it's still *not permitted* to pass pointers which differ
    /// by more than `isize::MAX` *bytes*. As such, the result of this method will
    /// always be less than or equal to `isize::MAX as usize`.
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_sub_ptr)]
    ///
    /// let mut a = [0; 5];
    /// let p: *mut i32 = a.as_mut_ptr();
    /// unsafe {
    ///     let ptr1: *mut i32 = p.add(1);
    ///     let ptr2: *mut i32 = p.add(3);
    ///
    ///     assert_eq!(ptr2.sub_ptr(ptr1), 2);
    ///     assert_eq!(ptr1.add(2), ptr2);
    ///     assert_eq!(ptr2.sub(2), ptr1);
    ///     assert_eq!(ptr2.sub_ptr(ptr2), 0);
    /// }
    ///
    /// // This would be incorrect, as the pointers are not correctly ordered:
    /// // ptr1.offset_from(ptr2)
    /// ```
    #[unstable(feature = "ptr_sub_ptr", issue = "95892")]
    #[rustc_const_unstable(feature = "const_ptr_sub_ptr", issue = "95892")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn sub_ptr(self, origin: *const T) -> usize
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `sub_ptr`.
        unsafe { (self as *const T).sub_ptr(origin) }
    }

    /// Calculates the offset from a pointer (convenience for `.offset(count as isize)`).
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and resulting pointer must be either in bounds or one
    ///   byte past the end of the same [allocated object].
    ///
    /// * The computed offset, **in bytes**, cannot overflow an `isize`.
    ///
    /// * The offset being in bounds cannot rely on "wrapping around" the address
    ///   space. That is, the infinite-precision sum must fit in a `usize`.
    ///
    /// The compiler and standard library generally try to ensure allocations
    /// never reach a size where an offset is a concern. For instance, `Vec`
    /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
    /// `vec.as_ptr().add(vec.len())` is always safe.
    ///
    /// Most platforms fundamentally can't even construct such an allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    ///
    /// Consider using [`wrapping_add`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_add`]: #method.wrapping_add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     assert_eq!('2', *ptr.add(1) as char);
    ///     assert_eq!('3', *ptr.add(2) as char);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }

    /// Calculates the offset from a pointer in bytes (convenience for `.byte_offset(count as isize)`).
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [add][pointer::add] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
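    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// let mut arr = [1u16, 2, 3];
    /// let ptr: *mut u16 = arr.as_mut_ptr();
    /// // Four bytes forward is two `u16` elements.
    /// unsafe { *ptr.byte_add(4) = 9 };
    /// assert_eq!(arr, [1, 2, 9]);
    /// ```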
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[rustc_allow_const_fn_unstable(set_ptr_value)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_add(self, count: usize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `add`.
        unsafe { self.cast::<u8>().add(count).with_metadata_of(self) }
    }

    /// Calculates the offset from a pointer (convenience for
    /// `.offset((count as isize).wrapping_neg())`).
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and resulting pointer must be either in bounds or one
    ///   byte past the end of the same [allocated object].
    ///
    /// * The computed offset cannot exceed `isize::MAX` **bytes**.
    ///
    /// * The offset being in bounds cannot rely on "wrapping around" the address
    ///   space. That is, the infinite-precision sum must fit in a `usize`.
    ///
    /// The compiler and standard library generally try to ensure allocations
    /// never reach a size where an offset is a concern. For instance, `Vec`
    /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
    /// `vec.as_ptr().add(vec.len()).sub(vec.len())` is always safe.
    ///
    /// Most platforms fundamentally can't even construct such an allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    ///
    /// Consider using [`wrapping_sub`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_sub`]: #method.wrapping_sub
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    ///
    /// unsafe {
    ///     let end: *const u8 = s.as_ptr().add(3);
    ///     assert_eq!('3', *end.sub(1) as char);
    ///     assert_eq!('2', *end.sub(2) as char);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        if T::IS_ZST {
            // Pointer arithmetic does nothing when the pointee is a ZST.
            self
        } else {
            // SAFETY: the caller must uphold the safety contract for `offset`.
            // Because the pointee is *not* a ZST, that means that `count` is
            // at most `isize::MAX`, and thus the negation cannot overflow.
            unsafe { self.offset(intrinsics::unchecked_sub(0, count as isize)) }
        }
    }

    /// Calculates the offset from a pointer in bytes (convenience for
    /// `.byte_offset((count as isize).wrapping_neg())`).
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [sub][pointer::sub] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
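    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// let mut arr = [1u16, 2, 3];
    /// unsafe {
    ///     let last: *mut u16 = arr.as_mut_ptr().add(2);
    ///     // Two bytes back from the last element is the middle one.
    ///     assert_eq!(*last.byte_sub(2), 2);
    /// }
    /// ```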
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[rustc_allow_const_fn_unstable(set_ptr_value)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_sub(self, count: usize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `sub`.
        unsafe { self.cast::<u8>().sub(count).with_metadata_of(self) }
    }

    /// Calculates the offset from a pointer using wrapping arithmetic.
    /// (convenience for `.wrapping_offset(count as isize)`)
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
    /// be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`add`], this method basically delays the requirement of staying within the
    /// same allocated object: [`add`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocated object and then re-entering it later is permitted.
    ///
    /// [`add`]: #method.add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_add(6);
    ///
    /// // This loop prints "1, 3, 5, "
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         print!("{}, ", *ptr);
    ///     }
    ///     ptr = ptr.wrapping_add(step);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_add(self, count: usize) -> Self
    where
        T: Sized,
    {
        self.wrapping_offset(count as isize)
    }

    /// Calculates the offset from a pointer in bytes using wrapping arithmetic.
    /// (convenience for `.wrapping_byte_offset(count as isize)`)
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_add][pointer::wrapping_add] on it. See that method for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
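    ///
    /// # Examples
    ///
    /// A minimal sketch: wrapping arithmetic is safe to compute even when the
    /// intermediate pointer is unusable.
    ///
    /// ```
    /// let mut v = 0u32;
    /// let ptr: *mut u32 = &mut v;
    /// // Step one byte forward and back; the original pointer is recovered.
    /// let bumped = ptr.wrapping_byte_add(1);
    /// assert_eq!(bumped.wrapping_byte_sub(1), ptr);
    /// ```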
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[rustc_allow_const_fn_unstable(set_ptr_value)]
    pub const fn wrapping_byte_add(self, count: usize) -> Self {
        self.cast::<u8>().wrapping_add(count).with_metadata_of(self)
    }

    /// Calculates the offset from a pointer using wrapping arithmetic.
    /// (convenience for `.wrapping_offset((count as isize).wrapping_neg())`)
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
    /// be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`sub`], this method basically delays the requirement of staying within the
    /// same allocated object: [`sub`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocated object and then re-entering it later is permitted.
    ///
    /// [`sub`]: #method.sub
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements (backwards)
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let start_rounded_down = ptr.wrapping_sub(2);
    /// ptr = ptr.wrapping_add(4);
    /// let step = 2;
    /// // This loop prints "5, 3, 1, "
    /// while ptr != start_rounded_down {
    ///     unsafe {
    ///         print!("{}, ", *ptr);
    ///     }
    ///     ptr = ptr.wrapping_sub(step);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        self.wrapping_offset((count as isize).wrapping_neg())
    }

    /// Calculates the offset from a pointer in bytes using wrapping arithmetic.
    /// (convenience for `.wrapping_byte_offset((count as isize).wrapping_neg())`)
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_sub][pointer::wrapping_sub] on it. See that method for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
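    ///
    /// # Examples
    ///
    /// A minimal sketch; a byte-wise round trip returns the original pointer:
    ///
    /// ```
    /// let mut data = 0u32;
    /// let ptr: *mut u32 = &mut data;
    /// assert_eq!(ptr.wrapping_byte_add(3).wrapping_byte_sub(3), ptr);
    /// ```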
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[rustc_allow_const_fn_unstable(set_ptr_value)]
    pub const fn wrapping_byte_sub(self, count: usize) -> Self {
        self.cast::<u8>().wrapping_sub(count).with_metadata_of(self)
    }

    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// See [`ptr::read`] for safety concerns and examples.
    ///
    /// [`ptr::read`]: crate::ptr::read()
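    ///
    /// # Examples
    ///
    /// A minimal example with a valid, aligned pointer:
    ///
    /// ```
    /// let mut x = 12u32;
    /// let ptr: *mut u32 = &mut x;
    /// // SAFETY: `ptr` is valid for reads and properly aligned.
    /// assert_eq!(unsafe { ptr.read() }, 12);
    /// ```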
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn read(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read`.
        unsafe { read(self) }
    }

    /// Performs a volatile read of the value from `self` without moving it. This
    /// leaves the memory in `self` unchanged.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::read_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
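    ///
    /// # Examples
    ///
    /// Volatile reads are typically used with memory-mapped I/O; purely as an
    /// illustration, a volatile read of ordinary memory returns its value:
    ///
    /// ```
    /// let mut x = 7u32;
    /// let ptr: *mut u32 = &mut x;
    /// // SAFETY: `ptr` is valid for reads and properly aligned.
    /// assert_eq!(unsafe { ptr.read_volatile() }, 7);
    /// ```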
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub unsafe fn read_volatile(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_volatile`.
        unsafe { read_volatile(self) }
    }

    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// Unlike `read`, the pointer may be unaligned.
    ///
    /// See [`ptr::read_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
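    ///
    /// # Examples
    ///
    /// A small sketch reading a `u32` from an arbitrary byte offset:
    ///
    /// ```
    /// let mut bytes = [1u8, 2, 3, 4, 5];
    /// // A `*mut u32` one byte into the array is (almost certainly) unaligned.
    /// let unaligned = unsafe { bytes.as_mut_ptr().add(1) } as *mut u32;
    /// // SAFETY: the four bytes at `unaligned` are initialized and in-bounds.
    /// let v = unsafe { unaligned.read_unaligned() };
    /// assert_eq!(v.to_ne_bytes(), [2, 3, 4, 5]);
    /// ```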
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn read_unaligned(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
        unsafe { read_unaligned(self) }
    }

    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: crate::ptr::copy()
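    ///
    /// # Examples
    ///
    /// A minimal sketch copying within one buffer, which is allowed to overlap:
    ///
    /// ```
    /// let mut v = [1, 2, 3, 4, 5];
    /// let ptr: *mut i32 = v.as_mut_ptr();
    /// // SAFETY: both ranges are in-bounds of `v`; `copy` permits overlap.
    /// unsafe { ptr.copy_to(ptr.add(2), 3) };
    /// assert_eq!(v, [1, 2, 1, 2, 3]);
    /// ```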
    #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy`.
        unsafe { copy(self, dest, count) }
    }

    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
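    ///
    /// # Examples
    ///
    /// A minimal sketch with two distinct buffers, so the ranges cannot overlap:
    ///
    /// ```
    /// let mut src = [1u8, 2, 3];
    /// let mut dst = [0u8; 3];
    /// // SAFETY: the buffers are distinct and both valid for 3 elements.
    /// unsafe { src.as_mut_ptr().copy_to_nonoverlapping(dst.as_mut_ptr(), 3) };
    /// assert_eq!(dst, [1, 2, 3]);
    /// ```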
    #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
        unsafe { copy_nonoverlapping(self, dest, count) }
    }

    /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: crate::ptr::copy()
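    ///
    /// # Examples
    ///
    /// A minimal sketch filling a buffer from another one:
    ///
    /// ```
    /// let mut dst = [0i32; 4];
    /// let src = [10, 20, 30, 40];
    /// // SAFETY: both pointers are valid for 4 elements.
    /// unsafe { dst.as_mut_ptr().copy_from(src.as_ptr(), 4) };
    /// assert_eq!(dst, [10, 20, 30, 40]);
    /// ```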
    #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_from(self, src: *const T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy`.
        unsafe { copy(src, self, count) }
    }

    /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
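    ///
    /// # Examples
    ///
    /// A minimal sketch copying a prefix out of a distinct buffer:
    ///
    /// ```
    /// let mut dst = [0u8; 2];
    /// let src = [7u8, 8, 9];
    /// // SAFETY: the buffers are distinct; only the first 2 elements are copied.
    /// unsafe { dst.as_mut_ptr().copy_from_nonoverlapping(src.as_ptr(), 2) };
    /// assert_eq!(dst, [7, 8]);
    /// ```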
    #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_from_nonoverlapping(self, src: *const T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
        unsafe { copy_nonoverlapping(src, self, count) }
    }

    /// Executes the destructor (if any) of the pointed-to value.
    ///
    /// See [`ptr::drop_in_place`] for safety concerns and examples.
    ///
    /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place()
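    ///
    /// # Examples
    ///
    /// A minimal sketch using [`ManuallyDrop`](crate::mem::ManuallyDrop) so the
    /// value is not dropped a second time:
    ///
    /// ```
    /// use std::mem::ManuallyDrop;
    ///
    /// let mut s = ManuallyDrop::new(String::from("hello"));
    /// let ptr: *mut String = &mut *s;
    /// // SAFETY: `ptr` points to an initialized `String` that is never
    /// // accessed or dropped again afterwards.
    /// unsafe { ptr.drop_in_place() };
    /// ```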
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    pub unsafe fn drop_in_place(self) {
        // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
        unsafe { drop_in_place(self) }
    }

    /// Overwrites a memory location with the given value without reading or
    /// dropping the old value.
    ///
    /// See [`ptr::write`] for safety concerns and examples.
    ///
    /// [`ptr::write`]: crate::ptr::write()
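    ///
    /// # Examples
    ///
    /// A minimal example; `u32` has no destructor, so nothing is leaked:
    ///
    /// ```
    /// let mut x = 0u32;
    /// let ptr: *mut u32 = &mut x;
    /// // SAFETY: `ptr` is valid for writes and properly aligned.
    /// unsafe { ptr.write(42) };
    /// assert_eq!(x, 42);
    /// ```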
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn write(self, val: T)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `write`.
        unsafe { write(self, val) }
    }

    /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
    /// bytes of memory starting at `self` to `val`.
    ///
    /// See [`ptr::write_bytes`] for safety concerns and examples.
    ///
    /// [`ptr::write_bytes`]: crate::ptr::write_bytes()
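    ///
    /// # Examples
    ///
    /// A minimal example zeroing a small buffer:
    ///
    /// ```
    /// let mut buf = [1u8; 4];
    /// // SAFETY: the pointer is valid for writes of 4 bytes.
    /// unsafe { buf.as_mut_ptr().write_bytes(0, 4) };
    /// assert_eq!(buf, [0; 4]);
    /// ```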
    #[doc(alias = "memset")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn write_bytes(self, val: u8, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `write_bytes`.
        unsafe { write_bytes(self, val, count) }
    }

    /// Performs a volatile write of a memory location with the given value without
    /// reading or dropping the old value.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::write_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::write_volatile`]: crate::ptr::write_volatile()
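    ///
    /// # Examples
    ///
    /// Volatile writes are typically used with memory-mapped I/O; purely as an
    /// illustration, a volatile write to ordinary memory stores the value:
    ///
    /// ```
    /// let mut x = 0u32;
    /// let ptr: *mut u32 = &mut x;
    /// // SAFETY: `ptr` is valid for writes and properly aligned.
    /// unsafe { ptr.write_volatile(9) };
    /// assert_eq!(x, 9);
    /// ```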
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub unsafe fn write_volatile(self, val: T)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `write_volatile`.
        unsafe { write_volatile(self, val) }
    }

    /// Overwrites a memory location with the given value without reading or
    /// dropping the old value.
    ///
    /// Unlike `write`, the pointer may be unaligned.
    ///
    /// See [`ptr::write_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned()
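    ///
    /// # Examples
    ///
    /// A small sketch writing a `u32` at an arbitrary byte offset:
    ///
    /// ```
    /// let mut bytes = [0u8; 5];
    /// // A `*mut u32` one byte into the array is (almost certainly) unaligned.
    /// let unaligned = unsafe { bytes.as_mut_ptr().add(1) } as *mut u32;
    /// // SAFETY: the four bytes at `unaligned` are in-bounds and valid for writes.
    /// unsafe { unaligned.write_unaligned(u32::from_ne_bytes([1, 2, 3, 4])) };
    /// assert_eq!(&bytes[1..], &[1, 2, 3, 4]);
    /// ```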
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn write_unaligned(self, val: T)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
        unsafe { write_unaligned(self, val) }
    }

    /// Replaces the value at `self` with `src`, returning the old
    /// value, without dropping either.
    ///
    /// See [`ptr::replace`] for safety concerns and examples.
    ///
    /// [`ptr::replace`]: crate::ptr::replace()
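    ///
    /// # Examples
    ///
    /// A minimal example:
    ///
    /// ```
    /// let mut x = 5;
    /// let ptr: *mut i32 = &mut x;
    /// // SAFETY: `ptr` is valid for reads and writes and properly aligned.
    /// let old = unsafe { ptr.replace(10) };
    /// assert_eq!(old, 5);
    /// assert_eq!(x, 10);
    /// ```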
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    pub unsafe fn replace(self, src: T) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `replace`.
        unsafe { replace(self, src) }
    }

    /// Swaps the values at two mutable locations of the same type, without
    /// deinitializing either. They may overlap, unlike `mem::swap` which is
    /// otherwise equivalent.
    ///
    /// See [`ptr::swap`] for safety concerns and examples.
    ///
    /// [`ptr::swap`]: crate::ptr::swap()
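    ///
    /// # Examples
    ///
    /// A minimal example with two distinct locations:
    ///
    /// ```
    /// let mut a = 1u8;
    /// let mut b = 2u8;
    /// // SAFETY: both pointers are valid for reads and writes and properly aligned.
    /// unsafe { (&mut a as *mut u8).swap(&mut b) };
    /// assert_eq!((a, b), (2, 1));
    /// ```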
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_unstable(feature = "const_swap", issue = "83163")]
    #[inline(always)]
    pub const unsafe fn swap(self, with: *mut T)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `swap`.
        unsafe { swap(self, with) }
    }

    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
    /// `align`.
    ///
    /// If it is not possible to align the pointer, the implementation returns
    /// `usize::MAX`.
    ///
    /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
    /// used with the `wrapping_add` method.
    ///
    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
    /// the returned offset is correct in all terms other than alignment.
    ///
    /// When this is called during compile-time evaluation (which is unstable), the implementation
    /// may return `usize::MAX` in cases where that can never happen at runtime. This is because the
    /// actual alignment of pointers is not known yet during compile-time, so an offset with
    /// guaranteed alignment can sometimes not be computed. For example, a buffer declared as `[u8;
    /// N]` might be allocated at an odd or an even address, but at compile-time this is not yet
    /// known, so the execution has to be correct for either choice. It is therefore impossible to
    /// find an offset that is guaranteed to be 2-aligned. (This behavior is subject to change, as usual
    /// for unstable APIs.)
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two.
    ///
    /// # Examples
    ///
    /// Accessing adjacent `u8` as `u16`
    ///
    /// ```
    /// use std::mem::align_of;
    ///
    /// # unsafe {
    /// let mut x = [5_u8, 6, 7, 8, 9];
    /// let ptr = x.as_mut_ptr();
    /// let offset = ptr.align_offset(align_of::<u16>());
    ///
    /// if offset < x.len() - 1 {
    ///     let u16_ptr = ptr.add(offset).cast::<u16>();
    ///     *u16_ptr = 0;
    ///
    ///     assert!(x == [0, 0, 7, 8, 9] || x == [5, 0, 0, 8, 9]);
    /// } else {
    ///     // while the pointer can be aligned via `offset`, it would point
    ///     // outside the allocation
    /// }
    /// # }
    /// ```
    #[must_use]
    #[inline]
    #[stable(feature = "align_offset", since = "1.36.0")]
    #[rustc_const_unstable(feature = "const_align_offset", issue = "90962")]
    pub const fn align_offset(self, align: usize) -> usize
    where
        T: Sized,
    {
        if !align.is_power_of_two() {
            panic!("align_offset: align is not a power-of-two");
        }

        // SAFETY: `align` has been checked to be a power of 2 above
        let ret = unsafe { align_offset(self, align) };

        // Inform Miri that we want to consider the resulting pointer to be suitably aligned.
        #[cfg(miri)]
        if ret != usize::MAX {
            intrinsics::miri_promise_symbolic_alignment(
                self.wrapping_add(ret).cast_const().cast(),
                align,
            );
        }

        ret
    }

    /// Returns whether the pointer is properly aligned for `T`.
    ///
    /// # Examples
    ///
    /// ```
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let mut data = AlignedI32(42);
    /// let ptr = &mut data as *mut AlignedI32;
    ///
    /// assert!(ptr.is_aligned());
    /// assert!(!ptr.wrapping_byte_add(1).is_aligned());
    /// ```
    ///
    /// # At compiletime
    /// **Note: Alignment at compiletime is experimental and subject to change. See the
    /// [tracking issue] for details.**
    ///
    /// At compiletime, the compiler may not know where a value will end up in memory.
    /// Calling this function on a pointer created from a reference at compiletime will only
    /// return `true` if the pointer is guaranteed to be aligned. This means that the pointer
    /// is never aligned if cast to a type with a stricter alignment than the reference's
    /// underlying allocation.
    ///
    /// ```
    /// #![feature(const_pointer_is_aligned)]
    /// #![feature(const_mut_refs)]
    ///
    /// // On some platforms, the alignment of primitives is less than their size.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    /// #[repr(align(8))]
    /// struct AlignedI64(i64);
    ///
    /// const _: () = {
    ///     let mut data = AlignedI32(42);
    ///     let ptr = &mut data as *mut AlignedI32;
    ///     assert!(ptr.is_aligned());
    ///
    ///     // At runtime either `ptr1` or `ptr2` would be aligned, but at compiletime neither is aligned.
    ///     let ptr1 = ptr.cast::<AlignedI64>();
    ///     let ptr2 = ptr.wrapping_add(1).cast::<AlignedI64>();
    ///     assert!(!ptr1.is_aligned());
    ///     assert!(!ptr2.is_aligned());
    /// };
    /// ```
    ///
    /// Due to this behavior, it is possible that a runtime pointer derived from a compiletime
    /// pointer is aligned, even if the compiletime pointer wasn't aligned.
    ///
    /// ```
    /// #![feature(const_pointer_is_aligned)]
    ///
    /// // On some platforms, the alignment of primitives is less than their size.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    /// #[repr(align(8))]
    /// struct AlignedI64(i64);
    ///
    /// // At compiletime, neither `COMPTIME_PTR` nor `COMPTIME_PTR + 1` is aligned.
    /// // Also, note that mutable references are not allowed in the final value of constants.
    /// const COMPTIME_PTR: *mut AlignedI32 = (&AlignedI32(42) as *const AlignedI32).cast_mut();
    /// const _: () = assert!(!COMPTIME_PTR.cast::<AlignedI64>().is_aligned());
    /// const _: () = assert!(!COMPTIME_PTR.wrapping_add(1).cast::<AlignedI64>().is_aligned());
    ///
    /// // At runtime, either `runtime_ptr` or `runtime_ptr + 1` is aligned.
    /// let runtime_ptr = COMPTIME_PTR;
    /// assert_ne!(
    ///     runtime_ptr.cast::<AlignedI64>().is_aligned(),
    ///     runtime_ptr.wrapping_add(1).cast::<AlignedI64>().is_aligned(),
    /// );
    /// ```
    ///
    /// If a pointer is created from a fixed address, this function behaves the same during
    /// runtime and compiletime.
    ///
    /// ```
    /// #![feature(const_pointer_is_aligned)]
    ///
    /// // On some platforms, the alignment of primitives is less than their size.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    /// #[repr(align(8))]
    /// struct AlignedI64(i64);
    ///
    /// const _: () = {
    ///     let ptr = 40 as *mut AlignedI32;
    ///     assert!(ptr.is_aligned());
    ///
    ///     // For pointers with a known address, runtime and compiletime behavior are identical.
    ///     let ptr1 = ptr.cast::<AlignedI64>();
    ///     let ptr2 = ptr.wrapping_add(1).cast::<AlignedI64>();
    ///     assert!(ptr1.is_aligned());
    ///     assert!(!ptr2.is_aligned());
    /// };
    /// ```
    ///
    /// [tracking issue]: https://github.com/rust-lang/rust/issues/104203
    #[must_use]
    #[inline]
    #[stable(feature = "pointer_is_aligned", since = "CURRENT_RUSTC_VERSION")]
    #[rustc_const_unstable(feature = "const_pointer_is_aligned", issue = "104203")]
    pub const fn is_aligned(self) -> bool
    where
        T: Sized,
    {
        self.is_aligned_to(mem::align_of::<T>())
    }

    /// Returns whether the pointer is aligned to `align`.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointer,
    /// ignoring the metadata.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two (this includes 0).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(pointer_is_aligned_to)]
    ///
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let mut data = AlignedI32(42);
    /// let ptr = &mut data as *mut AlignedI32;
    ///
    /// assert!(ptr.is_aligned_to(1));
    /// assert!(ptr.is_aligned_to(2));
    /// assert!(ptr.is_aligned_to(4));
    ///
    /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
    /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
    ///
    /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
    /// ```
    ///
    /// # At compiletime
    /// **Note: Alignment at compiletime is experimental and subject to change. See the
    /// [tracking issue] for details.**
    ///
    /// At compiletime, the compiler may not know where a value will end up in memory.
    /// Calling this function on a pointer created from a reference at compiletime will only
    /// return `true` if the pointer is guaranteed to be aligned. This means that the pointer
    /// cannot have a stricter alignment than the reference's underlying allocation.
    ///
    /// ```
    /// #![feature(pointer_is_aligned_to)]
    /// #![feature(const_pointer_is_aligned)]
    /// #![feature(const_mut_refs)]
    ///
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// const _: () = {
    ///     let mut data = AlignedI32(42);
    ///     let ptr = &mut data as *mut AlignedI32;
    ///
    ///     assert!(ptr.is_aligned_to(1));
    ///     assert!(ptr.is_aligned_to(2));
    ///     assert!(ptr.is_aligned_to(4));
    ///
    ///     // At compiletime, we know for sure that the pointer isn't aligned to 8.
    ///     assert!(!ptr.is_aligned_to(8));
    ///     assert!(!ptr.wrapping_add(1).is_aligned_to(8));
    /// };
    /// ```
    ///
    /// Due to this behavior, it is possible that a runtime pointer derived from a compiletime
    /// pointer is aligned, even if the compiletime pointer wasn't aligned.
    ///
    /// ```
    /// #![feature(pointer_is_aligned_to)]
    /// #![feature(const_pointer_is_aligned)]
    ///
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// // At compiletime, neither `COMPTIME_PTR` nor `COMPTIME_PTR + 1` is aligned.
    /// // Also, note that mutable references are not allowed in the final value of constants.
    /// const COMPTIME_PTR: *mut AlignedI32 = (&AlignedI32(42) as *const AlignedI32).cast_mut();
    /// const _: () = assert!(!COMPTIME_PTR.is_aligned_to(8));
    /// const _: () = assert!(!COMPTIME_PTR.wrapping_add(1).is_aligned_to(8));
    ///
    /// // At runtime, either `runtime_ptr` or `runtime_ptr + 1` is aligned.
    /// let runtime_ptr = COMPTIME_PTR;
    /// assert_ne!(
    ///     runtime_ptr.is_aligned_to(8),
    ///     runtime_ptr.wrapping_add(1).is_aligned_to(8),
    /// );
    /// ```
    ///
    /// If a pointer is created from a fixed address, this function behaves the same during
    /// runtime and compiletime.
    ///
    /// ```
    /// #![feature(pointer_is_aligned_to)]
    /// #![feature(const_pointer_is_aligned)]
    ///
    /// const _: () = {
    ///     let ptr = 40 as *mut u8;
    ///     assert!(ptr.is_aligned_to(1));
    ///     assert!(ptr.is_aligned_to(2));
    ///     assert!(ptr.is_aligned_to(4));
    ///     assert!(ptr.is_aligned_to(8));
    ///     assert!(!ptr.is_aligned_to(16));
    /// };
    /// ```
    ///
    /// [tracking issue]: https://github.com/rust-lang/rust/issues/104203
    #[must_use]
    #[inline]
    #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
    #[rustc_const_unstable(feature = "const_pointer_is_aligned", issue = "104203")]
    pub const fn is_aligned_to(self, align: usize) -> bool {
        if !align.is_power_of_two() {
            panic!("is_aligned_to: align is not a power-of-two");
        }

        #[inline]
        fn runtime_impl(ptr: *mut (), align: usize) -> bool {
            ptr.addr() & (align - 1) == 0
        }

        #[inline]
        const fn const_impl(ptr: *mut (), align: usize) -> bool {
            // We can't use the address of `self` in a `const fn`, so we use `align_offset` instead.
            ptr.align_offset(align) == 0
        }

        // The cast to `()` is used to
        // 1. deal with fat pointers; and
        // 2. ensure that `align_offset` (in `const_impl`) doesn't actually try to compute an offset.
        const_eval_select((self.cast::<()>(), align), const_impl, runtime_impl)
    }
}

impl<T> *mut [T] {
    /// Returns the length of a raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the raw slice cannot be cast to a slice
    /// reference because the pointer is null or unaligned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr;
    ///
    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[inline(always)]
    #[stable(feature = "slice_ptr_len", since = "CURRENT_RUSTC_VERSION")]
    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "CURRENT_RUSTC_VERSION")]
    #[rustc_allow_const_fn_unstable(ptr_metadata)]
    pub const fn len(self) -> usize {
        metadata(self)
    }

    /// Returns `true` if the raw slice has a length of 0.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr;
    ///
    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
    /// assert!(!slice.is_empty());
    /// ```
    #[inline(always)]
    #[stable(feature = "slice_ptr_len", since = "CURRENT_RUSTC_VERSION")]
    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "CURRENT_RUSTC_VERSION")]
    pub const fn is_empty(self) -> bool {
        self.len() == 0
    }

    /// Divides one mutable raw slice into two at an index.
    ///
    /// The first will contain all indices from `[0, mid)` (excluding
    /// the index `mid` itself) and the second will contain all
    /// indices from `[mid, len)` (excluding the index `len` itself).
    ///
    /// # Panics
    ///
    /// Panics if `mid > len`.
    ///
    /// # Safety
    ///
    /// `mid` must be [in-bounds] of the underlying [allocated object].
    /// This means `self` must be dereferenceable and span a single allocation
    /// that is at least `mid * size_of::<T>()` bytes long. Not upholding these
    /// requirements is *[undefined behavior]* even if the resulting pointers are not used.
    ///
    /// Since `len` being in-bounds is not a safety invariant of `*mut [T]`, the
    /// safety requirements of this method are the same as for [`split_at_mut_unchecked`];
    /// the explicit bounds check is only as useful as `len` is correct.
    ///
    /// [`split_at_mut_unchecked`]: #method.split_at_mut_unchecked
    /// [in-bounds]: #method.add
    /// [allocated object]: crate::ptr#allocated-object
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(raw_slice_split)]
    /// #![feature(slice_ptr_get)]
    ///
    /// let mut v = [1, 0, 3, 0, 5, 6];
    /// let ptr = &mut v as *mut [_];
    /// unsafe {
    ///     let (left, right) = ptr.split_at_mut(2);
    ///     assert_eq!(&*left, [1, 0]);
    ///     assert_eq!(&*right, [3, 0, 5, 6]);
    /// }
    /// ```
    #[inline(always)]
    #[track_caller]
    #[unstable(feature = "raw_slice_split", issue = "95595")]
    pub unsafe fn split_at_mut(self, mid: usize) -> (*mut [T], *mut [T]) {
        assert!(mid <= self.len());
        // SAFETY: The assert above is only a safety-net as long as `self.len()` is correct
        // The actual safety requirements of this function are the same as for `split_at_mut_unchecked`
        unsafe { self.split_at_mut_unchecked(mid) }
    }

    /// Divides one mutable raw slice into two at an index, without doing bounds checking.
    ///
    /// The first will contain all indices from `[0, mid)` (excluding
    /// the index `mid` itself) and the second will contain all
    /// indices from `[mid, len)` (excluding the index `len` itself).
    ///
    /// # Safety
    ///
    /// `mid` must be [in-bounds] of the underlying [allocated object].
    /// This means `self` must be dereferenceable and span a single allocation
    /// that is at least `mid * size_of::<T>()` bytes long. Not upholding these
    /// requirements is *[undefined behavior]* even if the resulting pointers are not used.
    ///
    /// [in-bounds]: #method.add
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(raw_slice_split)]
    ///
    /// let mut v = [1, 0, 3, 0, 5, 6];
    /// // scoped to restrict the lifetime of the borrows
    /// unsafe {
    ///     let ptr = &mut v as *mut [_];
    ///     let (left, right) = ptr.split_at_mut_unchecked(2);
    ///     assert_eq!(&*left, [1, 0]);
    ///     assert_eq!(&*right, [3, 0, 5, 6]);
    ///     (&mut *left)[1] = 2;
    ///     (&mut *right)[1] = 4;
    /// }
    /// assert_eq!(v, [1, 2, 3, 4, 5, 6]);
    /// ```
    #[inline(always)]
    #[unstable(feature = "raw_slice_split", issue = "95595")]
    pub unsafe fn split_at_mut_unchecked(self, mid: usize) -> (*mut [T], *mut [T]) {
        let len = self.len();
        let ptr = self.as_mut_ptr();

        // SAFETY: Caller must pass a valid pointer and an index that is in-bounds.
        let tail = unsafe { ptr.add(mid) };
        (
            crate::ptr::slice_from_raw_parts_mut(ptr, mid),
            crate::ptr::slice_from_raw_parts_mut(tail, len - mid),
        )
    }

    /// Returns a raw pointer to the slice's buffer.
    ///
    /// This is equivalent to casting `self` to `*mut T`, but more type-safe.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_get)]
    /// use std::ptr;
    ///
    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
    /// assert_eq!(slice.as_mut_ptr(), ptr::null_mut());
    /// ```
    #[inline(always)]
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[rustc_const_unstable(feature = "slice_ptr_get", issue = "74265")]
    pub const fn as_mut_ptr(self) -> *mut T {
        self as *mut T
    }

    /// Returns a raw pointer to an element or subslice, without doing bounds
    /// checking.
    ///
    /// Calling this method with an [out-of-bounds index] or when `self` is not dereferenceable
    /// is *[undefined behavior]* even if the resulting pointer is not used.
    ///
    /// [out-of-bounds index]: #method.add
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(slice_ptr_get)]
    ///
    /// let x = &mut [1, 2, 4] as *mut [i32];
    ///
    /// unsafe {
    ///     assert_eq!(x.get_unchecked_mut(1), x.as_mut_ptr().add(1));
    /// }
    /// ```
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[inline(always)]
    pub unsafe fn get_unchecked_mut<I>(self, index: I) -> *mut I::Output
    where
        I: SliceIndex<[T]>,
    {
        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
        unsafe { index.get_unchecked_mut(self) }
    }

    /// Returns `None` if the pointer is null, or else returns a shared slice to
    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
    /// that the value be initialized.
    ///
    /// For the mutable counterpart see [`as_uninit_slice_mut`].
    ///
    /// [`as_ref`]: pointer#method.as_ref-1
    /// [`as_uninit_slice_mut`]: #method.as_uninit_slice_mut
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be [valid] for reads for `ptr.len() * mem::size_of::<T>()` many bytes,
    ///   and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single [allocated object]!
    ///       Slices can never span across multiple allocated objects.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get mutated (except inside `UnsafeCell`).
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts`][].
    ///
    /// [valid]: crate::ptr#safety
    /// [allocated object]: crate::ptr#allocated-object
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
    pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
        if self.is_null() {
            None
        } else {
            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
            Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
        }
    }

    /// Returns `None` if the pointer is null, or else returns a unique slice to
    /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
    /// that the value be initialized.
    ///
    /// For the shared counterpart see [`as_uninit_slice`].
    ///
    /// [`as_mut`]: #method.as_mut
    /// [`as_uninit_slice`]: #method.as_uninit_slice-1
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be [valid] for reads and writes for `ptr.len() * mem::size_of::<T>()`
    ///   many bytes, and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single [allocated object]!
    ///       Slices can never span across multiple allocated objects.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get accessed (read or written) through any other pointer.
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts_mut`][].
    ///
    /// [valid]: crate::ptr#safety
    /// [allocated object]: crate::ptr#allocated-object
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
    pub const unsafe fn as_uninit_slice_mut<'a>(self) -> Option<&'a mut [MaybeUninit<T>]> {
        if self.is_null() {
            None
        } else {
            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
            Some(unsafe { slice::from_raw_parts_mut(self as *mut MaybeUninit<T>, self.len()) })
        }
    }
}

impl<T, const N: usize> *mut [T; N] {
    /// Returns a raw pointer to the array's buffer.
    ///
    /// This is equivalent to casting `self` to `*mut T`, but more type-safe.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(array_ptr_get)]
    /// use std::ptr;
    ///
    /// let arr: *mut [i8; 3] = ptr::null_mut();
    /// assert_eq!(arr.as_mut_ptr(), ptr::null_mut());
    /// ```
    #[inline]
    #[unstable(feature = "array_ptr_get", issue = "119834")]
    #[rustc_const_unstable(feature = "array_ptr_get", issue = "119834")]
    pub const fn as_mut_ptr(self) -> *mut T {
        self as *mut T
    }

    /// Returns a raw pointer to a mutable slice containing the entire array.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(array_ptr_get)]
    ///
    /// let mut arr = [1, 2, 5];
    /// let ptr: *mut [i32; 3] = &mut arr;
    /// unsafe {
    ///     (&mut *ptr.as_mut_slice())[..2].copy_from_slice(&[3, 4]);
    /// }
    /// assert_eq!(arr, [3, 4, 5]);
    /// ```
    #[inline]
    #[unstable(feature = "array_ptr_get", issue = "119834")]
    #[rustc_const_unstable(feature = "array_ptr_get", issue = "119834")]
    pub const fn as_mut_slice(self) -> *mut [T] {
        self
    }
}

// Equality for pointers
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialEq for *mut T {
    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn eq(&self, other: &*mut T) -> bool {
        *self == *other
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Eq for *mut T {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Ord for *mut T {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn cmp(&self, other: &*mut T) -> Ordering {
        if self < other {
            Less
        } else if self == other {
            Equal
        } else {
            Greater
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialOrd for *mut T {
    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn partial_cmp(&self, other: &*mut T) -> Option<Ordering> {
        Some(self.cmp(other))
    }

    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn lt(&self, other: &*mut T) -> bool {
        *self < *other
    }

    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn le(&self, other: &*mut T) -> bool {
        *self <= *other
    }

    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn gt(&self, other: &*mut T) -> bool {
        *self > *other
    }

    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn ge(&self, other: &*mut T) -> bool {
        *self >= *other
    }
}