use super::*;
use crate::cmp::Ordering::{Equal, Greater, Less};
use crate::intrinsics::const_eval_select;
use crate::mem::SizedTypeProperties;
use crate::slice::{self, SliceIndex};

impl<T: ?Sized> *const T {
    /// Returns `true` if the pointer is null.
    ///
    /// Note that unsized types have many possible null pointers, as only the
    /// raw data pointer is considered, not their length, vtable, etc.
    /// Therefore, two pointers that are null may still not compare equal to
    /// each other.
    ///
    /// ## Behavior during const evaluation
    ///
    /// When this function is used during const evaluation, it may return `false` for pointers
    /// that turn out to be null at runtime. Specifically, when a pointer to some memory
    /// is offset beyond its bounds in such a way that the resulting pointer is null,
    /// the function will still return `false`. There is no way for CTFE to know
    /// the absolute position of that memory, so we cannot tell if the pointer is
    /// null or not.
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "Follow the rabbit";
    /// let ptr: *const u8 = s.as_ptr();
    /// assert!(!ptr.is_null());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_unstable(feature = "const_ptr_is_null", issue = "74939")]
    #[rustc_diagnostic_item = "ptr_const_is_null"]
    #[inline]
    pub const fn is_null(self) -> bool {
        #[inline]
        fn runtime_impl(ptr: *const u8) -> bool {
            ptr.addr() == 0
        }

        #[inline]
        const fn const_impl(ptr: *const u8) -> bool {
            // Compare via a cast to a thin pointer, so that fat pointers consider
            // only their "data" part for null-ness.
            match (ptr).guaranteed_eq(null_mut()) {
                None => false,
                Some(res) => res,
            }
        }

        // SAFETY: The two versions are equivalent at runtime.
        unsafe { const_eval_select((self as *const u8,), const_impl, runtime_impl) }
    }

    /// Casts to a pointer of another type.
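    ///
    /// # Examples
    ///
    /// A small sketch of reinterpreting the pointee type; reading the first
    /// byte is valid here because the `u32` is initialized:
    ///
    /// ```
    /// let v: u32 = 0x1234_5678;
    /// let p: *const u32 = &v;
    /// let p8: *const u8 = p.cast::<u8>();
    /// unsafe {
    ///     // The first byte in memory matches the native-endian representation.
    ///     assert_eq!(*p8, v.to_ne_bytes()[0]);
    /// }
    /// ```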
    #[stable(feature = "ptr_cast", since = "1.38.0")]
    #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
    #[rustc_diagnostic_item = "const_ptr_cast"]
    #[inline(always)]
    pub const fn cast<U>(self) -> *const U {
        self as _
    }

    /// Use the pointer value in a new pointer of another type.
    ///
    /// In case `meta` is a (fat) pointer to an unsized type, this operation
    /// will ignore the pointer part, whereas for (thin) pointers to sized
    /// types, this has the same effect as a simple cast.
    ///
    /// The resulting pointer will have provenance of `self`, i.e., for a fat
    /// pointer, this operation is semantically the same as creating a new
    /// fat pointer with the data pointer value of `self` but the metadata of
    /// `meta`.
    ///
    /// # Examples
    ///
    /// This function is primarily useful for allowing byte-wise pointer
    /// arithmetic on potentially fat pointers:
    ///
    /// ```
    /// #![feature(set_ptr_value)]
    /// # use core::fmt::Debug;
    /// let arr: [i32; 3] = [1, 2, 3];
    /// let mut ptr = arr.as_ptr() as *const dyn Debug;
    /// let thin = ptr as *const u8;
    /// unsafe {
    ///     ptr = thin.add(8).with_metadata_of(ptr);
    ///     # assert_eq!(*(ptr as *const i32), 3);
    ///     println!("{:?}", &*ptr); // will print "3"
    /// }
    /// ```
    #[unstable(feature = "set_ptr_value", issue = "75091")]
    #[rustc_const_unstable(feature = "set_ptr_value", issue = "75091")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline]
    pub const fn with_metadata_of<U>(self, meta: *const U) -> *const U
    where
        U: ?Sized,
    {
        from_raw_parts::<U>(self as *const (), metadata(meta))
    }

    /// Changes constness without changing the type.
    ///
    /// This is a bit safer than `as` because it wouldn't silently change the type if the code is
    /// refactored.
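    ///
    /// # Examples
    ///
    /// A small sketch; only the mutability of the pointer type changes,
    /// the address is untouched:
    ///
    /// ```
    /// let x = 5_i32;
    /// let p: *const i32 = &x;
    /// let q: *mut i32 = p.cast_mut();
    /// assert_eq!(q as *const i32, p);
    /// ```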
    #[stable(feature = "ptr_const_cast", since = "1.65.0")]
    #[rustc_const_stable(feature = "ptr_const_cast", since = "1.65.0")]
    #[rustc_diagnostic_item = "ptr_cast_mut"]
    #[inline(always)]
    pub const fn cast_mut(self) -> *mut T {
        self as _
    }

    /// Casts a pointer to its raw bits.
    ///
    /// This is equivalent to `as usize`, but is more specific to enhance readability.
    /// The inverse method is [`from_bits`](#method.from_bits).
    ///
    /// In particular, `*p as usize` and `p as usize` will both compile for
    /// pointers to numeric types but do very different things, so using this
    /// helps emphasize that reading the bits was intentional.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_to_from_bits)]
    /// # #[cfg(not(miri))] { // doctest does not work with strict provenance
    /// let array = [13, 42];
    /// let p0: *const i32 = &array[0];
    /// assert_eq!(<*const _>::from_bits(p0.to_bits()), p0);
    /// let p1: *const i32 = &array[1];
    /// assert_eq!(p1.to_bits() - p0.to_bits(), 4);
    /// # }
    /// ```
    #[unstable(feature = "ptr_to_from_bits", issue = "91126")]
    #[deprecated(
        since = "1.67.0",
        note = "replaced by the `expose_addr` method, or update your code \
            to follow the strict provenance rules using its APIs"
    )]
    #[inline(always)]
    pub fn to_bits(self) -> usize
    where
        T: Sized,
    {
        self as usize
    }

    /// Creates a pointer from its raw bits.
    ///
    /// This is equivalent to `as *const T`, but is more specific to enhance readability.
    /// The inverse method is [`to_bits`](#method.to_bits).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_to_from_bits)]
    /// # #[cfg(not(miri))] { // doctest does not work with strict provenance
    /// use std::ptr::NonNull;
    /// let dangling: *const u8 = NonNull::dangling().as_ptr();
    /// assert_eq!(<*const u8>::from_bits(1), dangling);
    /// # }
    /// ```
    #[unstable(feature = "ptr_to_from_bits", issue = "91126")]
    #[deprecated(
        since = "1.67.0",
        note = "replaced by the `ptr::from_exposed_addr` function, or update \
            your code to follow the strict provenance rules using its APIs"
    )]
    #[allow(fuzzy_provenance_casts)] // this is an unstable and semi-deprecated cast function
    #[inline(always)]
    pub fn from_bits(bits: usize) -> Self
    where
        T: Sized,
    {
        bits as Self
    }

    /// Gets the "address" portion of the pointer.
    ///
    /// This is similar to `self as usize`, which semantically discards *provenance* and
    /// *address-space* information. However, unlike `self as usize`, casting the returned address
    /// back to a pointer yields [`invalid`][], which is undefined behavior to dereference. To
    /// properly restore the lost information and obtain a dereferenceable pointer, use
    /// [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr].
    ///
    /// If using those APIs is not possible because there is no way to preserve a pointer with the
    /// required provenance, then Strict Provenance might not be for you. Use pointer-integer casts
    /// or [`expose_addr`][pointer::expose_addr] and [`from_exposed_addr`][from_exposed_addr]
    /// instead. However, note that this makes your code less portable and less amenable to tools
    /// that check for compliance with the Rust memory model.
    ///
    /// On most platforms this will produce a value with the same bytes as the original
    /// pointer, because all the bytes are dedicated to describing the address.
    /// Platforms which need to store additional information in the pointer may
    /// perform a change of representation to produce a value containing only the address
    /// portion of the pointer. What that means is up to the platform to define.
    ///
    /// This API and its claimed semantics are part of the Strict Provenance experiment, and as such
    /// might change in the future (including possibly weakening this so it becomes wholly
    /// equivalent to `self as usize`). See the [module documentation][crate::ptr] for details.
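    ///
    /// # Examples
    ///
    /// A minimal sketch (requires the `strict_provenance` feature):
    ///
    /// ```
    /// #![feature(strict_provenance)]
    /// let x = 42_u8;
    /// let ptr: *const u8 = &x;
    /// // Offsetting the pointer by one byte raises the address by one.
    /// assert_eq!(ptr.wrapping_add(1).addr(), ptr.addr().wrapping_add(1));
    /// ```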
    #[must_use]
    #[inline(always)]
    #[unstable(feature = "strict_provenance", issue = "95228")]
    pub fn addr(self) -> usize {
        // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
        // provenance).
        unsafe { mem::transmute(self.cast::<()>()) }
    }

    /// Gets the "address" portion of the pointer, and 'exposes' the "provenance" part for future
    /// use in [`from_exposed_addr`][].
    ///
    /// This is equivalent to `self as usize`, which semantically discards *provenance* and
    /// *address-space* information. Furthermore, this (like the `as` cast) has the implicit
    /// side-effect of marking the provenance as 'exposed', so on platforms that support it you can
    /// later call [`from_exposed_addr`][] to reconstitute the original pointer including its
    /// provenance. (Reconstructing address space information, if required, is your responsibility.)
    ///
    /// Using this method means that code is *not* following [Strict
    /// Provenance][super#strict-provenance] rules. Supporting
    /// [`from_exposed_addr`][] complicates specification and reasoning and may not be supported by
    /// tools that help you to stay conformant with the Rust memory model, so it is recommended to
    /// use [`addr`][pointer::addr] wherever possible.
    ///
    /// On most platforms this will produce a value with the same bytes as the original pointer,
    /// because all the bytes are dedicated to describing the address. Platforms which need to store
    /// additional information in the pointer may not support this operation, since the 'expose'
    /// side-effect which is required for [`from_exposed_addr`][] to work is typically not
    /// available.
    ///
    /// It is unclear whether this method can be given a satisfying unambiguous specification. This
    /// API and its claimed semantics are part of [Exposed Provenance][super#exposed-provenance].
    ///
    /// [`from_exposed_addr`]: from_exposed_addr
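    ///
    /// # Examples
    ///
    /// A sketch of a round-trip through an exposed address, using the
    /// [`from_exposed_addr`][] companion function described above (requires
    /// the `exposed_provenance` feature and a platform that supports
    /// pointer-integer round-trips):
    ///
    /// ```
    /// #![feature(exposed_provenance)]
    /// use std::ptr;
    ///
    /// let x = 3_u32;
    /// let p: *const u32 = &x;
    /// let addr = p.expose_addr(); // exposes the provenance of `p`
    /// let q: *const u32 = ptr::from_exposed_addr(addr); // reconstitutes it
    /// unsafe { assert_eq!(*q, 3) };
    /// ```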
    #[must_use]
    #[inline(always)]
    #[unstable(feature = "exposed_provenance", issue = "95228")]
    pub fn expose_addr(self) -> usize {
        // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
        self.cast::<()>() as usize
    }

    /// Creates a new pointer with the given address.
    ///
    /// This performs the same operation as an `addr as ptr` cast, but copies
    /// the *address-space* and *provenance* of `self` to the new pointer.
    /// This allows us to dynamically preserve and propagate this important
    /// information in a way that is otherwise impossible with a unary cast.
    ///
    /// This is equivalent to using [`wrapping_offset`][pointer::wrapping_offset] to offset
    /// `self` to the given address, and therefore has all the same capabilities and restrictions.
    ///
    /// This API and its claimed semantics are part of the Strict Provenance experiment,
    /// see the [module documentation][crate::ptr] for details.
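    ///
    /// # Examples
    ///
    /// A small sketch of rounding an address down while keeping provenance
    /// (requires the `strict_provenance` feature):
    ///
    /// ```
    /// #![feature(strict_provenance)]
    /// let data = [0u8; 8];
    /// let ptr = data.as_ptr().wrapping_add(3);
    /// // Round the address down to a multiple of 2; unlike casting an integer
    /// // back to a pointer, the result keeps the provenance of `ptr`.
    /// let aligned = ptr.with_addr(ptr.addr() & !1);
    /// assert_eq!(aligned.addr() % 2, 0);
    /// ```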
    #[must_use]
    #[inline]
    #[unstable(feature = "strict_provenance", issue = "95228")]
    pub fn with_addr(self, addr: usize) -> Self {
        // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
        //
        // In the meantime, this operation is defined to be "as if" it was
        // a wrapping_offset, so we can emulate it as such. This should properly
        // restore pointer provenance even under today's compiler.
        let self_addr = self.addr() as isize;
        let dest_addr = addr as isize;
        let offset = dest_addr.wrapping_sub(self_addr);

        // This is the canonical desugaring of this operation
        self.wrapping_byte_offset(offset)
    }

    /// Creates a new pointer by mapping `self`'s address to a new one.
    ///
    /// This is a convenience for [`with_addr`][pointer::with_addr], see that method for details.
    ///
    /// This API and its claimed semantics are part of the Strict Provenance experiment,
    /// see the [module documentation][crate::ptr] for details.
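    ///
    /// # Examples
    ///
    /// The alignment sketch from [`with_addr`][pointer::with_addr], expressed
    /// as a mapping (requires the `strict_provenance` feature):
    ///
    /// ```
    /// #![feature(strict_provenance)]
    /// let data = [0u8; 8];
    /// let ptr = data.as_ptr().wrapping_add(3);
    /// let aligned = ptr.map_addr(|a| a & !1);
    /// assert_eq!(aligned.addr() % 2, 0);
    /// ```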
    #[must_use]
    #[inline]
    #[unstable(feature = "strict_provenance", issue = "95228")]
    pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
        self.with_addr(f(self.addr()))
    }

    /// Decomposes a (possibly wide) pointer into its data pointer and metadata components.
    ///
    /// The pointer can be later reconstructed with [`from_raw_parts`].
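    ///
    /// # Examples
    ///
    /// A minimal sketch using a slice pointer, whose metadata is its length
    /// (requires the `ptr_metadata` feature):
    ///
    /// ```
    /// #![feature(ptr_metadata)]
    /// let a = [1_i32, 2, 3];
    /// let ptr: *const [i32] = &a;
    /// let (data, len) = ptr.to_raw_parts();
    /// assert_eq!(len, 3);
    /// assert_eq!(data, ptr.cast::<()>());
    /// ```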
    #[unstable(feature = "ptr_metadata", issue = "81513")]
    #[rustc_const_unstable(feature = "ptr_metadata", issue = "81513")]
    #[inline]
    pub const fn to_raw_parts(self) -> (*const (), <T as super::Pointee>::Metadata) {
        (self.cast(), metadata(self))
    }

    /// Returns `None` if the pointer is null, or else returns a shared reference to
    /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`]
    /// must be used instead.
    ///
    /// [`as_uninit_ref`]: #method.as_uninit_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be properly aligned.
    ///
    /// * It must be "dereferenceable" in the sense defined in [the module documentation].
    ///
    /// * The pointer must point to an initialized instance of `T`.
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get mutated (except inside `UnsafeCell`).
    ///
    /// This applies even if the result of this method is unused!
    /// (The part about being initialized is not yet fully decided, but until
    /// it is, the only safe approach is to ensure that they are indeed initialized.)
    ///
    /// [the module documentation]: crate::ptr#safety
    ///
    /// # Examples
    ///
    /// ```
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_ref() {
    ///         println!("We got back the value: {val_back}!");
    ///     }
    /// }
    /// ```
    ///
    /// # Null-unchecked version
    ///
    /// If you are sure the pointer can never be null and are looking for some kind of
    /// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can
    /// dereference the pointer directly.
    ///
    /// ```
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     let val_back = &*ptr;
    ///     println!("We got back the value: {val_back}!");
    /// }
    /// ```
    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
    #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
    #[inline]
    pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> {
        // SAFETY: the caller must guarantee that `self` is valid
        // for a reference if it isn't null.
        if self.is_null() { None } else { unsafe { Some(&*self) } }
    }

    /// Returns `None` if the pointer is null, or else returns a shared reference to
    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
    /// that the value has to be initialized.
    ///
    /// [`as_ref`]: #method.as_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be properly aligned.
    ///
    /// * It must be "dereferenceable" in the sense defined in [the module documentation].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get mutated (except inside `UnsafeCell`).
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// [the module documentation]: crate::ptr#safety
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_uninit_ref() {
    ///         println!("We got back the value: {}!", val_back.assume_init());
    ///     }
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
    pub const unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
    where
        T: Sized,
    {
        // SAFETY: the caller must guarantee that `self` meets all the
        // requirements for a reference.
        if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
    }

    /// Calculates the offset from a pointer.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and resulting pointer must be either in bounds or one
    ///   byte past the end of the same [allocated object].
    ///
    /// * The computed offset, **in bytes**, cannot overflow an `isize`.
    ///
    /// * The offset being in bounds cannot rely on "wrapping around" the address
    ///   space. That is, the infinite-precision sum, **in bytes**, must fit in a `usize`.
    ///
    /// The compiler and standard library generally try to ensure allocations
    /// never reach a size where an offset is a concern. For instance, `Vec`
    /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
    /// `vec.as_ptr().add(vec.len())` is always safe.
    ///
    /// Most platforms fundamentally can't even construct such an allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    ///
    /// Consider using [`wrapping_offset`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_offset`]: #method.wrapping_offset
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     println!("{}", *ptr.offset(1) as char);
    ///     println!("{}", *ptr.offset(2) as char);
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset(self, count: isize) -> *const T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }

    /// Calculates the offset from a pointer in bytes.
    ///
    /// `count` is in units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [offset][pointer::offset] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
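    ///
    /// # Examples
    ///
    /// A small sketch reading the second byte of a `u16`, checked against the
    /// value's own byte representation so it is endianness-neutral:
    ///
    /// ```
    /// let v = 0x0102_u16;
    /// let ptr: *const u16 = &v;
    /// unsafe {
    ///     let second: *const u16 = ptr.byte_offset(1);
    ///     // `second` is misaligned for `u16`, so read it as a `u8` instead.
    ///     assert_eq!(*second.cast::<u8>(), v.to_ne_bytes()[1]);
    /// }
    /// ```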
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[rustc_allow_const_fn_unstable(set_ptr_value)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_offset(self, count: isize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { self.cast::<u8>().offset(count).with_metadata_of(self) }
    }

    /// Calculates the offset from a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
    /// be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`offset`], this method basically delays the requirement of staying within the
    /// same allocated object: [`offset`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
    /// words, leaving the allocated object and then re-entering it later is permitted.
    ///
    /// [`offset`]: #method.offset
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_offset(6);
    ///
    /// // This loop prints "1, 3, 5, "
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         print!("{}, ", *ptr);
    ///     }
    ///     ptr = ptr.wrapping_offset(step);
    /// }
    /// ```
    #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_offset(self, count: isize) -> *const T
    where
        T: Sized,
    {
        // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
        unsafe { intrinsics::arith_offset(self, count) }
    }

    /// Calculates the offset from a pointer in bytes using wrapping arithmetic.
    ///
    /// `count` is in units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_offset][pointer::wrapping_offset] on it. See that method
    /// for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
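    ///
    /// # Examples
    ///
    /// A small sketch; the arithmetic itself is safe, only dereferencing has
    /// requirements:
    ///
    /// ```
    /// let v = 0_u32;
    /// let ptr: *const u32 = &v;
    /// let moved = ptr.wrapping_byte_offset(2);
    /// assert_eq!(moved as usize, (ptr as usize).wrapping_add(2));
    /// // Going back recovers the original pointer.
    /// assert_eq!(moved.wrapping_byte_offset(-2), ptr);
    /// ```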
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[rustc_allow_const_fn_unstable(set_ptr_value)]
    pub const fn wrapping_byte_offset(self, count: isize) -> Self {
        self.cast::<u8>().wrapping_offset(count).with_metadata_of(self)
    }

    /// Masks out bits of the pointer according to a mask.
    ///
    /// This is a convenience for `ptr.map_addr(|a| a & mask)`.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    ///
    /// ## Examples
    ///
    /// ```
    /// #![feature(ptr_mask, strict_provenance)]
    /// let v = 17_u32;
    /// let ptr: *const u32 = &v;
    ///
    /// // `u32` is 4-byte aligned,
    /// // which means that the lower 2 bits are always 0.
    /// let tag_mask = 0b11;
    /// let ptr_mask = !tag_mask;
    ///
    /// // We can store something in these lower bits
    /// let tagged_ptr = ptr.map_addr(|a| a | 0b10);
    ///
    /// // Get the "tag" back
    /// let tag = tagged_ptr.addr() & tag_mask;
    /// assert_eq!(tag, 0b10);
    ///
    /// // Note that `tagged_ptr` is unaligned, so it's UB to read from it.
    /// // To get the original pointer back, `mask` can be used:
    /// let masked_ptr = tagged_ptr.mask(ptr_mask);
    /// assert_eq!(unsafe { *masked_ptr }, 17);
    /// ```
    #[unstable(feature = "ptr_mask", issue = "98290")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline(always)]
    pub fn mask(self, mask: usize) -> *const T {
        intrinsics::ptr_mask(self.cast::<()>(), mask).with_metadata_of(self)
    }

    /// Calculates the distance between two pointers. The returned value is in
    /// units of T: the distance in bytes divided by `mem::size_of::<T>()`.
    ///
    /// This is equivalent to `(self as isize - origin as isize) / (mem::size_of::<T>() as isize)`,
    /// except that it has a lot more opportunities for UB, in exchange for the compiler
    /// better understanding what you are doing.
    ///
    /// The primary motivation of this method is for computing the `len` of an array/slice
    /// of `T` that you are currently representing as a "start" and "end" pointer
    /// (and "end" is "one past the end" of the array).
    /// In that case, `end.offset_from(start)` gets you the length of the array.
    ///
    /// All of the following safety requirements are trivially satisfied for this use case.
    ///
    /// [`offset`]: #method.offset
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both `self` and `origin` must be either in bounds or one
    ///   byte past the end of the same [allocated object].
    ///
    /// * Both pointers must be *derived from* a pointer to the same object.
    ///   (See below for an example.)
    ///
    /// * The distance between the pointers, in bytes, must be an exact multiple
    ///   of the size of `T`.
    ///
    /// * The distance between the pointers, **in bytes**, cannot overflow an `isize`.
    ///
    /// * The distance being in bounds cannot rely on "wrapping around" the address space.
    ///
    /// Rust types are never larger than `isize::MAX` and Rust allocations never wrap around the
    /// address space, so two pointers within some value of any Rust type `T` will always satisfy
    /// the last two conditions. The standard library also generally ensures that allocations
    /// never reach a size where an offset is a concern. For instance, `Vec` and `Box` ensure they
    /// never allocate more than `isize::MAX` bytes, so `ptr_into_vec.offset_from(vec.as_ptr())`
    /// always satisfies the last two conditions.
    ///
    /// Most platforms fundamentally can't even construct such a large allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    /// (Note that [`offset`] and [`add`] also have a similar limitation and hence cannot be used on
    /// such large allocations either.)
    ///
    /// The requirement for pointers to be derived from the same allocated object is primarily
    /// needed for `const`-compatibility: the distance between pointers into *different* allocated
    /// objects is not known at compile-time. However, the requirement also exists at
    /// runtime and may be exploited by optimizations. If you wish to compute the difference between
    /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
    /// origin as isize) / mem::size_of::<T>()`.
    // FIXME: recommend `addr()` instead of `as usize` once that is stable.
    ///
    /// [`add`]: #method.add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let a = [0; 5];
    /// let ptr1: *const i32 = &a[1];
    /// let ptr2: *const i32 = &a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
    ///     assert_eq!(ptr1.offset(2), ptr2);
    ///     assert_eq!(ptr2.offset(-2), ptr1);
    /// }
    /// ```
    ///
    /// *Incorrect* usage:
    ///
    /// ```rust,no_run
    /// let ptr1 = Box::into_raw(Box::new(0u8)) as *const u8;
    /// let ptr2 = Box::into_raw(Box::new(1u8)) as *const u8;
    /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
    /// // Make ptr2_other an "alias" of ptr2, but derived from ptr1.
    /// let ptr2_other = (ptr1 as *const u8).wrapping_offset(diff);
    /// assert_eq!(ptr2 as usize, ptr2_other as usize);
    /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
    /// // computing their offset is undefined behavior, even though
    /// // they point to the same address!
    /// unsafe {
    ///     let zero = ptr2_other.offset_from(ptr2); // Undefined Behavior
    /// }
    /// ```
    #[stable(feature = "ptr_offset_from", since = "1.47.0")]
    #[rustc_const_stable(feature = "const_ptr_offset_from", since = "1.65.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset_from(self, origin: *const T) -> isize
    where
        T: Sized,
    {
        let pointee_size = mem::size_of::<T>();
        assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
        // SAFETY: the caller must uphold the safety contract for `ptr_offset_from`.
        unsafe { intrinsics::ptr_offset_from(self, origin) }
    }

    /// Calculates the distance between two pointers. The returned value is in
    /// units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`offset_from`][pointer::offset_from] on it. See that method for
    /// documentation and safety requirements.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointers,
    /// ignoring the metadata.
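    ///
    /// # Examples
    ///
    /// A small sketch measuring the byte distance between two elements of the
    /// same array:
    ///
    /// ```
    /// let a: [u16; 3] = [1, 2, 3];
    /// let begin: *const u16 = &a[0];
    /// let end: *const u16 = &a[2];
    /// unsafe {
    ///     // Both pointers are within the same allocated object (`a`),
    ///     // two `u16` elements = 4 bytes apart.
    ///     assert_eq!(end.byte_offset_from(begin), 4);
    /// }
    /// ```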
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[rustc_allow_const_fn_unstable(set_ptr_value)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: *const U) -> isize {
        // SAFETY: the caller must uphold the safety contract for `offset_from`.
        unsafe { self.cast::<u8>().offset_from(origin.cast::<u8>()) }
    }

    /// Calculates the distance between two pointers, *where it's known that
    /// `self` is equal to or greater than `origin`*. The returned value is in
    /// units of T: the distance in bytes is divided by `mem::size_of::<T>()`.
    ///
    /// This computes the same value that [`offset_from`](#method.offset_from)
    /// would compute, but with the added precondition that the offset is
    /// guaranteed to be non-negative. This method is equivalent to
    /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
    /// but it provides slightly more information to the optimizer, which can
    /// sometimes allow it to optimize slightly better with some backends.
    ///
    /// This method can be thought of as recovering the `count` that was passed
    /// to [`add`](#method.add) (or, with the parameters in the other order,
    /// to [`sub`](#method.sub)). The following are all equivalent, assuming
    /// that their safety preconditions are met:
    /// ```rust
    /// # #![feature(ptr_sub_ptr)]
    /// # unsafe fn blah(ptr: *const i32, origin: *const i32, count: usize) -> bool {
    /// ptr.sub_ptr(origin) == count
    /// # &&
    /// origin.add(count) == ptr
    /// # &&
    /// ptr.sub(count) == origin
    /// # }
    /// ```
    ///
    /// # Safety
    ///
    /// - The distance between the pointers must be non-negative (`self >= origin`)
    ///
    /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
    ///   apply to this method as well; see it for the full details.
    ///
    /// Importantly, despite the return type of this method being able to represent
    /// a larger offset, it's still *not permitted* to pass pointers which differ
    /// by more than `isize::MAX` *bytes*. As such, the result of this method will
    /// always be less than or equal to `isize::MAX as usize`.
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_sub_ptr)]
    ///
    /// let a = [0; 5];
    /// let ptr1: *const i32 = &a[1];
    /// let ptr2: *const i32 = &a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.sub_ptr(ptr1), 2);
    ///     assert_eq!(ptr1.add(2), ptr2);
    ///     assert_eq!(ptr2.sub(2), ptr1);
    ///     assert_eq!(ptr2.sub_ptr(ptr2), 0);
    /// }
    ///
    /// // This would be incorrect, as the pointers are not correctly ordered:
    /// // ptr1.sub_ptr(ptr2)
    /// ```
    #[unstable(feature = "ptr_sub_ptr", issue = "95892")]
    #[rustc_const_unstable(feature = "const_ptr_sub_ptr", issue = "95892")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn sub_ptr(self, origin: *const T) -> usize
    where
        T: Sized,
    {
        let this = self;
        // SAFETY: The comparison has no side-effects, and the intrinsic
        // does this check internally in the CTFE implementation.
        unsafe {
            assert_unsafe_precondition!(
                "ptr::sub_ptr requires `this >= origin`",
                [T](this: *const T, origin: *const T) => this >= origin
            )
        };

        let pointee_size = mem::size_of::<T>();
        assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
        // SAFETY: the caller must uphold the safety contract for `ptr_offset_from_unsigned`.
        unsafe { intrinsics::ptr_offset_from_unsigned(self, origin) }
    }

    /// Returns whether two pointers are guaranteed to be equal.
    ///
    /// At runtime this function behaves like `Some(self == other)`.
    /// However, in some contexts (e.g., compile-time evaluation),
    /// it is not always possible to determine equality of two pointers, so this function may
    /// spuriously return `None` for pointers that later actually turn out to have their equality known.
    /// But when it returns `Some`, the pointers' equality is guaranteed to be known.
    ///
    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
    /// version, and unsafe code must not
    /// rely on the result of this function for soundness. It is suggested to only use this function
    /// for performance optimizations where spurious `None` return values by this function do not
    /// affect the outcome, but just the performance.
    /// The consequences of using this method to make runtime and compile-time code behave
    /// differently have not been explored. This method should not be used to introduce such
    /// differences, and it should also not be stabilized before we have a better understanding
    /// of this issue.
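    ///
    /// # Examples
    ///
    /// A sketch of the runtime behavior, where the result is always `Some` as
    /// described above (requires the `const_raw_ptr_comparison` feature):
    ///
    /// ```
    /// #![feature(const_raw_ptr_comparison)]
    /// let a = [1, 2];
    /// let x: *const i32 = &a[0];
    /// let y: *const i32 = &a[1];
    /// assert_eq!(x.guaranteed_eq(x), Some(true));
    /// assert_eq!(x.guaranteed_eq(y), Some(false));
    /// ```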
    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[inline]
    pub const fn guaranteed_eq(self, other: *const T) -> Option<bool>
    where
        T: Sized,
    {
        match intrinsics::ptr_guaranteed_cmp(self, other) {
            2 => None,
            other => Some(other == 1),
        }
    }

    /// Returns whether two pointers are guaranteed to be unequal.
    ///
    /// At runtime this function behaves like `Some(self != other)`.
    /// However, in some contexts (e.g., compile-time evaluation),
    /// it is not always possible to determine inequality of two pointers, so this function may
    /// spuriously return `None` for pointers that later actually turn out to have their inequality known.
    /// But when it returns `Some`, the pointers' inequality is guaranteed to be known.
    ///
    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
    /// version, and unsafe code must not
    /// rely on the result of this function for soundness. It is suggested to only use this function
    /// for performance optimizations where spurious `None` return values by this function do not
    /// affect the outcome, but just the performance.
    /// The consequences of using this method to make runtime and compile-time code behave
    /// differently have not been explored. This method should not be used to introduce such
    /// differences, and it should also not be stabilized before we have a better understanding
    /// of this issue.
    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[inline]
    pub const fn guaranteed_ne(self, other: *const T) -> Option<bool>
    where
        T: Sized,
    {
        match self.guaranteed_eq(other) {
            None => None,
            Some(eq) => Some(!eq),
        }
    }

    /// Calculates the offset from a pointer (convenience for `.offset(count as isize)`).
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and resulting pointer must be either in bounds or one
    ///   byte past the end of the same [allocated object].
    ///
    /// * The computed offset, **in bytes**, cannot overflow an `isize`.
    ///
    /// * The offset being in bounds cannot rely on "wrapping around" the address
    ///   space. That is, the infinite-precision sum must fit in a `usize`.
    ///
    /// The compiler and standard library generally try to ensure allocations
    /// never reach a size where an offset is a concern. For instance, `Vec`
    /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
    /// `vec.as_ptr().add(vec.len())` is always safe.
    ///
    /// Most platforms fundamentally can't even construct such an allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    ///
    /// Consider using [`wrapping_add`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_add`]: #method.wrapping_add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     println!("{}", *ptr.add(1) as char);
    ///     println!("{}", *ptr.add(2) as char);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }

    /// Calculates the offset from a pointer in bytes (convenience for `.byte_offset(count as isize)`).
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [add][pointer::add] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
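    ///
    /// # Examples
    ///
    /// A small sketch stepping through the bytes of a `u32`:
    ///
    /// ```
    /// let v = 0x0102_0304_u32;
    /// let ptr: *const u32 = &v;
    /// let bytes = v.to_ne_bytes();
    /// unsafe {
    ///     for i in 0..4 {
    ///         // Read each byte via a `u8` pointer to avoid misaligned reads.
    ///         assert_eq!(*ptr.byte_add(i).cast::<u8>(), bytes[i]);
    ///     }
    /// }
    /// ```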
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[rustc_allow_const_fn_unstable(set_ptr_value)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_add(self, count: usize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `add`.
        unsafe { self.cast::<u8>().add(count).with_metadata_of(self) }
    }

    /// Calculates the offset from a pointer (convenience for
    /// `.offset((count as isize).wrapping_neg())`).
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined
    /// Behavior:
    ///
    /// * Both the starting and resulting pointer must be either in bounds or one
    ///   byte past the end of the same [allocated object].
    ///
    /// * The computed offset cannot exceed `isize::MAX` **bytes**.
    ///
    /// * The offset being in bounds cannot rely on "wrapping around" the address
    ///   space. That is, the infinite-precision sum must fit in a `usize`.
    ///
    /// The compiler and standard library generally try to ensure allocations
    /// never reach a size where an offset is a concern. For instance, `Vec`
    /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
    /// `vec.as_ptr().add(vec.len()).sub(vec.len())` is always safe.
    ///
    /// Most platforms fundamentally can't even construct such an allocation.
    /// For instance, no known 64-bit platform can ever serve a request
    /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
    /// However, some 32-bit and 16-bit platforms may successfully serve a request for
    /// more than `isize::MAX` bytes with things like Physical Address
    /// Extension. As such, memory acquired directly from allocators or memory
    /// mapped files *may* be too large to handle with this function.
    ///
    /// Consider using [`wrapping_sub`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_sub`]: #method.wrapping_sub
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    ///
    /// unsafe {
    ///     let end: *const u8 = s.as_ptr().add(3);
    ///     println!("{}", *end.sub(1) as char);
    ///     println!("{}", *end.sub(2) as char);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    // We could always go back to wrapping if unchecked becomes unacceptable
    #[rustc_allow_const_fn_unstable(const_int_unchecked_arith)]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        if T::IS_ZST {
            // Pointer arithmetic does nothing when the pointee is a ZST.
            self
        } else {
            // SAFETY: the caller must uphold the safety contract for `offset`.
            // Because the pointee is *not* a ZST, that means that `count` is
            // at most `isize::MAX`, and thus the negation cannot overflow.
            unsafe { self.offset(intrinsics::unchecked_sub(0, count as isize)) }
        }
    }

    /// Calculates the offset from a pointer in bytes (convenience for
    /// `.byte_offset((count as isize).wrapping_neg())`).
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [sub][pointer::sub] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
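    ///
    /// # Examples
    ///
    /// A small sketch walking back from a one-past-the-end pointer:
    ///
    /// ```
    /// let a = [1_u16, 2, 3];
    /// let end: *const u16 = a.as_ptr().wrapping_add(3);
    /// unsafe {
    ///     // Step back two bytes, i.e. one `u16` element.
    ///     assert_eq!(*end.byte_sub(2), 3);
    /// }
    /// ```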
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[rustc_allow_const_fn_unstable(set_ptr_value)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_sub(self, count: usize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `sub`.
        unsafe { self.cast::<u8>().sub(count).with_metadata_of(self) }
    }

    /// Calculates the offset from a pointer using wrapping arithmetic.
    /// (convenience for `.wrapping_offset(count as isize)`)
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
    /// be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`add`], this method basically delays the requirement of staying within the
    /// same allocated object: [`add`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocated object and then re-entering it later is permitted.
    ///
    /// [`add`]: #method.add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_add(6);
    ///
    /// // This loop prints "1, 3, 5, "
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         print!("{}, ", *ptr);
    ///     }
    ///     ptr = ptr.wrapping_add(step);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_add(self, count: usize) -> Self
    where
        T: Sized,
    {
        self.wrapping_offset(count as isize)
    }

    /// Calculates the offset from a pointer in bytes using wrapping arithmetic.
    /// (convenience for `.wrapping_byte_offset(count as isize)`)
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_add][pointer::wrapping_add] on it. See that method for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
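    ///
    /// # Examples
    ///
    /// A sketch of the pure address arithmetic; computing an out-of-bounds
    /// pointer is safe, and re-entering the original object makes it usable
    /// again:
    ///
    /// ```
    /// let v = 0_u8;
    /// let ptr: *const u8 = &v;
    /// // Wander far out of bounds and come back.
    /// let round_trip = ptr.wrapping_byte_add(1000).wrapping_byte_sub(1000);
    /// assert_eq!(round_trip, ptr);
    /// unsafe { assert_eq!(*round_trip, 0) };
    /// ```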
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[rustc_allow_const_fn_unstable(set_ptr_value)]
    pub const fn wrapping_byte_add(self, count: usize) -> Self {
        self.cast::<u8>().wrapping_add(count).with_metadata_of(self)
    }

    /// Calculates the offset from a pointer using wrapping arithmetic.
    /// (convenience for `.wrapping_offset((count as isize).wrapping_neg())`)
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
    /// be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`sub`], this method basically delays the requirement of staying within the
    /// same allocated object: [`sub`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocated object and then re-entering it later is permitted.
    ///
    /// [`sub`]: #method.sub
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements (backwards)
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let start_rounded_down = ptr.wrapping_sub(2);
    /// ptr = ptr.wrapping_add(4);
    /// let step = 2;
    /// // This loop prints "5, 3, 1, "
    /// while ptr != start_rounded_down {
    ///     unsafe {
    ///         print!("{}, ", *ptr);
    ///     }
    ///     ptr = ptr.wrapping_sub(step);
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        self.wrapping_offset((count as isize).wrapping_neg())
    }

    /// Calculates the offset from a pointer in bytes using wrapping arithmetic.
    /// (convenience for `.wrapping_byte_offset((count as isize).wrapping_neg())`)
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_sub][pointer::wrapping_sub] on it. See that method for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
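    ///
    /// # Examples
    ///
    /// A small sketch; the subtraction itself is always safe:
    ///
    /// ```
    /// let a = [1_u16, 2];
    /// let second: *const u16 = &a[1];
    /// // Two bytes back is exactly one `u16` element back.
    /// let first = second.wrapping_byte_sub(2);
    /// assert_eq!(first, &a[0] as *const u16);
    /// ```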
1211 #[must_use]
1212 #[inline(always)]
1213 #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1214 #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1215 #[rustc_allow_const_fn_unstable(set_ptr_value)]
1216 pub const fn wrapping_byte_sub(self, count: usize) -> Self {
1217 self.cast::<u8>().wrapping_sub(count).with_metadata_of(self)
1218 }
1219
1220 /// Reads the value from `self` without moving it. This leaves the
1221 /// memory in `self` unchanged.
1222 ///
1223 /// See [`ptr::read`] for safety concerns and examples.
1224 ///
1225 /// [`ptr::read`]: crate::ptr::read()
1226 #[stable(feature = "pointer_methods", since = "1.26.0")]
1227 #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
1228 #[inline]
1229 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1230 pub const unsafe fn read(self) -> T
1231 where
1232 T: Sized,
1233 {
1234 // SAFETY: the caller must uphold the safety contract for `read`.
1235 unsafe { read(self) }
1236 }
1237
1238 /// Performs a volatile read of the value from `self` without moving it. This
1239 /// leaves the memory in `self` unchanged.
1240 ///
1241 /// Volatile operations are intended to act on I/O memory, and are guaranteed
1242 /// to not be elided or reordered by the compiler across other volatile
1243 /// operations.
1244 ///
1245 /// See [`ptr::read_volatile`] for safety concerns and examples.
1246 ///
1247 /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
1248 #[stable(feature = "pointer_methods", since = "1.26.0")]
1249 #[inline]
1250 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1251 pub unsafe fn read_volatile(self) -> T
1252 where
1253 T: Sized,
1254 {
1255 // SAFETY: the caller must uphold the safety contract for `read_volatile`.
1256 unsafe { read_volatile(self) }
1257 }
1258
1259 /// Reads the value from `self` without moving it. This leaves the
1260 /// memory in `self` unchanged.
1261 ///
1262 /// Unlike `read`, the pointer may be unaligned.
1263 ///
1264 /// See [`ptr::read_unaligned`] for safety concerns and examples.
1265 ///
1266 /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
1267 #[stable(feature = "pointer_methods", since = "1.26.0")]
1268 #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
1269 #[inline]
1270 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1271 pub const unsafe fn read_unaligned(self) -> T
1272 where
1273 T: Sized,
1274 {
1275 // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
1276 unsafe { read_unaligned(self) }
1277 }
1278
1279 /// Copies `count * size_of<T>` bytes from `self` to `dest`. The source
1280 /// and destination may overlap.
1281 ///
1282 /// NOTE: this has the *same* argument order as [`ptr::copy`].
1283 ///
1284 /// See [`ptr::copy`] for safety concerns and examples.
1285 ///
1286 /// [`ptr::copy`]: crate::ptr::copy()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.63.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy`.
        unsafe { copy(self, dest, count) }
    }

    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
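    ///
    /// # Examples
    ///
    /// A minimal sketch; the two buffers are distinct allocations, so the
    /// non-overlap requirement holds trivially:
    ///
    /// ```
    /// let src = [1u8, 2, 3, 4];
    /// let mut dst = [0u8; 4];
    /// // SAFETY: the buffers are separate allocations and both pointers are
    /// // valid for four bytes.
    /// unsafe { src.as_ptr().copy_to_nonoverlapping(dst.as_mut_ptr(), 4); }
    /// assert_eq!(dst, [1, 2, 3, 4]);
    /// ```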
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.63.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
        unsafe { copy_nonoverlapping(self, dest, count) }
    }

    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
    /// `align`.
    ///
    /// If it is not possible to align the pointer, the implementation returns
    /// `usize::MAX`. It is permissible for the implementation to *always*
    /// return `usize::MAX`. Only your algorithm's performance can depend
    /// on getting a usable offset here, not its correctness.
    ///
    /// The offset is expressed as a number of `T` elements, not bytes. The value returned can be
    /// used with the `wrapping_add` method.
    ///
    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
    /// the returned offset is correct in all terms other than alignment.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two.
    ///
    /// # Examples
    ///
    /// Accessing adjacent `u8` as `u16`
    ///
    /// ```
    /// use std::mem::align_of;
    ///
    /// # unsafe {
    /// let x = [5_u8, 6, 7, 8, 9];
    /// let ptr = x.as_ptr();
    /// let offset = ptr.align_offset(align_of::<u16>());
    ///
    /// if offset < x.len() - 1 {
    ///     let u16_ptr = ptr.add(offset).cast::<u16>();
    ///     assert!(*u16_ptr == u16::from_ne_bytes([5, 6]) || *u16_ptr == u16::from_ne_bytes([6, 7]));
    /// } else {
    ///     // while the pointer can be aligned via `offset`, it would point
    ///     // outside the allocation
    /// }
    /// # }
    /// ```
    #[must_use]
    #[inline]
    #[stable(feature = "align_offset", since = "1.36.0")]
    #[rustc_const_unstable(feature = "const_align_offset", issue = "90962")]
    pub const fn align_offset(self, align: usize) -> usize
    where
        T: Sized,
    {
        if !align.is_power_of_two() {
            panic!("align_offset: align is not a power-of-two");
        }

        // SAFETY: `align` has been checked to be a power of 2 above
        let ret = unsafe { align_offset(self, align) };

        // Inform Miri that we want to consider the resulting pointer to be suitably aligned.
        #[cfg(miri)]
        if ret != usize::MAX {
            intrinsics::miri_promise_symbolic_alignment(self.wrapping_add(ret).cast(), align);
        }

        ret
    }

    /// Returns whether the pointer is properly aligned for `T`.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(pointer_is_aligned)]
    ///
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let data = AlignedI32(42);
    /// let ptr = &data as *const AlignedI32;
    ///
    /// assert!(ptr.is_aligned());
    /// assert!(!ptr.wrapping_byte_add(1).is_aligned());
    /// ```
    ///
    /// # At compiletime
    /// **Note: Alignment at compiletime is experimental and subject to change. See the
    /// [tracking issue] for details.**
    ///
    /// At compiletime, the compiler may not know where a value will end up in memory.
    /// Calling this function on a pointer created from a reference at compiletime will only
    /// return `true` if the pointer is guaranteed to be aligned. This means that the pointer
    /// is never aligned if cast to a type with a stricter alignment than the reference's
    /// underlying allocation.
    ///
    /// ```
    /// #![feature(pointer_is_aligned)]
    /// #![feature(const_pointer_is_aligned)]
    ///
    /// // On some platforms, the alignment of primitives is less than their size.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    /// #[repr(align(8))]
    /// struct AlignedI64(i64);
    ///
    /// const _: () = {
    ///     let data = AlignedI32(42);
    ///     let ptr = &data as *const AlignedI32;
    ///     assert!(ptr.is_aligned());
    ///
    ///     // At runtime either `ptr1` or `ptr2` would be aligned, but at compiletime neither is aligned.
    ///     let ptr1 = ptr.cast::<AlignedI64>();
    ///     let ptr2 = ptr.wrapping_add(1).cast::<AlignedI64>();
    ///     assert!(!ptr1.is_aligned());
    ///     assert!(!ptr2.is_aligned());
    /// };
    /// ```
    ///
    /// Due to this behavior, it is possible that a runtime pointer derived from a compiletime
    /// pointer is aligned, even if the compiletime pointer wasn't aligned.
    ///
    /// ```
    /// #![feature(pointer_is_aligned)]
    /// #![feature(const_pointer_is_aligned)]
    ///
    /// // On some platforms, the alignment of primitives is less than their size.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    /// #[repr(align(8))]
    /// struct AlignedI64(i64);
    ///
    /// // At compiletime, neither `COMPTIME_PTR` nor `COMPTIME_PTR + 1` is aligned.
    /// const COMPTIME_PTR: *const AlignedI32 = &AlignedI32(42);
    /// const _: () = assert!(!COMPTIME_PTR.cast::<AlignedI64>().is_aligned());
    /// const _: () = assert!(!COMPTIME_PTR.wrapping_add(1).cast::<AlignedI64>().is_aligned());
    ///
    /// // At runtime, either `runtime_ptr` or `runtime_ptr + 1` is aligned.
    /// let runtime_ptr = COMPTIME_PTR;
    /// assert_ne!(
    ///     runtime_ptr.cast::<AlignedI64>().is_aligned(),
    ///     runtime_ptr.wrapping_add(1).cast::<AlignedI64>().is_aligned(),
    /// );
    /// ```
    ///
    /// If a pointer is created from a fixed address, this function behaves the same during
    /// runtime and compiletime.
    ///
    /// ```
    /// #![feature(pointer_is_aligned)]
    /// #![feature(const_pointer_is_aligned)]
    ///
    /// // On some platforms, the alignment of primitives is less than their size.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    /// #[repr(align(8))]
    /// struct AlignedI64(i64);
    ///
    /// const _: () = {
    ///     let ptr = 40 as *const AlignedI32;
    ///     assert!(ptr.is_aligned());
    ///
    ///     // For pointers with a known address, runtime and compiletime behavior are identical.
    ///     let ptr1 = ptr.cast::<AlignedI64>();
    ///     let ptr2 = ptr.wrapping_add(1).cast::<AlignedI64>();
    ///     assert!(ptr1.is_aligned());
    ///     assert!(!ptr2.is_aligned());
    /// };
    /// ```
    ///
    /// [tracking issue]: https://github.com/rust-lang/rust/issues/104203
    #[must_use]
    #[inline]
    #[unstable(feature = "pointer_is_aligned", issue = "96284")]
    #[rustc_const_unstable(feature = "const_pointer_is_aligned", issue = "104203")]
    pub const fn is_aligned(self) -> bool
    where
        T: Sized,
    {
        self.is_aligned_to(mem::align_of::<T>())
    }

    /// Returns whether the pointer is aligned to `align`.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointer,
    /// ignoring the metadata.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two (this includes 0).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(pointer_is_aligned)]
    ///
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let data = AlignedI32(42);
    /// let ptr = &data as *const AlignedI32;
    ///
    /// assert!(ptr.is_aligned_to(1));
    /// assert!(ptr.is_aligned_to(2));
    /// assert!(ptr.is_aligned_to(4));
    ///
    /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
    /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
    ///
    /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
    /// ```
    ///
    /// # At compiletime
    /// **Note: Alignment at compiletime is experimental and subject to change. See the
    /// [tracking issue] for details.**
    ///
    /// At compiletime, the compiler may not know where a value will end up in memory.
    /// Calling this function on a pointer created from a reference at compiletime will only
    /// return `true` if the pointer is guaranteed to be aligned. This means that the pointer
    /// cannot be more strictly aligned than the reference's underlying allocation.
    ///
    /// ```
    /// #![feature(pointer_is_aligned)]
    /// #![feature(const_pointer_is_aligned)]
    ///
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// const _: () = {
    ///     let data = AlignedI32(42);
    ///     let ptr = &data as *const AlignedI32;
    ///
    ///     assert!(ptr.is_aligned_to(1));
    ///     assert!(ptr.is_aligned_to(2));
    ///     assert!(ptr.is_aligned_to(4));
    ///
    ///     // At compiletime, we know for sure that the pointer isn't aligned to 8.
    ///     assert!(!ptr.is_aligned_to(8));
    ///     assert!(!ptr.wrapping_add(1).is_aligned_to(8));
    /// };
    /// ```
    ///
    /// Due to this behavior, it is possible that a runtime pointer derived from a compiletime
    /// pointer is aligned, even if the compiletime pointer wasn't aligned.
    ///
    /// ```
    /// #![feature(pointer_is_aligned)]
    /// #![feature(const_pointer_is_aligned)]
    ///
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// // At compiletime, neither `COMPTIME_PTR` nor `COMPTIME_PTR + 1` is aligned.
    /// const COMPTIME_PTR: *const AlignedI32 = &AlignedI32(42);
    /// const _: () = assert!(!COMPTIME_PTR.is_aligned_to(8));
    /// const _: () = assert!(!COMPTIME_PTR.wrapping_add(1).is_aligned_to(8));
    ///
    /// // At runtime, either `runtime_ptr` or `runtime_ptr + 1` is aligned.
    /// let runtime_ptr = COMPTIME_PTR;
    /// assert_ne!(
    ///     runtime_ptr.is_aligned_to(8),
    ///     runtime_ptr.wrapping_add(1).is_aligned_to(8),
    /// );
    /// ```
    ///
    /// If a pointer is created from a fixed address, this function behaves the same during
    /// runtime and compiletime.
    ///
    /// ```
    /// #![feature(pointer_is_aligned)]
    /// #![feature(const_pointer_is_aligned)]
    ///
    /// const _: () = {
    ///     let ptr = 40 as *const u8;
    ///     assert!(ptr.is_aligned_to(1));
    ///     assert!(ptr.is_aligned_to(2));
    ///     assert!(ptr.is_aligned_to(4));
    ///     assert!(ptr.is_aligned_to(8));
    ///     assert!(!ptr.is_aligned_to(16));
    /// };
    /// ```
    ///
    /// [tracking issue]: https://github.com/rust-lang/rust/issues/104203
    #[must_use]
    #[inline]
    #[unstable(feature = "pointer_is_aligned", issue = "96284")]
    #[rustc_const_unstable(feature = "const_pointer_is_aligned", issue = "104203")]
    pub const fn is_aligned_to(self, align: usize) -> bool {
        if !align.is_power_of_two() {
            panic!("is_aligned_to: align is not a power-of-two");
        }

        #[inline]
        fn runtime_impl(ptr: *const (), align: usize) -> bool {
            ptr.addr() & (align - 1) == 0
        }

        #[inline]
        const fn const_impl(ptr: *const (), align: usize) -> bool {
            // We can't use the address of `self` in a `const fn`, so we use `align_offset` instead.
            // The cast to `()` is used to
            // 1. deal with fat pointers; and
            // 2. ensure that `align_offset` doesn't actually try to compute an offset.
            ptr.align_offset(align) == 0
        }

        // SAFETY: The two versions are equivalent at runtime.
        unsafe { const_eval_select((self.cast::<()>(), align), const_impl, runtime_impl) }
    }
}

impl<T> *const [T] {
    /// Returns the length of a raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the raw slice cannot be cast to a slice
    /// reference because the pointer is null or unaligned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_len)]
    ///
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[inline]
    #[unstable(feature = "slice_ptr_len", issue = "71146")]
    #[rustc_const_unstable(feature = "const_slice_ptr_len", issue = "71146")]
    pub const fn len(self) -> usize {
        metadata(self)
    }

    /// Returns `true` if the raw slice has a length of 0.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(slice_ptr_len)]
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert!(!slice.is_empty());
    /// ```
    #[inline(always)]
    #[unstable(feature = "slice_ptr_len", issue = "71146")]
    #[rustc_const_unstable(feature = "const_slice_ptr_len", issue = "71146")]
    pub const fn is_empty(self) -> bool {
        self.len() == 0
    }

    /// Returns a raw pointer to the slice's buffer.
    ///
    /// This is equivalent to casting `self` to `*const T`, but more type-safe.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_get)]
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert_eq!(slice.as_ptr(), ptr::null());
    /// ```
    #[inline]
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[rustc_const_unstable(feature = "slice_ptr_get", issue = "74265")]
    pub const fn as_ptr(self) -> *const T {
        self as *const T
    }

    /// Returns a raw pointer to an element or subslice, without doing bounds
    /// checking.
    ///
    /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
    /// is *[undefined behavior]* even if the resulting pointer is not used.
    ///
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(slice_ptr_get)]
    ///
    /// let x = &[1, 2, 4] as *const [i32];
    ///
    /// unsafe {
    ///     assert_eq!(x.get_unchecked(1), x.as_ptr().add(1));
    /// }
    /// ```
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[inline]
    pub unsafe fn get_unchecked<I>(self, index: I) -> *const I::Output
    where
        I: SliceIndex<[T]>,
    {
        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
        unsafe { index.get_unchecked(self) }
    }

    /// Returns `None` if the pointer is null, or else returns a shared slice to
    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
    /// the value to be initialized.
    ///
    /// [`as_ref`]: #method.as_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following are true:
    ///
    /// * The pointer must be [valid] for reads for `ptr.len() * mem::size_of::<T>()` many bytes,
    ///   and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single [allocated object]!
    ///       Slices can never span across multiple allocated objects.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get mutated (except inside `UnsafeCell`).
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts`][].
    ///
    /// [valid]: crate::ptr#safety
    /// [allocated object]: crate::ptr#allocated-object
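    ///
    /// # Examples
    ///
    /// A minimal sketch over a fully initialized buffer; `as_uninit_slice`
    /// itself never inspects the element values:
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    /// use std::ptr;
    ///
    /// let data = [1u8, 2, 3];
    /// let slice: *const [u8] = ptr::slice_from_raw_parts(data.as_ptr(), data.len());
    /// // SAFETY: `slice` is non-null, aligned, and valid for reads of 3 bytes.
    /// let uninit = unsafe { slice.as_uninit_slice() }.unwrap();
    /// assert_eq!(uninit.len(), 3);
    ///
    /// let null: *const [u8] = ptr::slice_from_raw_parts(ptr::null(), 0);
    /// // A null pointer yields `None`; the other preconditions do not apply then.
    /// assert!(unsafe { null.as_uninit_slice() }.is_none());
    /// ```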
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
    pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
        if self.is_null() {
            None
        } else {
            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
            Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
        }
    }
}

// Equality for pointers
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialEq for *const T {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn eq(&self, other: &*const T) -> bool {
        *self == *other
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Eq for *const T {}

// Comparison for pointers
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Ord for *const T {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn cmp(&self, other: &*const T) -> Ordering {
        if self < other {
            Less
        } else if self == other {
            Equal
        } else {
            Greater
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialOrd for *const T {
    #[inline]
    fn partial_cmp(&self, other: &*const T) -> Option<Ordering> {
        Some(self.cmp(other))
    }

    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn lt(&self, other: &*const T) -> bool {
        *self < *other
    }

    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn le(&self, other: &*const T) -> bool {
        *self <= *other
    }

    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn gt(&self, other: &*const T) -> bool {
        *self > *other
    }

    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn ge(&self, other: &*const T) -> bool {
        *self >= *other
    }
}
