1use crate::cmp::Ordering;
2use crate::marker::Unsize;
3use crate::mem::{MaybeUninit, SizedTypeProperties};
4use crate::num::NonZero;
5use crate::ops::{CoerceUnsized, DispatchFromDyn};
6use crate::pin::PinCoerceUnsized;
7use crate::ptr::Unique;
8use crate::slice::{self, SliceIndex};
9use crate::ub_checks::assert_unsafe_precondition;
10use crate::{fmt, hash, intrinsics, mem, ptr};
11
12/// `*mut T` but non-zero and [covariant].
13///
14/// This is often the correct thing to use when building data structures using
15/// raw pointers, but is ultimately more dangerous to use because of its additional
16/// properties. If you're not sure if you should use `NonNull<T>`, just use `*mut T`!
17///
18/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
19/// is never dereferenced. This is so that enums may use this forbidden value
20/// as a discriminant -- `Option<NonNull<T>>` has the same size as `*mut T`.
21/// However the pointer may still dangle if it isn't dereferenced.
22///
23/// Unlike `*mut T`, `NonNull<T>` was chosen to be covariant over `T`. This makes it
24/// possible to use `NonNull<T>` when building covariant types, but introduces the
25/// risk of unsoundness if used in a type that shouldn't actually be covariant.
26/// (The opposite choice was made for `*mut T` even though technically the unsoundness
27/// could only be caused by calling unsafe functions.)
28///
29/// Covariance is correct for most safe abstractions, such as `Box`, `Rc`, `Arc`, `Vec`,
30/// and `LinkedList`. This is the case because they provide a public API that follows the
31/// normal shared XOR mutable rules of Rust.
32///
33/// If your type cannot safely be covariant, you must ensure it contains some
34/// additional field to provide invariance. Often this field will be a [`PhantomData`]
35/// type like `PhantomData<Cell<T>>` or `PhantomData<&'a mut T>`.
36///
37/// Notice that `NonNull<T>` has a `From` instance for `&T`. However, this does
38/// not change the fact that mutating through a (pointer derived from a) shared
39/// reference is undefined behavior unless the mutation happens inside an
40/// [`UnsafeCell<T>`]. The same goes for creating a mutable reference from a shared
41/// reference. When using this `From` instance without an `UnsafeCell<T>`,
42/// it is your responsibility to ensure that `as_mut` is never called, and `as_ptr`
43/// is never used for mutation.
44///
45/// # Representation
46///
47/// Thanks to the [null pointer optimization],
48/// `NonNull<T>` and `Option<NonNull<T>>`
49/// are guaranteed to have the same size and alignment:
50///
51/// ```
52/// use std::ptr::NonNull;
53///
54/// assert_eq!(size_of::<NonNull<i16>>(), size_of::<Option<NonNull<i16>>>());
55/// assert_eq!(align_of::<NonNull<i16>>(), align_of::<Option<NonNull<i16>>>());
56///
57/// assert_eq!(size_of::<NonNull<str>>(), size_of::<Option<NonNull<str>>>());
58/// assert_eq!(align_of::<NonNull<str>>(), align_of::<Option<NonNull<str>>>());
59/// ```
60///
61/// [covariant]: https://doc.rust-lang.org/reference/subtyping.html
62/// [`PhantomData`]: crate::marker::PhantomData
63/// [`UnsafeCell<T>`]: crate::cell::UnsafeCell
64/// [null pointer optimization]: crate::option#representation
65#[stable(feature = "nonnull", since = "1.25.0")]
66#[repr(transparent)]
67#[rustc_layout_scalar_valid_range_start(1)]
68#[rustc_nonnull_optimization_guaranteed]
69#[rustc_diagnostic_item = "NonNull"]
70pub struct NonNull<T: ?Sized> {
71 // Remember to use `.as_ptr()` instead of `.pointer`, as field projecting to
72 // this is banned by <https://github.com/rust-lang/compiler-team/issues/807>.
73 pointer: *const T,
74}
75
76/// `NonNull` pointers are not `Send` because the data they reference may be aliased.
77// N.B., this impl is unnecessary, but should provide better error messages.
78#[stable(feature = "nonnull", since = "1.25.0")]
79impl<T: ?Sized> !Send for NonNull<T> {}
80
81/// `NonNull` pointers are not `Sync` because the data they reference may be aliased.
82// N.B., this impl is unnecessary, but should provide better error messages.
83#[stable(feature = "nonnull", since = "1.25.0")]
84impl<T: ?Sized> !Sync for NonNull<T> {}
85
86impl<T: Sized> NonNull<T> {
87 /// Creates a pointer with the given address and no [provenance][crate::ptr#provenance].
88 ///
89 /// For more details, see the equivalent method on a raw pointer, [`ptr::without_provenance_mut`].
90 ///
91 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
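    ///
    /// # Examples
    ///
    /// An illustrative sketch (the address `0x1000` is an arbitrary, assumed value):
    ///
    /// ```
    /// use std::num::NonZero;
    /// use std::ptr::NonNull;
    ///
    /// let addr = NonZero::new(0x1000usize).unwrap();
    /// // The resulting pointer has the requested address but no provenance,
    /// // so it must not be dereferenced.
    /// let ptr = NonNull::<u8>::without_provenance(addr);
    /// assert_eq!(ptr.addr(), addr);
    /// ```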
92 #[stable(feature = "nonnull_provenance", since = "CURRENT_RUSTC_VERSION")]
93 #[rustc_const_stable(feature = "nonnull_provenance", since = "CURRENT_RUSTC_VERSION")]
94 #[must_use]
95 #[inline]
96 pub const fn without_provenance(addr: NonZero<usize>) -> Self {
97 let pointer = crate::ptr::without_provenance(addr.get());
98 // SAFETY: we know `addr` is non-zero.
99 unsafe { NonNull { pointer } }
100 }
101
102 /// Creates a new `NonNull` that is dangling, but well-aligned.
103 ///
104 /// This is useful for initializing types which lazily allocate, like
105 /// `Vec::new` does.
106 ///
107 /// Note that the pointer value may potentially represent a valid pointer to
108 /// a `T`, which means this must not be used as a "not yet initialized"
109 /// sentinel value. Types that lazily allocate must track initialization by
110 /// some other means.
111 ///
112 /// # Examples
113 ///
114 /// ```
115 /// use std::ptr::NonNull;
116 ///
117 /// let ptr = NonNull::<u32>::dangling();
118 /// // Important: don't try to access the value of `ptr` without
119 /// // initializing it first! The pointer is not null but isn't valid either!
120 /// ```
121 #[stable(feature = "nonnull", since = "1.25.0")]
122 #[rustc_const_stable(feature = "const_nonnull_dangling", since = "1.36.0")]
123 #[must_use]
124 #[inline]
125 pub const fn dangling() -> Self {
126 let align = crate::ptr::Alignment::of::<T>();
127 NonNull::without_provenance(align.as_nonzero())
128 }
129
130 /// Converts an address back to a mutable pointer, picking up some previously 'exposed'
131 /// [provenance][crate::ptr#provenance].
132 ///
133 /// For more details, see the equivalent method on a raw pointer, [`ptr::with_exposed_provenance_mut`].
134 ///
135 /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
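    ///
    /// # Examples
    ///
    /// A minimal sketch that round-trips a pointer through an exposed address:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNull::from_mut(&mut x);
    /// // Expose the provenance and remember only the address.
    /// let addr = ptr.expose_provenance();
    /// // Later, reconstruct a pointer that picks up the previously exposed provenance.
    /// let ptr = NonNull::<u32>::with_exposed_provenance(addr);
    /// unsafe { ptr.write(5) };
    /// assert_eq!(x, 5);
    /// ```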
136 #[stable(feature = "nonnull_provenance", since = "CURRENT_RUSTC_VERSION")]
137 #[inline]
138 pub fn with_exposed_provenance(addr: NonZero<usize>) -> Self {
139 // SAFETY: we know `addr` is non-zero.
140 unsafe {
141 let ptr = crate::ptr::with_exposed_provenance_mut(addr.get());
142 NonNull::new_unchecked(ptr)
143 }
144 }
145
    /// Returns a shared reference to the value. In contrast to [`as_ref`], this does not require
    /// that the value be initialized.
148 ///
149 /// For the mutable counterpart see [`as_uninit_mut`].
150 ///
151 /// [`as_ref`]: NonNull::as_ref
152 /// [`as_uninit_mut`]: NonNull::as_uninit_mut
153 ///
154 /// # Safety
155 ///
156 /// When calling this method, you have to ensure that
157 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
158 /// Note that because the created reference is to `MaybeUninit<T>`, the
159 /// source pointer can point to uninitialized memory.
160 #[inline]
161 #[must_use]
162 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
163 pub const unsafe fn as_uninit_ref<'a>(self) -> &'a MaybeUninit<T> {
164 // SAFETY: the caller must guarantee that `self` meets all the
165 // requirements for a reference.
166 unsafe { &*self.cast().as_ptr() }
167 }
168
    /// Returns a unique reference to the value. In contrast to [`as_mut`], this does not require
    /// that the value be initialized.
171 ///
172 /// For the shared counterpart see [`as_uninit_ref`].
173 ///
174 /// [`as_mut`]: NonNull::as_mut
175 /// [`as_uninit_ref`]: NonNull::as_uninit_ref
176 ///
177 /// # Safety
178 ///
179 /// When calling this method, you have to ensure that
180 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
181 /// Note that because the created reference is to `MaybeUninit<T>`, the
182 /// source pointer can point to uninitialized memory.
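    ///
    /// # Examples
    ///
    /// A short sketch that initializes a value through the returned reference:
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    /// use std::mem::MaybeUninit;
    /// use std::ptr::NonNull;
    ///
    /// let mut storage = MaybeUninit::<u32>::uninit();
    /// // A `NonNull<u32>` pointing at not-yet-initialized memory.
    /// let ptr = NonNull::from_mut(&mut storage).cast::<u32>();
    /// let slot = unsafe { ptr.as_uninit_mut() };
    /// slot.write(7);
    /// // SAFETY: the value was just initialized above.
    /// assert_eq!(unsafe { storage.assume_init() }, 7);
    /// ```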
183 #[inline]
184 #[must_use]
185 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
186 pub const unsafe fn as_uninit_mut<'a>(self) -> &'a mut MaybeUninit<T> {
187 // SAFETY: the caller must guarantee that `self` meets all the
188 // requirements for a reference.
189 unsafe { &mut *self.cast().as_ptr() }
190 }
191}
192
193impl<T: ?Sized> NonNull<T> {
194 /// Creates a new `NonNull`.
195 ///
196 /// # Safety
197 ///
198 /// `ptr` must be non-null.
199 ///
200 /// # Examples
201 ///
202 /// ```
203 /// use std::ptr::NonNull;
204 ///
205 /// let mut x = 0u32;
206 /// let ptr = unsafe { NonNull::new_unchecked(&mut x as *mut _) };
207 /// ```
208 ///
209 /// *Incorrect* usage of this function:
210 ///
211 /// ```rust,no_run
212 /// use std::ptr::NonNull;
213 ///
214 /// // NEVER DO THAT!!! This is undefined behavior. ⚠️
215 /// let ptr = unsafe { NonNull::<u32>::new_unchecked(std::ptr::null_mut()) };
216 /// ```
217 #[stable(feature = "nonnull", since = "1.25.0")]
218 #[rustc_const_stable(feature = "const_nonnull_new_unchecked", since = "1.25.0")]
219 #[inline]
220 #[track_caller]
221 pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
222 // SAFETY: the caller must guarantee that `ptr` is non-null.
223 unsafe {
224 assert_unsafe_precondition!(
225 check_language_ub,
226 "NonNull::new_unchecked requires that the pointer is non-null",
227 (ptr: *mut () = ptr as *mut ()) => !ptr.is_null()
228 );
229 NonNull { pointer: ptr as _ }
230 }
231 }
232
233 /// Creates a new `NonNull` if `ptr` is non-null.
234 ///
235 /// # Panics during const evaluation
236 ///
237 /// This method will panic during const evaluation if the pointer cannot be
238 /// determined to be null or not. See [`is_null`] for more information.
239 ///
240 /// [`is_null`]: ../primitive.pointer.html#method.is_null-1
241 ///
242 /// # Examples
243 ///
244 /// ```
245 /// use std::ptr::NonNull;
246 ///
247 /// let mut x = 0u32;
248 /// let ptr = NonNull::<u32>::new(&mut x as *mut _).expect("ptr is null!");
249 ///
250 /// if let Some(ptr) = NonNull::<u32>::new(std::ptr::null_mut()) {
251 /// unreachable!();
252 /// }
253 /// ```
254 #[stable(feature = "nonnull", since = "1.25.0")]
255 #[rustc_const_stable(feature = "const_nonnull_new", since = "1.85.0")]
256 #[inline]
257 pub const fn new(ptr: *mut T) -> Option<Self> {
258 if !ptr.is_null() {
259 // SAFETY: The pointer is already checked and is not null
260 Some(unsafe { Self::new_unchecked(ptr) })
261 } else {
262 None
263 }
264 }
265
266 /// Converts a reference to a `NonNull` pointer.
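    ///
    /// # Examples
    ///
    /// A brief sketch of wrapping a shared reference:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let x = 5u32;
    /// let ptr = NonNull::from_ref(&x);
    /// // Reading through the pointer is fine; mutating through it would not be.
    /// assert_eq!(unsafe { ptr.read() }, 5);
    /// ```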
267 #[stable(feature = "non_null_from_ref", since = "CURRENT_RUSTC_VERSION")]
268 #[rustc_const_stable(feature = "non_null_from_ref", since = "CURRENT_RUSTC_VERSION")]
269 #[inline]
270 pub const fn from_ref(r: &T) -> Self {
271 // SAFETY: A reference cannot be null.
272 unsafe { NonNull { pointer: r as *const T } }
273 }
274
275 /// Converts a mutable reference to a `NonNull` pointer.
276 #[stable(feature = "non_null_from_ref", since = "CURRENT_RUSTC_VERSION")]
277 #[rustc_const_stable(feature = "non_null_from_ref", since = "CURRENT_RUSTC_VERSION")]
278 #[inline]
279 pub const fn from_mut(r: &mut T) -> Self {
280 // SAFETY: A mutable reference cannot be null.
281 unsafe { NonNull { pointer: r as *mut T } }
282 }
283
284 /// Performs the same functionality as [`std::ptr::from_raw_parts`], except that a
285 /// `NonNull` pointer is returned, as opposed to a raw `*const` pointer.
286 ///
287 /// See the documentation of [`std::ptr::from_raw_parts`] for more details.
288 ///
289 /// [`std::ptr::from_raw_parts`]: crate::ptr::from_raw_parts
290 #[unstable(feature = "ptr_metadata", issue = "81513")]
291 #[inline]
292 pub const fn from_raw_parts(
293 data_pointer: NonNull<impl super::Thin>,
294 metadata: <T as super::Pointee>::Metadata,
295 ) -> NonNull<T> {
        // SAFETY: The result of `ptr::from_raw_parts_mut` is non-null because `data_pointer` is.
297 unsafe {
298 NonNull::new_unchecked(super::from_raw_parts_mut(data_pointer.as_ptr(), metadata))
299 }
300 }
301
302 /// Decompose a (possibly wide) pointer into its data pointer and metadata components.
303 ///
304 /// The pointer can be later reconstructed with [`NonNull::from_raw_parts`].
305 #[unstable(feature = "ptr_metadata", issue = "81513")]
306 #[must_use = "this returns the result of the operation, \
307 without modifying the original"]
308 #[inline]
309 pub const fn to_raw_parts(self) -> (NonNull<()>, <T as super::Pointee>::Metadata) {
310 (self.cast(), super::metadata(self.as_ptr()))
311 }
312
313 /// Gets the "address" portion of the pointer.
314 ///
315 /// For more details, see the equivalent method on a raw pointer, [`pointer::addr`].
316 ///
317 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
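    ///
    /// # Examples
    ///
    /// A small sketch comparing the strict-provenance address with the raw pointer's address:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let x = 42u32;
    /// let ptr = NonNull::from_ref(&x);
    /// // The address is the same value the raw pointer reports, but wrapped in `NonZero`.
    /// assert_eq!(ptr.addr().get(), ptr.as_ptr().addr());
    /// ```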
318 #[must_use]
319 #[inline]
320 #[stable(feature = "strict_provenance", since = "1.84.0")]
321 pub fn addr(self) -> NonZero<usize> {
322 // SAFETY: The pointer is guaranteed by the type to be non-null,
323 // meaning that the address will be non-zero.
324 unsafe { NonZero::new_unchecked(self.as_ptr().addr()) }
325 }
326
327 /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
328 /// [`with_exposed_provenance`][NonNull::with_exposed_provenance] and returns the "address" portion.
329 ///
330 /// For more details, see the equivalent method on a raw pointer, [`pointer::expose_provenance`].
331 ///
332 /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
333 #[stable(feature = "nonnull_provenance", since = "CURRENT_RUSTC_VERSION")]
334 pub fn expose_provenance(self) -> NonZero<usize> {
335 // SAFETY: The pointer is guaranteed by the type to be non-null,
336 // meaning that the address will be non-zero.
337 unsafe { NonZero::new_unchecked(self.as_ptr().expose_provenance()) }
338 }
339
340 /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
341 /// `self`.
342 ///
343 /// For more details, see the equivalent method on a raw pointer, [`pointer::with_addr`].
344 ///
345 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
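    ///
    /// # Examples
    ///
    /// A sketch that re-addresses a pointer to another element of the same array:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut arr = [0u8, 1, 2, 3];
    /// // Pointer to the whole array, so its provenance covers every element.
    /// let base: NonNull<u8> = NonNull::from_mut(&mut arr).cast();
    /// // Re-address the pointer to the third element while keeping `base`'s provenance.
    /// let third = base.with_addr(base.addr().checked_add(2).unwrap());
    /// assert_eq!(unsafe { third.read() }, 2);
    /// ```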
346 #[must_use]
347 #[inline]
348 #[stable(feature = "strict_provenance", since = "1.84.0")]
349 pub fn with_addr(self, addr: NonZero<usize>) -> Self {
        // SAFETY: The result of `pointer::with_addr` is non-null because `addr` is guaranteed to be non-zero.
351 unsafe { NonNull::new_unchecked(self.as_ptr().with_addr(addr.get()) as *mut _) }
352 }
353
354 /// Creates a new pointer by mapping `self`'s address to a new one, preserving the
355 /// [provenance][crate::ptr#provenance] of `self`.
356 ///
357 /// For more details, see the equivalent method on a raw pointer, [`pointer::map_addr`].
358 ///
359 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
360 #[must_use]
361 #[inline]
362 #[stable(feature = "strict_provenance", since = "1.84.0")]
363 pub fn map_addr(self, f: impl FnOnce(NonZero<usize>) -> NonZero<usize>) -> Self {
364 self.with_addr(f(self.addr()))
365 }
366
367 /// Acquires the underlying `*mut` pointer.
368 ///
369 /// # Examples
370 ///
371 /// ```
372 /// use std::ptr::NonNull;
373 ///
374 /// let mut x = 0u32;
375 /// let ptr = NonNull::new(&mut x).expect("ptr is null!");
376 ///
377 /// let x_value = unsafe { *ptr.as_ptr() };
378 /// assert_eq!(x_value, 0);
379 ///
380 /// unsafe { *ptr.as_ptr() += 2; }
381 /// let x_value = unsafe { *ptr.as_ptr() };
382 /// assert_eq!(x_value, 2);
383 /// ```
384 #[stable(feature = "nonnull", since = "1.25.0")]
385 #[rustc_const_stable(feature = "const_nonnull_as_ptr", since = "1.32.0")]
386 #[rustc_never_returns_null_ptr]
387 #[must_use]
388 #[inline(always)]
389 pub const fn as_ptr(self) -> *mut T {
390 // This is a transmute for the same reasons as `NonZero::get`.
391
392 // SAFETY: `NonNull` is `transparent` over a `*const T`, and `*const T`
393 // and `*mut T` have the same layout, so transitively we can transmute
394 // our `NonNull` to a `*mut T` directly.
395 unsafe { mem::transmute::<Self, *mut T>(self) }
396 }
397
398 /// Returns a shared reference to the value. If the value may be uninitialized, [`as_uninit_ref`]
399 /// must be used instead.
400 ///
401 /// For the mutable counterpart see [`as_mut`].
402 ///
403 /// [`as_uninit_ref`]: NonNull::as_uninit_ref
404 /// [`as_mut`]: NonNull::as_mut
405 ///
406 /// # Safety
407 ///
408 /// When calling this method, you have to ensure that
409 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
410 ///
411 /// # Examples
412 ///
413 /// ```
414 /// use std::ptr::NonNull;
415 ///
416 /// let mut x = 0u32;
417 /// let ptr = NonNull::new(&mut x as *mut _).expect("ptr is null!");
418 ///
419 /// let ref_x = unsafe { ptr.as_ref() };
420 /// println!("{ref_x}");
421 /// ```
422 ///
423 /// [the module documentation]: crate::ptr#safety
424 #[stable(feature = "nonnull", since = "1.25.0")]
425 #[rustc_const_stable(feature = "const_nonnull_as_ref", since = "1.73.0")]
426 #[must_use]
427 #[inline(always)]
428 pub const unsafe fn as_ref<'a>(&self) -> &'a T {
429 // SAFETY: the caller must guarantee that `self` meets all the
430 // requirements for a reference.
431 // `cast_const` avoids a mutable raw pointer deref.
432 unsafe { &*self.as_ptr().cast_const() }
433 }
434
435 /// Returns a unique reference to the value. If the value may be uninitialized, [`as_uninit_mut`]
436 /// must be used instead.
437 ///
438 /// For the shared counterpart see [`as_ref`].
439 ///
440 /// [`as_uninit_mut`]: NonNull::as_uninit_mut
441 /// [`as_ref`]: NonNull::as_ref
442 ///
443 /// # Safety
444 ///
445 /// When calling this method, you have to ensure that
446 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
448 ///
449 /// ```
450 /// use std::ptr::NonNull;
451 ///
452 /// let mut x = 0u32;
453 /// let mut ptr = NonNull::new(&mut x).expect("null pointer");
454 ///
455 /// let x_ref = unsafe { ptr.as_mut() };
456 /// assert_eq!(*x_ref, 0);
457 /// *x_ref += 2;
458 /// assert_eq!(*x_ref, 2);
459 /// ```
460 ///
461 /// [the module documentation]: crate::ptr#safety
462 #[stable(feature = "nonnull", since = "1.25.0")]
463 #[rustc_const_stable(feature = "const_ptr_as_ref", since = "1.83.0")]
464 #[must_use]
465 #[inline(always)]
466 pub const unsafe fn as_mut<'a>(&mut self) -> &'a mut T {
467 // SAFETY: the caller must guarantee that `self` meets all the
468 // requirements for a mutable reference.
469 unsafe { &mut *self.as_ptr() }
470 }
471
472 /// Casts to a pointer of another type.
473 ///
474 /// # Examples
475 ///
476 /// ```
477 /// use std::ptr::NonNull;
478 ///
479 /// let mut x = 0u32;
480 /// let ptr = NonNull::new(&mut x as *mut _).expect("null pointer");
481 ///
482 /// let casted_ptr = ptr.cast::<i8>();
483 /// let raw_ptr: *mut i8 = casted_ptr.as_ptr();
484 /// ```
485 #[stable(feature = "nonnull_cast", since = "1.27.0")]
486 #[rustc_const_stable(feature = "const_nonnull_cast", since = "1.36.0")]
487 #[must_use = "this returns the result of the operation, \
488 without modifying the original"]
489 #[inline]
490 pub const fn cast<U>(self) -> NonNull<U> {
491 // SAFETY: `self` is a `NonNull` pointer which is necessarily non-null
492 unsafe { NonNull { pointer: self.as_ptr() as *mut U } }
493 }
494
    /// Try to cast to a pointer of another type by checking alignment.
496 ///
497 /// If the pointer is properly aligned to the target type, it will be
498 /// cast to the target type. Otherwise, `None` is returned.
499 ///
500 /// # Examples
501 ///
502 /// ```rust
503 /// #![feature(pointer_try_cast_aligned)]
504 /// use std::ptr::NonNull;
505 ///
506 /// let mut x = 0u64;
507 ///
508 /// let aligned = NonNull::from_mut(&mut x);
509 /// let unaligned = unsafe { aligned.byte_add(1) };
510 ///
511 /// assert!(aligned.try_cast_aligned::<u32>().is_some());
512 /// assert!(unaligned.try_cast_aligned::<u32>().is_none());
513 /// ```
514 #[unstable(feature = "pointer_try_cast_aligned", issue = "141221")]
515 #[must_use = "this returns the result of the operation, \
516 without modifying the original"]
517 #[inline]
518 pub fn try_cast_aligned<U>(self) -> Option<NonNull<U>> {
519 if self.is_aligned_to(align_of::<U>()) { Some(self.cast()) } else { None }
520 }
521
522 /// Adds an offset to a pointer.
523 ///
524 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
525 /// offset of `3 * size_of::<T>()` bytes.
526 ///
527 /// # Safety
528 ///
529 /// If any of the following conditions are violated, the result is Undefined Behavior:
530 ///
531 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
532 ///
533 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
534 /// [allocation], and the entire memory range between `self` and the result must be in
535 /// bounds of that allocation. In particular, this range must not "wrap around" the edge
536 /// of the address space.
537 ///
538 /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
539 /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
540 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
541 /// safe.
542 ///
543 /// [allocation]: crate::ptr#allocation
544 ///
545 /// # Examples
546 ///
547 /// ```
548 /// use std::ptr::NonNull;
549 ///
550 /// let mut s = [1, 2, 3];
551 /// let ptr: NonNull<u32> = NonNull::new(s.as_mut_ptr()).unwrap();
552 ///
553 /// unsafe {
554 /// println!("{}", ptr.offset(1).read());
555 /// println!("{}", ptr.offset(2).read());
556 /// }
557 /// ```
558 #[inline(always)]
559 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
560 #[must_use = "returns a new pointer rather than modifying its argument"]
561 #[stable(feature = "non_null_convenience", since = "1.80.0")]
562 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
563 pub const unsafe fn offset(self, count: isize) -> Self
564 where
565 T: Sized,
566 {
567 // SAFETY: the caller must uphold the safety contract for `offset`.
568 // Additionally safety contract of `offset` guarantees that the resulting pointer is
569 // pointing to an allocation, there can't be an allocation at null, thus it's safe to
570 // construct `NonNull`.
571 unsafe { NonNull { pointer: intrinsics::offset(self.as_ptr(), count) } }
572 }
573
574 /// Calculates the offset from a pointer in bytes.
575 ///
576 /// `count` is in units of **bytes**.
577 ///
578 /// This is purely a convenience for casting to a `u8` pointer and
579 /// using [offset][pointer::offset] on it. See that method for documentation
580 /// and safety requirements.
581 ///
582 /// For non-`Sized` pointees this operation changes only the data pointer,
583 /// leaving the metadata untouched.
584 #[must_use]
585 #[inline(always)]
586 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
587 #[stable(feature = "non_null_convenience", since = "1.80.0")]
588 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
589 pub const unsafe fn byte_offset(self, count: isize) -> Self {
590 // SAFETY: the caller must uphold the safety contract for `offset` and `byte_offset` has
591 // the same safety contract.
592 // Additionally safety contract of `offset` guarantees that the resulting pointer is
593 // pointing to an allocation, there can't be an allocation at null, thus it's safe to
594 // construct `NonNull`.
595 unsafe { NonNull { pointer: self.as_ptr().byte_offset(count) } }
596 }
597
598 /// Adds an offset to a pointer (convenience for `.offset(count as isize)`).
599 ///
600 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
601 /// offset of `3 * size_of::<T>()` bytes.
602 ///
603 /// # Safety
604 ///
605 /// If any of the following conditions are violated, the result is Undefined Behavior:
606 ///
607 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
608 ///
609 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
610 /// [allocation], and the entire memory range between `self` and the result must be in
611 /// bounds of that allocation. In particular, this range must not "wrap around" the edge
612 /// of the address space.
613 ///
614 /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
615 /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
616 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
617 /// safe.
618 ///
619 /// [allocation]: crate::ptr#allocation
620 ///
621 /// # Examples
622 ///
623 /// ```
624 /// use std::ptr::NonNull;
625 ///
626 /// let s: &str = "123";
627 /// let ptr: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap();
628 ///
629 /// unsafe {
630 /// println!("{}", ptr.add(1).read() as char);
631 /// println!("{}", ptr.add(2).read() as char);
632 /// }
633 /// ```
634 #[inline(always)]
635 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
636 #[must_use = "returns a new pointer rather than modifying its argument"]
637 #[stable(feature = "non_null_convenience", since = "1.80.0")]
638 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
639 pub const unsafe fn add(self, count: usize) -> Self
640 where
641 T: Sized,
642 {
643 // SAFETY: the caller must uphold the safety contract for `offset`.
644 // Additionally safety contract of `offset` guarantees that the resulting pointer is
645 // pointing to an allocation, there can't be an allocation at null, thus it's safe to
646 // construct `NonNull`.
647 unsafe { NonNull { pointer: intrinsics::offset(self.as_ptr(), count) } }
648 }
649
650 /// Calculates the offset from a pointer in bytes (convenience for `.byte_offset(count as isize)`).
651 ///
652 /// `count` is in units of bytes.
653 ///
654 /// This is purely a convenience for casting to a `u8` pointer and
655 /// using [`add`][NonNull::add] on it. See that method for documentation
656 /// and safety requirements.
657 ///
658 /// For non-`Sized` pointees this operation changes only the data pointer,
659 /// leaving the metadata untouched.
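    ///
    /// # Examples
    ///
    /// A short sketch advancing a `u16` pointer by one whole element using a byte count:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = [0x1122u16, 0x3344];
    /// let ptr: NonNull<u16> = NonNull::from_mut(&mut x).cast();
    /// // Advance by two bytes, i.e. one `u16` element.
    /// let second = unsafe { ptr.byte_add(2) };
    /// assert_eq!(unsafe { second.read() }, 0x3344);
    /// ```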
660 #[must_use]
661 #[inline(always)]
662 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
663 #[stable(feature = "non_null_convenience", since = "1.80.0")]
664 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
665 pub const unsafe fn byte_add(self, count: usize) -> Self {
666 // SAFETY: the caller must uphold the safety contract for `add` and `byte_add` has the same
667 // safety contract.
668 // Additionally safety contract of `add` guarantees that the resulting pointer is pointing
669 // to an allocation, there can't be an allocation at null, thus it's safe to construct
670 // `NonNull`.
671 unsafe { NonNull { pointer: self.as_ptr().byte_add(count) } }
672 }
673
674 /// Subtracts an offset from a pointer (convenience for
675 /// `.offset((count as isize).wrapping_neg())`).
676 ///
677 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
678 /// offset of `3 * size_of::<T>()` bytes.
679 ///
680 /// # Safety
681 ///
682 /// If any of the following conditions are violated, the result is Undefined Behavior:
683 ///
684 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
685 ///
686 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
687 /// [allocation], and the entire memory range between `self` and the result must be in
688 /// bounds of that allocation. In particular, this range must not "wrap around" the edge
689 /// of the address space.
690 ///
691 /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
692 /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
693 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
694 /// safe.
695 ///
696 /// [allocation]: crate::ptr#allocation
697 ///
698 /// # Examples
699 ///
700 /// ```
701 /// use std::ptr::NonNull;
702 ///
703 /// let s: &str = "123";
704 ///
705 /// unsafe {
706 /// let end: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap().add(3);
707 /// println!("{}", end.sub(1).read() as char);
708 /// println!("{}", end.sub(2).read() as char);
709 /// }
710 /// ```
711 #[inline(always)]
712 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
713 #[must_use = "returns a new pointer rather than modifying its argument"]
714 #[stable(feature = "non_null_convenience", since = "1.80.0")]
715 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
716 pub const unsafe fn sub(self, count: usize) -> Self
717 where
718 T: Sized,
719 {
720 if T::IS_ZST {
721 // Pointer arithmetic does nothing when the pointee is a ZST.
722 self
723 } else {
724 // SAFETY: the caller must uphold the safety contract for `offset`.
725 // Because the pointee is *not* a ZST, that means that `count` is
726 // at most `isize::MAX`, and thus the negation cannot overflow.
727 unsafe { self.offset((count as isize).unchecked_neg()) }
728 }
729 }
730
731 /// Calculates the offset from a pointer in bytes (convenience for
732 /// `.byte_offset((count as isize).wrapping_neg())`).
733 ///
734 /// `count` is in units of bytes.
735 ///
736 /// This is purely a convenience for casting to a `u8` pointer and
737 /// using [`sub`][NonNull::sub] on it. See that method for documentation
738 /// and safety requirements.
739 ///
740 /// For non-`Sized` pointees this operation changes only the data pointer,
741 /// leaving the metadata untouched.
742 #[must_use]
743 #[inline(always)]
744 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
745 #[stable(feature = "non_null_convenience", since = "1.80.0")]
746 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
747 pub const unsafe fn byte_sub(self, count: usize) -> Self {
748 // SAFETY: the caller must uphold the safety contract for `sub` and `byte_sub` has the same
749 // safety contract.
750 // Additionally safety contract of `sub` guarantees that the resulting pointer is pointing
751 // to an allocation, there can't be an allocation at null, thus it's safe to construct
752 // `NonNull`.
753 unsafe { NonNull { pointer: self.as_ptr().byte_sub(count) } }
754 }
755
756 /// Calculates the distance between two pointers within the same allocation. The returned value is in
757 /// units of T: the distance in bytes divided by `size_of::<T>()`.
758 ///
759 /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
760 /// except that it has a lot more opportunities for UB, in exchange for the compiler
761 /// better understanding what you are doing.
762 ///
763 /// The primary motivation of this method is for computing the `len` of an array/slice
764 /// of `T` that you are currently representing as a "start" and "end" pointer
765 /// (and "end" is "one past the end" of the array).
766 /// In that case, `end.offset_from(start)` gets you the length of the array.
767 ///
    /// All of the following safety requirements are trivially satisfied for this use case.
769 ///
770 /// [`offset`]: #method.offset
771 ///
772 /// # Safety
773 ///
774 /// If any of the following conditions are violated, the result is Undefined Behavior:
775 ///
776 /// * `self` and `origin` must either
777 ///
778 /// * point to the same address, or
779 /// * both be *derived from* a pointer to the same [allocation], and the memory range between
    ///     the two pointers must be in bounds of that allocation. (See below for an example.)
781 ///
782 /// * The distance between the pointers, in bytes, must be an exact multiple
783 /// of the size of `T`.
784 ///
785 /// As a consequence, the absolute distance between the pointers, in bytes, computed on
786 /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
787 /// implied by the in-bounds requirement, and the fact that no allocation can be larger
788 /// than `isize::MAX` bytes.
789 ///
790 /// The requirement for pointers to be derived from the same allocation is primarily
791 /// needed for `const`-compatibility: the distance between pointers into *different* allocated
792 /// objects is not known at compile-time. However, the requirement also exists at
793 /// runtime and may be exploited by optimizations. If you wish to compute the difference between
794 /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
795 /// origin as isize) / size_of::<T>()`.
796 // FIXME: recommend `addr()` instead of `as usize` once that is stable.
797 ///
798 /// [`add`]: #method.add
799 /// [allocation]: crate::ptr#allocation
800 ///
801 /// # Panics
802 ///
803 /// This function panics if `T` is a Zero-Sized Type ("ZST").
804 ///
805 /// # Examples
806 ///
807 /// Basic usage:
808 ///
809 /// ```
810 /// use std::ptr::NonNull;
811 ///
812 /// let a = [0; 5];
813 /// let ptr1: NonNull<u32> = NonNull::from(&a[1]);
814 /// let ptr2: NonNull<u32> = NonNull::from(&a[3]);
815 /// unsafe {
816 /// assert_eq!(ptr2.offset_from(ptr1), 2);
817 /// assert_eq!(ptr1.offset_from(ptr2), -2);
818 /// assert_eq!(ptr1.offset(2), ptr2);
819 /// assert_eq!(ptr2.offset(-2), ptr1);
820 /// }
821 /// ```
822 ///
823 /// *Incorrect* usage:
824 ///
825 /// ```rust,no_run
826 /// use std::ptr::NonNull;
827 ///
828 /// let ptr1 = NonNull::new(Box::into_raw(Box::new(0u8))).unwrap();
829 /// let ptr2 = NonNull::new(Box::into_raw(Box::new(1u8))).unwrap();
830 /// let diff = (ptr2.addr().get() as isize).wrapping_sub(ptr1.addr().get() as isize);
831 /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
832 /// let diff_plus_1 = diff.wrapping_add(1);
833 /// let ptr2_other = NonNull::new(ptr1.as_ptr().wrapping_byte_offset(diff_plus_1)).unwrap();
834 /// assert_eq!(ptr2.addr(), ptr2_other.addr());
835 /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
836 /// // computing their offset is undefined behavior, even though
837 /// // they point to addresses that are in-bounds of the same object!
838 ///
839 /// let one = unsafe { ptr2_other.offset_from(ptr2) }; // Undefined Behavior! ⚠️
840 /// ```
841 #[inline]
842 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
843 #[stable(feature = "non_null_convenience", since = "1.80.0")]
844 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
845 pub const unsafe fn offset_from(self, origin: NonNull<T>) -> isize
846 where
847 T: Sized,
848 {
849 // SAFETY: the caller must uphold the safety contract for `offset_from`.
850 unsafe { self.as_ptr().offset_from(origin.as_ptr()) }
851 }
852
853 /// Calculates the distance between two pointers within the same allocation. The returned value is in
854 /// units of **bytes**.
855 ///
856 /// This is purely a convenience for casting to a `u8` pointer and
857 /// using [`offset_from`][NonNull::offset_from] on it. See that method for
858 /// documentation and safety requirements.
859 ///
860 /// For non-`Sized` pointees this operation considers only the data pointers,
861 /// ignoring the metadata.
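    ///
    /// # Examples
    ///
    /// A short sketch measuring the distance, in bytes, between two elements of the same array:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let arr = [0u16, 1, 2];
    /// let first: NonNull<u16> = NonNull::from_ref(&arr[0]);
    /// let third: NonNull<u16> = NonNull::from_ref(&arr[2]);
    /// // Two `u16` elements apart, so four bytes apart.
    /// assert_eq!(unsafe { third.byte_offset_from(first) }, 4);
    /// assert_eq!(unsafe { first.byte_offset_from(third) }, -4);
    /// ```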
862 #[inline(always)]
863 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
864 #[stable(feature = "non_null_convenience", since = "1.80.0")]
865 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
866 pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: NonNull<U>) -> isize {
867 // SAFETY: the caller must uphold the safety contract for `byte_offset_from`.
868 unsafe { self.as_ptr().byte_offset_from(origin.as_ptr()) }
869 }
870
    // N.B. `wrapping_offset`, `wrapping_add`, etc. are not implemented because they can wrap to null
872
873 /// Calculates the distance between two pointers within the same allocation, *where it's known that
874 /// `self` is equal to or greater than `origin`*. The returned value is in
875 /// units of T: the distance in bytes is divided by `size_of::<T>()`.
876 ///
877 /// This computes the same value that [`offset_from`](#method.offset_from)
878 /// would compute, but with the added precondition that the offset is
879 /// guaranteed to be non-negative. This method is equivalent to
880 /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
881 /// but it provides slightly more information to the optimizer, which can
882 /// sometimes allow it to optimize slightly better with some backends.
883 ///
    /// This method can be thought of as recovering the `count` that was passed
885 /// to [`add`](#method.add) (or, with the parameters in the other order,
886 /// to [`sub`](#method.sub)). The following are all equivalent, assuming
887 /// that their safety preconditions are met:
888 /// ```rust
889 /// # unsafe fn blah(ptr: std::ptr::NonNull<u32>, origin: std::ptr::NonNull<u32>, count: usize) -> bool { unsafe {
890 /// ptr.offset_from_unsigned(origin) == count
891 /// # &&
892 /// origin.add(count) == ptr
893 /// # &&
894 /// ptr.sub(count) == origin
895 /// # } }
896 /// ```
897 ///
898 /// # Safety
899 ///
900 /// - The distance between the pointers must be non-negative (`self >= origin`)
901 ///
902 /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
903 /// apply to this method as well; see it for the full details.
904 ///
905 /// Importantly, despite the return type of this method being able to represent
906 /// a larger offset, it's still *not permitted* to pass pointers which differ
907 /// by more than `isize::MAX` *bytes*. As such, the result of this method will
908 /// always be less than or equal to `isize::MAX as usize`.
909 ///
910 /// # Panics
911 ///
912 /// This function panics if `T` is a Zero-Sized Type ("ZST").
913 ///
914 /// # Examples
915 ///
916 /// ```
917 /// use std::ptr::NonNull;
918 ///
919 /// let a = [0; 5];
920 /// let ptr1: NonNull<u32> = NonNull::from(&a[1]);
921 /// let ptr2: NonNull<u32> = NonNull::from(&a[3]);
922 /// unsafe {
923 /// assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
924 /// assert_eq!(ptr1.add(2), ptr2);
925 /// assert_eq!(ptr2.sub(2), ptr1);
926 /// assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
927 /// }
928 ///
929 /// // This would be incorrect, as the pointers are not correctly ordered:
930 /// // ptr1.offset_from_unsigned(ptr2)
931 /// ```
932 #[inline]
933 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
934 #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
935 #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
936 pub const unsafe fn offset_from_unsigned(self, subtracted: NonNull<T>) -> usize
937 where
938 T: Sized,
939 {
940 // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`.
941 unsafe { self.as_ptr().offset_from_unsigned(subtracted.as_ptr()) }
942 }
943
944 /// Calculates the distance between two pointers within the same allocation, *where it's known that
945 /// `self` is equal to or greater than `origin`*. The returned value is in
946 /// units of **bytes**.
947 ///
948 /// This is purely a convenience for casting to a `u8` pointer and
949 /// using [`offset_from_unsigned`][NonNull::offset_from_unsigned] on it.
950 /// See that method for documentation and safety requirements.
951 ///
952 /// For non-`Sized` pointees this operation considers only the data pointers,
953 /// ignoring the metadata.
954 #[inline(always)]
955 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
956 #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
957 #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
958 pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: NonNull<U>) -> usize {
959 // SAFETY: the caller must uphold the safety contract for `byte_offset_from_unsigned`.
960 unsafe { self.as_ptr().byte_offset_from_unsigned(origin.as_ptr()) }
961 }
962
963 /// Reads the value from `self` without moving it. This leaves the
964 /// memory in `self` unchanged.
965 ///
966 /// See [`ptr::read`] for safety concerns and examples.
967 ///
968 /// [`ptr::read`]: crate::ptr::read()
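    ///
    /// # Examples
    ///
    /// A brief sketch of a non-destructive read, in addition to the examples on [`ptr::read`]:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let x = 12u32;
    /// let ptr = NonNull::from_ref(&x);
    /// // The value is copied out; the original is left untouched.
    /// assert_eq!(unsafe { ptr.read() }, 12);
    /// assert_eq!(x, 12);
    /// ```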
969 #[inline]
970 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
971 #[stable(feature = "non_null_convenience", since = "1.80.0")]
972 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
973 pub const unsafe fn read(self) -> T
974 where
975 T: Sized,
976 {
977 // SAFETY: the caller must uphold the safety contract for `read`.
978 unsafe { ptr::read(self.as_ptr()) }
979 }
980
981 /// Performs a volatile read of the value from `self` without moving it. This
982 /// leaves the memory in `self` unchanged.
983 ///
984 /// Volatile operations are intended to act on I/O memory, and are guaranteed
985 /// to not be elided or reordered by the compiler across other volatile
986 /// operations.
987 ///
988 /// See [`ptr::read_volatile`] for safety concerns and examples.
989 ///
990 /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
991 #[inline]
992 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
993 #[stable(feature = "non_null_convenience", since = "1.80.0")]
994 pub unsafe fn read_volatile(self) -> T
995 where
996 T: Sized,
997 {
998 // SAFETY: the caller must uphold the safety contract for `read_volatile`.
999 unsafe { ptr::read_volatile(self.as_ptr()) }
1000 }
1001
1002 /// Reads the value from `self` without moving it. This leaves the
1003 /// memory in `self` unchanged.
1004 ///
1005 /// Unlike `read`, the pointer may be unaligned.
1006 ///
1007 /// See [`ptr::read_unaligned`] for safety concerns and examples.
1008 ///
1009 /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
1010 #[inline]
1011 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1012 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1013 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
1014 pub const unsafe fn read_unaligned(self) -> T
1015 where
1016 T: Sized,
1017 {
1018 // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
1019 unsafe { ptr::read_unaligned(self.as_ptr()) }
1020 }
1021
1022 /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1023 /// and destination may overlap.
1024 ///
1025 /// NOTE: this has the *same* argument order as [`ptr::copy`].
1026 ///
1027 /// See [`ptr::copy`] for safety concerns and examples.
1028 ///
1029 /// [`ptr::copy`]: crate::ptr::copy()
1030 #[inline(always)]
1031 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1032 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1033 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1034 pub const unsafe fn copy_to(self, dest: NonNull<T>, count: usize)
1035 where
1036 T: Sized,
1037 {
1038 // SAFETY: the caller must uphold the safety contract for `copy`.
1039 unsafe { ptr::copy(self.as_ptr(), dest.as_ptr(), count) }
1040 }
1041
1042 /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1043 /// and destination may *not* overlap.
1044 ///
1045 /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
1046 ///
1047 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1048 ///
1049 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1050 #[inline(always)]
1051 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1052 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1053 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1054 pub const unsafe fn copy_to_nonoverlapping(self, dest: NonNull<T>, count: usize)
1055 where
1056 T: Sized,
1057 {
1058 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1059 unsafe { ptr::copy_nonoverlapping(self.as_ptr(), dest.as_ptr(), count) }
1060 }
1061
1062 /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1063 /// and destination may overlap.
1064 ///
1065 /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
1066 ///
1067 /// See [`ptr::copy`] for safety concerns and examples.
1068 ///
1069 /// [`ptr::copy`]: crate::ptr::copy()
1070 #[inline(always)]
1071 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1072 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1073 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1074 pub const unsafe fn copy_from(self, src: NonNull<T>, count: usize)
1075 where
1076 T: Sized,
1077 {
1078 // SAFETY: the caller must uphold the safety contract for `copy`.
1079 unsafe { ptr::copy(src.as_ptr(), self.as_ptr(), count) }
1080 }
1081
1082 /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1083 /// and destination may *not* overlap.
1084 ///
1085 /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
1086 ///
1087 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1088 ///
1089 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1090 #[inline(always)]
1091 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1092 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1093 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1094 pub const unsafe fn copy_from_nonoverlapping(self, src: NonNull<T>, count: usize)
1095 where
1096 T: Sized,
1097 {
1098 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1099 unsafe { ptr::copy_nonoverlapping(src.as_ptr(), self.as_ptr(), count) }
1100 }
1101
1102 /// Executes the destructor (if any) of the pointed-to value.
1103 ///
1104 /// See [`ptr::drop_in_place`] for safety concerns and examples.
1105 ///
1106 /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place()
1107 #[inline(always)]
1108 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1109 pub unsafe fn drop_in_place(self) {
1110 // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
1111 unsafe { ptr::drop_in_place(self.as_ptr()) }
1112 }
1113
1114 /// Overwrites a memory location with the given value without reading or
1115 /// dropping the old value.
1116 ///
1117 /// See [`ptr::write`] for safety concerns and examples.
1118 ///
1119 /// [`ptr::write`]: crate::ptr::write()
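    ///
    /// # Examples
    ///
    /// A brief sketch that overwrites a value in place:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNull::from_mut(&mut x);
    /// unsafe { ptr.write(3) };
    /// assert_eq!(x, 3);
    /// ```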
1120 #[inline(always)]
1121 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1122 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1123 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1124 pub const unsafe fn write(self, val: T)
1125 where
1126 T: Sized,
1127 {
1128 // SAFETY: the caller must uphold the safety contract for `write`.
1129 unsafe { ptr::write(self.as_ptr(), val) }
1130 }
1131
1132 /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
1133 /// bytes of memory starting at `self` to `val`.
1134 ///
1135 /// See [`ptr::write_bytes`] for safety concerns and examples.
1136 ///
1137 /// [`ptr::write_bytes`]: crate::ptr::write_bytes()
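    ///
    /// # Examples
    ///
    /// A small sketch that zeroes an array through a pointer to its first element:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut buf = [1u8; 4];
    /// let len = buf.len();
    /// let ptr: NonNull<u8> = NonNull::from_mut(&mut buf).cast();
    /// // Set all `len` bytes to zero.
    /// unsafe { ptr.write_bytes(0, len) };
    /// assert_eq!(buf, [0u8; 4]);
    /// ```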
1138 #[inline(always)]
1139 #[doc(alias = "memset")]
1140 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1141 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1142 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1143 pub const unsafe fn write_bytes(self, val: u8, count: usize)
1144 where
1145 T: Sized,
1146 {
1147 // SAFETY: the caller must uphold the safety contract for `write_bytes`.
1148 unsafe { ptr::write_bytes(self.as_ptr(), val, count) }
1149 }
1150
1151 /// Performs a volatile write of a memory location with the given value without
1152 /// reading or dropping the old value.
1153 ///
1154 /// Volatile operations are intended to act on I/O memory, and are guaranteed
1155 /// to not be elided or reordered by the compiler across other volatile
1156 /// operations.
1157 ///
1158 /// See [`ptr::write_volatile`] for safety concerns and examples.
1159 ///
1160 /// [`ptr::write_volatile`]: crate::ptr::write_volatile()
1161 #[inline(always)]
1162 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1163 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1164 pub unsafe fn write_volatile(self, val: T)
1165 where
1166 T: Sized,
1167 {
1168 // SAFETY: the caller must uphold the safety contract for `write_volatile`.
1169 unsafe { ptr::write_volatile(self.as_ptr(), val) }
1170 }
1171
1172 /// Overwrites a memory location with the given value without reading or
1173 /// dropping the old value.
1174 ///
1175 /// Unlike `write`, the pointer may be unaligned.
1176 ///
1177 /// See [`ptr::write_unaligned`] for safety concerns and examples.
1178 ///
1179 /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned()
1180 #[inline(always)]
1181 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1182 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1183 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1184 pub const unsafe fn write_unaligned(self, val: T)
1185 where
1186 T: Sized,
1187 {
1188 // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
1189 unsafe { ptr::write_unaligned(self.as_ptr(), val) }
1190 }
1191
1192 /// Replaces the value at `self` with `src`, returning the old
1193 /// value, without dropping either.
1194 ///
1195 /// See [`ptr::replace`] for safety concerns and examples.
1196 ///
1197 /// [`ptr::replace`]: crate::ptr::replace()
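    ///
    /// # Examples
    ///
    /// A brief sketch swapping in a new value and recovering the old one:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 1u32;
    /// let ptr = NonNull::from_mut(&mut x);
    /// let old = unsafe { ptr.replace(2) };
    /// assert_eq!(old, 1);
    /// assert_eq!(x, 2);
    /// ```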
1198 #[inline(always)]
1199 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1200 #[rustc_const_stable(feature = "const_inherent_ptr_replace", since = "1.88.0")]
1201 pub const unsafe fn replace(self, src: T) -> T
1202 where
1203 T: Sized,
1204 {
1205 // SAFETY: the caller must uphold the safety contract for `replace`.
1206 unsafe { ptr::replace(self.as_ptr(), src) }
1207 }
1208
1209 /// Swaps the values at two mutable locations of the same type, without
1210 /// deinitializing either. They may overlap, unlike `mem::swap` which is
1211 /// otherwise equivalent.
1212 ///
1213 /// See [`ptr::swap`] for safety concerns and examples.
1214 ///
1215 /// [`ptr::swap`]: crate::ptr::swap()
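    ///
    /// # Examples
    ///
    /// A brief sketch swapping two distinct values:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut a = 1u32;
    /// let mut b = 2u32;
    /// let pa = NonNull::from_mut(&mut a);
    /// let pb = NonNull::from_mut(&mut b);
    /// unsafe { pa.swap(pb) };
    /// assert_eq!((a, b), (2, 1));
    /// ```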
1216 #[inline(always)]
1217 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1218 #[rustc_const_stable(feature = "const_swap", since = "1.85.0")]
1219 pub const unsafe fn swap(self, with: NonNull<T>)
1220 where
1221 T: Sized,
1222 {
1223 // SAFETY: the caller must uphold the safety contract for `swap`.
1224 unsafe { ptr::swap(self.as_ptr(), with.as_ptr()) }
1225 }
1226
1227 /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
1228 /// `align`.
1229 ///
1230 /// If it is not possible to align the pointer, the implementation returns
1231 /// `usize::MAX`.
1232 ///
1233 /// The offset is expressed in number of `T` elements, and not bytes.
1234 ///
1235 /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
1236 /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
1237 /// the returned offset is correct in all terms other than alignment.
1238 ///
1239 /// When this is called during compile-time evaluation (which is unstable), the implementation
1240 /// may return `usize::MAX` in cases where that can never happen at runtime. This is because the
1241 /// actual alignment of pointers is not known yet during compile-time, so an offset with
1242 /// guaranteed alignment can sometimes not be computed. For example, a buffer declared as `[u8;
1243 /// N]` might be allocated at an odd or an even address, but at compile-time this is not yet
1244 /// known, so the execution has to be correct for either choice. It is therefore impossible to
1245 /// find an offset that is guaranteed to be 2-aligned. (This behavior is subject to change, as usual
1246 /// for unstable APIs.)
1247 ///
1248 /// # Panics
1249 ///
1250 /// The function panics if `align` is not a power-of-two.
1251 ///
1252 /// # Examples
1253 ///
1254 /// Accessing adjacent `u8` as `u16`
1255 ///
1256 /// ```
1257 /// use std::ptr::NonNull;
1258 ///
1259 /// # unsafe {
1260 /// let x = [5_u8, 6, 7, 8, 9];
1261 /// let ptr = NonNull::new(x.as_ptr() as *mut u8).unwrap();
1262 /// let offset = ptr.align_offset(align_of::<u16>());
1263 ///
1264 /// if offset < x.len() - 1 {
1265 /// let u16_ptr = ptr.add(offset).cast::<u16>();
1266 /// assert!(u16_ptr.read() == u16::from_ne_bytes([5, 6]) || u16_ptr.read() == u16::from_ne_bytes([6, 7]));
1267 /// } else {
1268 /// // while the pointer can be aligned via `offset`, it would point
1269 /// // outside the allocation
1270 /// }
1271 /// # }
1272 /// ```
1273 #[inline]
1274 #[must_use]
1275 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1276 pub fn align_offset(self, align: usize) -> usize
1277 where
1278 T: Sized,
1279 {
1280 if !align.is_power_of_two() {
1281 panic!("align_offset: align is not a power-of-two");
1282 }
1283
1284 {
1285 // SAFETY: `align` has been checked to be a power of 2 above.
1286 unsafe { ptr::align_offset(self.as_ptr(), align) }
1287 }
1288 }
1289
1290 /// Returns whether the pointer is properly aligned for `T`.
1291 ///
1292 /// # Examples
1293 ///
1294 /// ```
1295 /// use std::ptr::NonNull;
1296 ///
1297 /// // On some platforms, the alignment of i32 is less than 4.
1298 /// #[repr(align(4))]
1299 /// struct AlignedI32(i32);
1300 ///
1301 /// let data = AlignedI32(42);
1302 /// let ptr = NonNull::<AlignedI32>::from(&data);
1303 ///
1304 /// assert!(ptr.is_aligned());
1305 /// assert!(!NonNull::new(ptr.as_ptr().wrapping_byte_add(1)).unwrap().is_aligned());
1306 /// ```
1307 #[inline]
1308 #[must_use]
1309 #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
1310 pub fn is_aligned(self) -> bool
1311 where
1312 T: Sized,
1313 {
1314 self.as_ptr().is_aligned()
1315 }
1316
1317 /// Returns whether the pointer is aligned to `align`.
1318 ///
1319 /// For non-`Sized` pointees this operation considers only the data pointer,
1320 /// ignoring the metadata.
1321 ///
1322 /// # Panics
1323 ///
1324 /// The function panics if `align` is not a power-of-two (this includes 0).
1325 ///
1326 /// # Examples
1327 ///
1328 /// ```
1329 /// #![feature(pointer_is_aligned_to)]
1330 ///
1331 /// // On some platforms, the alignment of i32 is less than 4.
1332 /// #[repr(align(4))]
1333 /// struct AlignedI32(i32);
1334 ///
1335 /// let data = AlignedI32(42);
1336 /// let ptr = &data as *const AlignedI32;
1337 ///
1338 /// assert!(ptr.is_aligned_to(1));
1339 /// assert!(ptr.is_aligned_to(2));
1340 /// assert!(ptr.is_aligned_to(4));
1341 ///
1342 /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
1343 /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
1344 ///
1345 /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
1346 /// ```
1347 #[inline]
1348 #[must_use]
1349 #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
1350 pub fn is_aligned_to(self, align: usize) -> bool {
1351 self.as_ptr().is_aligned_to(align)
1352 }
1353}
1354
1355impl<T> NonNull<[T]> {
1356 /// Creates a non-null raw slice from a thin pointer and a length.
1357 ///
1358 /// The `len` argument is the number of **elements**, not the number of bytes.
1359 ///
1360 /// This function is safe, but dereferencing the return value is unsafe.
1361 /// See the documentation of [`slice::from_raw_parts`] for slice safety requirements.
1362 ///
1363 /// # Examples
1364 ///
1365 /// ```rust
1366 /// use std::ptr::NonNull;
1367 ///
1368 /// // create a slice pointer when starting out with a pointer to the first element
1369 /// let mut x = [5, 6, 7];
1370 /// let nonnull_pointer = NonNull::new(x.as_mut_ptr()).unwrap();
1371 /// let slice = NonNull::slice_from_raw_parts(nonnull_pointer, 3);
1372 /// assert_eq!(unsafe { slice.as_ref()[2] }, 7);
1373 /// ```
1374 ///
1375 /// (Note that this example artificially demonstrates a use of this method,
1376 /// but `let slice = NonNull::from(&x[..]);` would be a better way to write code like this.)
1377 #[stable(feature = "nonnull_slice_from_raw_parts", since = "1.70.0")]
1378 #[rustc_const_stable(feature = "const_slice_from_raw_parts_mut", since = "1.83.0")]
1379 #[must_use]
1380 #[inline]
1381 pub const fn slice_from_raw_parts(data: NonNull<T>, len: usize) -> Self {
1382 // SAFETY: `data` is a `NonNull` pointer which is necessarily non-null
1383 unsafe { Self::new_unchecked(super::slice_from_raw_parts_mut(data.as_ptr(), len)) }
1384 }
1385
1386 /// Returns the length of a non-null raw slice.
1387 ///
1388 /// The returned value is the number of **elements**, not the number of bytes.
1389 ///
1390 /// This function is safe, even when the non-null raw slice cannot be dereferenced to a slice
1391 /// because the pointer does not have a valid address.
1392 ///
1393 /// # Examples
1394 ///
1395 /// ```rust
1396 /// use std::ptr::NonNull;
1397 ///
1398 /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
1399 /// assert_eq!(slice.len(), 3);
1400 /// ```
1401 #[stable(feature = "slice_ptr_len_nonnull", since = "1.63.0")]
1402 #[rustc_const_stable(feature = "const_slice_ptr_len_nonnull", since = "1.63.0")]
1403 #[must_use]
1404 #[inline]
1405 pub const fn len(self) -> usize {
1406 self.as_ptr().len()
1407 }
1408
1409 /// Returns `true` if the non-null raw slice has a length of 0.
1410 ///
1411 /// # Examples
1412 ///
1413 /// ```rust
1414 /// use std::ptr::NonNull;
1415 ///
1416 /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
1417 /// assert!(!slice.is_empty());
1418 /// ```
1419 #[stable(feature = "slice_ptr_is_empty_nonnull", since = "1.79.0")]
1420 #[rustc_const_stable(feature = "const_slice_ptr_is_empty_nonnull", since = "1.79.0")]
1421 #[must_use]
1422 #[inline]
1423 pub const fn is_empty(self) -> bool {
1424 self.len() == 0
1425 }
1426
1427 /// Returns a non-null pointer to the slice's buffer.
1428 ///
1429 /// # Examples
1430 ///
1431 /// ```rust
1432 /// #![feature(slice_ptr_get)]
1433 /// use std::ptr::NonNull;
1434 ///
1435 /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
1436 /// assert_eq!(slice.as_non_null_ptr(), NonNull::<i8>::dangling());
1437 /// ```
1438 #[inline]
1439 #[must_use]
1440 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1441 pub const fn as_non_null_ptr(self) -> NonNull<T> {
1442 self.cast()
1443 }
1444
1445 /// Returns a raw pointer to the slice's buffer.
1446 ///
1447 /// # Examples
1448 ///
1449 /// ```rust
1450 /// #![feature(slice_ptr_get)]
1451 /// use std::ptr::NonNull;
1452 ///
1453 /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
1454 /// assert_eq!(slice.as_mut_ptr(), NonNull::<i8>::dangling().as_ptr());
1455 /// ```
1456 #[inline]
1457 #[must_use]
1458 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1459 #[rustc_never_returns_null_ptr]
1460 pub const fn as_mut_ptr(self) -> *mut T {
1461 self.as_non_null_ptr().as_ptr()
1462 }
1463
1464 /// Returns a shared reference to a slice of possibly uninitialized values. In contrast to
    /// [`as_ref`], this does not require that the values be initialized.
1466 ///
1467 /// For the mutable counterpart see [`as_uninit_slice_mut`].
1468 ///
1469 /// [`as_ref`]: NonNull::as_ref
1470 /// [`as_uninit_slice_mut`]: NonNull::as_uninit_slice_mut
1471 ///
1472 /// # Safety
1473 ///
1474 /// When calling this method, you have to ensure that all of the following is true:
1475 ///
1476 /// * The pointer must be [valid] for reads for `ptr.len() * size_of::<T>()` many bytes,
1477 /// and it must be properly aligned. This means in particular:
1478 ///
1479 /// * The entire memory range of this slice must be contained within a single allocation!
1480 /// Slices can never span across multiple allocations.
1481 ///
1482 /// * The pointer must be aligned even for zero-length slices. One
1483 /// reason for this is that enum layout optimizations may rely on references
1484 /// (including slices of any length) being aligned and non-null to distinguish
1485 /// them from other data. You can obtain a pointer that is usable as `data`
1486 /// for zero-length slices using [`NonNull::dangling()`].
1487 ///
1488 /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1489 /// See the safety documentation of [`pointer::offset`].
1490 ///
1491 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1492 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
1493 /// In particular, while this reference exists, the memory the pointer points to must
1494 /// not get mutated (except inside `UnsafeCell`).
1495 ///
1496 /// This applies even if the result of this method is unused!
1497 ///
1498 /// See also [`slice::from_raw_parts`].
1499 ///
1500 /// [valid]: crate::ptr#safety
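    ///
    /// # Examples
    ///
    /// A minimal sketch (the buffer size and variable names are illustrative), viewing a
    /// stack buffer of possibly uninitialized bytes through a `NonNull<[u8]>`:
    ///
    /// ```rust
    /// #![feature(ptr_as_uninit)]
    ///
    /// use std::mem::MaybeUninit;
    /// use std::ptr::NonNull;
    ///
    /// let mut buf = [MaybeUninit::<u8>::uninit(); 16];
    /// // A thin pointer to the start of the buffer, with provenance over all of it.
    /// let first: NonNull<u8> = NonNull::from(&mut buf).cast();
    /// let ptr: NonNull<[u8]> = NonNull::slice_from_raw_parts(first, 16);
    /// // SAFETY: `ptr` covers `buf`, which is 16 properly aligned bytes that stay
    /// // valid and unmutated while the returned shared reference is in use.
    /// let uninit: &[MaybeUninit<u8>] = unsafe { ptr.as_uninit_slice() };
    /// assert_eq!(uninit.len(), 16);
    /// ```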
1501 #[inline]
1502 #[must_use]
1503 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1504 pub const unsafe fn as_uninit_slice<'a>(self) -> &'a [MaybeUninit<T>] {
1505 // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
1506 unsafe { slice::from_raw_parts(self.cast().as_ptr(), self.len()) }
1507 }
1508
1509 /// Returns a unique reference to a slice of possibly uninitialized values. In contrast to
    /// [`as_mut`], this does not require that the values be initialized.
1511 ///
1512 /// For the shared counterpart see [`as_uninit_slice`].
1513 ///
1514 /// [`as_mut`]: NonNull::as_mut
1515 /// [`as_uninit_slice`]: NonNull::as_uninit_slice
1516 ///
1517 /// # Safety
1518 ///
1519 /// When calling this method, you have to ensure that all of the following is true:
1520 ///
1521 /// * The pointer must be [valid] for reads and writes for `ptr.len() * size_of::<T>()`
1522 /// many bytes, and it must be properly aligned. This means in particular:
1523 ///
1524 /// * The entire memory range of this slice must be contained within a single allocation!
1525 /// Slices can never span across multiple allocations.
1526 ///
1527 /// * The pointer must be aligned even for zero-length slices. One
1528 /// reason for this is that enum layout optimizations may rely on references
1529 /// (including slices of any length) being aligned and non-null to distinguish
1530 /// them from other data. You can obtain a pointer that is usable as `data`
1531 /// for zero-length slices using [`NonNull::dangling()`].
1532 ///
1533 /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1534 /// See the safety documentation of [`pointer::offset`].
1535 ///
1536 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1537 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
1538 /// In particular, while this reference exists, the memory the pointer points to must
1539 /// not get accessed (read or written) through any other pointer.
1540 ///
1541 /// This applies even if the result of this method is unused!
1542 ///
1543 /// See also [`slice::from_raw_parts_mut`].
1544 ///
1545 /// [valid]: crate::ptr#safety
1546 ///
1547 /// # Examples
1548 ///
1549 /// ```rust
1550 /// #![feature(allocator_api, ptr_as_uninit)]
1551 ///
1552 /// use std::alloc::{Allocator, Layout, Global};
1553 /// use std::mem::MaybeUninit;
1554 /// use std::ptr::NonNull;
1555 ///
1556 /// let memory: NonNull<[u8]> = Global.allocate(Layout::new::<[u8; 32]>())?;
1557 /// // This is safe as `memory` is valid for reads and writes for `memory.len()` many bytes.
1558 /// // Note that calling `memory.as_mut()` is not allowed here as the content may be uninitialized.
1559 /// # #[allow(unused_variables)]
1560 /// let slice: &mut [MaybeUninit<u8>] = unsafe { memory.as_uninit_slice_mut() };
1561 /// # // Prevent leaks for Miri.
1562 /// # unsafe { Global.deallocate(memory.cast(), Layout::new::<[u8; 32]>()); }
1563 /// # Ok::<_, std::alloc::AllocError>(())
1564 /// ```
1565 #[inline]
1566 #[must_use]
1567 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1568 pub const unsafe fn as_uninit_slice_mut<'a>(self) -> &'a mut [MaybeUninit<T>] {
1569 // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
1570 unsafe { slice::from_raw_parts_mut(self.cast().as_ptr(), self.len()) }
1571 }
1572
1573 /// Returns a raw pointer to an element or subslice, without doing bounds
1574 /// checking.
1575 ///
1576 /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
1577 /// is *[undefined behavior]* even if the resulting pointer is not used.
1578 ///
1579 /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1580 ///
1581 /// # Examples
1582 ///
1583 /// ```
1584 /// #![feature(slice_ptr_get)]
1585 /// use std::ptr::NonNull;
1586 ///
1587 /// let x = &mut [1, 2, 4];
1588 /// let x = NonNull::slice_from_raw_parts(NonNull::new(x.as_mut_ptr()).unwrap(), x.len());
1589 ///
1590 /// unsafe {
1591 /// assert_eq!(x.get_unchecked_mut(1).as_ptr(), x.as_non_null_ptr().as_ptr().add(1));
1592 /// }
1593 /// ```
1594 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1595 #[inline]
1596 pub unsafe fn get_unchecked_mut<I>(self, index: I) -> NonNull<I::Output>
1597 where
1598 I: SliceIndex<[T]>,
1599 {
1600 // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
1601 // As a consequence, the resulting pointer cannot be null.
1602 unsafe { NonNull::new_unchecked(self.as_ptr().get_unchecked_mut(index)) }
1603 }
1604}
1605
1606#[stable(feature = "nonnull", since = "1.25.0")]
1607impl<T: ?Sized> Clone for NonNull<T> {
1608 #[inline(always)]
1609 fn clone(&self) -> Self {
1610 *self
1611 }
1612}
1613
1614#[stable(feature = "nonnull", since = "1.25.0")]
1615impl<T: ?Sized> Copy for NonNull<T> {}
1616
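/// `NonNull<T>` participates in unsizing coercions, e.g. from `NonNull<[T; N]>` to
/// `NonNull<[T]>` or from `NonNull<T>` to `NonNull<dyn Trait>`. A small illustrative
/// sketch (the array and names are not from the original docs):
///
/// ```
/// use std::ptr::NonNull;
///
/// let mut arr = [1, 2, 3];
/// let thin: NonNull<[i32; 3]> = NonNull::from(&mut arr);
/// // Unsizing coercion: the length becomes part of the pointer's metadata.
/// let wide: NonNull<[i32]> = thin;
/// assert_eq!(wide.len(), 3);
/// ```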
1617#[unstable(feature = "coerce_unsized", issue = "18598")]
1618impl<T: ?Sized, U: ?Sized> CoerceUnsized<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
1619
1620#[unstable(feature = "dispatch_from_dyn", issue = "none")]
1621impl<T: ?Sized, U: ?Sized> DispatchFromDyn<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
1622
1623#[stable(feature = "pin", since = "1.33.0")]
1624unsafe impl<T: ?Sized> PinCoerceUnsized for NonNull<T> {}
1625
1626#[unstable(feature = "pointer_like_trait", issue = "none")]
1627impl<T> core::marker::PointerLike for NonNull<T> {}
1628
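/// The `Debug` output is the pointer's address, identical to its [`fmt::Pointer`]
/// formatting. A small sketch (the variable is illustrative):
///
/// ```
/// use std::ptr::NonNull;
///
/// let mut x = 0u32;
/// let ptr = NonNull::from(&mut x);
/// // `Debug` delegates to pointer formatting, so both render the same address.
/// assert_eq!(format!("{ptr:?}"), format!("{:p}", ptr.as_ptr()));
/// ```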
1629#[stable(feature = "nonnull", since = "1.25.0")]
1630impl<T: ?Sized> fmt::Debug for NonNull<T> {
1631 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1632 fmt::Pointer::fmt(&self.as_ptr(), f)
1633 }
1634}
1635
1636#[stable(feature = "nonnull", since = "1.25.0")]
1637impl<T: ?Sized> fmt::Pointer for NonNull<T> {
1638 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1639 fmt::Pointer::fmt(&self.as_ptr(), f)
1640 }
1641}
1642
1643#[stable(feature = "nonnull", since = "1.25.0")]
1644impl<T: ?Sized> Eq for NonNull<T> {}
1645
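/// Equality compares the underlying raw pointers (the address, plus metadata for wide
/// pointers); the pointees are never inspected. A small sketch (names are illustrative):
///
/// ```
/// use std::ptr::NonNull;
///
/// let mut x = 0u32;
/// let a = NonNull::from(&mut x);
/// let b = NonNull::from(&mut x);
/// // Same address, so the pointers compare equal without reading `x`.
/// assert_eq!(a, b);
/// ```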
1646#[stable(feature = "nonnull", since = "1.25.0")]
1647impl<T: ?Sized> PartialEq for NonNull<T> {
1648 #[inline]
1649 #[allow(ambiguous_wide_pointer_comparisons)]
1650 fn eq(&self, other: &Self) -> bool {
1651 self.as_ptr() == other.as_ptr()
1652 }
1653}
1654
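/// Ordering delegates to raw-pointer comparison, so pointers are ordered by address
/// (plus metadata for wide pointers); the pointees are never read. A small sketch
/// using two elements of the same array:
///
/// ```
/// use std::ptr::NonNull;
///
/// let mut arr = [0u8; 2];
/// let first = NonNull::from(&mut arr[0]);
/// let second = NonNull::from(&mut arr[1]);
/// // Array elements are laid out in order, so the first element has the lower address.
/// assert!(first < second);
/// ```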
1655#[stable(feature = "nonnull", since = "1.25.0")]
1656impl<T: ?Sized> Ord for NonNull<T> {
1657 #[inline]
1658 #[allow(ambiguous_wide_pointer_comparisons)]
1659 fn cmp(&self, other: &Self) -> Ordering {
1660 self.as_ptr().cmp(&other.as_ptr())
1661 }
1662}
1663
1664#[stable(feature = "nonnull", since = "1.25.0")]
1665impl<T: ?Sized> PartialOrd for NonNull<T> {
1666 #[inline]
1667 #[allow(ambiguous_wide_pointer_comparisons)]
1668 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
1669 self.as_ptr().partial_cmp(&other.as_ptr())
1670 }
1671}
1672
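/// Hashing uses the raw pointer value (the address), not the pointee, and is therefore
/// consistent with the address-based equality above. A small sketch with a `HashSet`:
///
/// ```
/// use std::collections::HashSet;
/// use std::ptr::NonNull;
///
/// let mut x = 0u32;
/// let a = NonNull::from(&mut x);
/// let b = NonNull::from(&mut x);
///
/// let mut set = HashSet::new();
/// set.insert(a);
/// // `b` has the same address as `a`, so it hashes and compares equal.
/// assert!(set.contains(&b));
/// ```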
1673#[stable(feature = "nonnull", since = "1.25.0")]
1674impl<T: ?Sized> hash::Hash for NonNull<T> {
1675 #[inline]
1676 fn hash<H: hash::Hasher>(&self, state: &mut H) {
1677 self.as_ptr().hash(state)
1678 }
1679}
1680
1681#[unstable(feature = "ptr_internals", issue = "none")]
1682impl<T: ?Sized> From<Unique<T>> for NonNull<T> {
1683 #[inline]
1684 fn from(unique: Unique<T>) -> Self {
1685 unique.as_non_null_ptr()
1686 }
1687}
1688
1689#[stable(feature = "nonnull", since = "1.25.0")]
1690impl<T: ?Sized> From<&mut T> for NonNull<T> {
1691 /// Converts a `&mut T` to a `NonNull<T>`.
1692 ///
1693 /// This conversion is safe and infallible since references cannot be null.
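    ///
    /// # Examples
    ///
    /// A small sketch (the variable is illustrative); because the pointer comes from an
    /// exclusive reference, writing through it is permitted:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut value = 0u32;
    /// let ptr = NonNull::from(&mut value);
    /// // SAFETY: `ptr` was just derived from `&mut value`, which is live and not
    /// // accessed through any other pointer while we write.
    /// unsafe { ptr.as_ptr().write(42) };
    /// assert_eq!(value, 42);
    /// ```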
1694 #[inline]
1695 fn from(r: &mut T) -> Self {
1696 NonNull::from_mut(r)
1697 }
1698}
1699
1700#[stable(feature = "nonnull", since = "1.25.0")]
1701impl<T: ?Sized> From<&T> for NonNull<T> {
1702 /// Converts a `&T` to a `NonNull<T>`.
1703 ///
1704 /// This conversion is safe and infallible since references cannot be null.
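    ///
    /// # Examples
    ///
    /// A small sketch (the variable is illustrative); the pointer comes from a shared
    /// reference, so it may only be used for reads:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let value = 42u32;
    /// let ptr = NonNull::from(&value);
    /// // SAFETY: `ptr` points to `value`, which outlives this read, and we only read.
    /// assert_eq!(unsafe { ptr.read() }, 42);
    /// ```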
1705 #[inline]
1706 fn from(r: &T) -> Self {
1707 NonNull::from_ref(r)
1708 }
1709}
1710