core/ptr/non_null.rs
1use crate::cmp::Ordering;
2use crate::marker::Unsize;
3use crate::mem::{MaybeUninit, SizedTypeProperties};
4use crate::num::NonZero;
5use crate::ops::{CoerceUnsized, DispatchFromDyn};
6use crate::pin::PinCoerceUnsized;
7use crate::ptr::Unique;
8use crate::slice::{self, SliceIndex};
9use crate::ub_checks::assert_unsafe_precondition;
10use crate::{fmt, hash, intrinsics, mem, ptr};
11
12/// `*mut T` but non-zero and [covariant].
13///
14/// This is often the correct thing to use when building data structures using
15/// raw pointers, but is ultimately more dangerous to use because of its additional
16/// properties. If you're not sure if you should use `NonNull<T>`, just use `*mut T`!
17///
18/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
19/// is never dereferenced. This is so that enums may use this forbidden value
20/// as a discriminant -- `Option<NonNull<T>>` has the same size as `*mut T`.
21/// However the pointer may still dangle if it isn't dereferenced.
22///
23/// Unlike `*mut T`, `NonNull<T>` was chosen to be covariant over `T`. This makes it
24/// possible to use `NonNull<T>` when building covariant types, but introduces the
25/// risk of unsoundness if used in a type that shouldn't actually be covariant.
26/// (The opposite choice was made for `*mut T` even though technically the unsoundness
27/// could only be caused by calling unsafe functions.)
28///
29/// Covariance is correct for most safe abstractions, such as `Box`, `Rc`, `Arc`, `Vec`,
30/// and `LinkedList`. This is the case because they provide a public API that follows the
31/// normal shared XOR mutable rules of Rust.
32///
33/// If your type cannot safely be covariant, you must ensure it contains some
34/// additional field to provide invariance. Often this field will be a [`PhantomData`]
35/// type like `PhantomData<Cell<T>>` or `PhantomData<&'a mut T>`.
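///
/// For example, a minimal sketch of such a wrapper (the struct and field names are
/// purely illustrative):
///
/// ```
/// use std::cell::Cell;
/// use std::marker::PhantomData;
/// use std::ptr::NonNull;
///
/// struct Invariant<T> {
///     ptr: NonNull<T>,
///     // `Cell<T>` is invariant over `T`, so this marker removes the covariance
///     // that `NonNull<T>` alone would give this struct.
///     _invariant: PhantomData<Cell<T>>,
/// }
/// ```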
36///
37/// Notice that `NonNull<T>` has a `From` instance for `&T`. However, this does
38/// not change the fact that mutating through a (pointer derived from a) shared
39/// reference is undefined behavior unless the mutation happens inside an
40/// [`UnsafeCell<T>`]. The same goes for creating a mutable reference from a shared
41/// reference. When using this `From` instance without an `UnsafeCell<T>`,
42/// it is your responsibility to ensure that `as_mut` is never called, and `as_ptr`
43/// is never used for mutation.
44///
45/// # Representation
46///
47/// Thanks to the [null pointer optimization],
48/// `NonNull<T>` and `Option<NonNull<T>>`
49/// are guaranteed to have the same size and alignment:
50///
51/// ```
52/// use std::ptr::NonNull;
53///
54/// assert_eq!(size_of::<NonNull<i16>>(), size_of::<Option<NonNull<i16>>>());
55/// assert_eq!(align_of::<NonNull<i16>>(), align_of::<Option<NonNull<i16>>>());
56///
57/// assert_eq!(size_of::<NonNull<str>>(), size_of::<Option<NonNull<str>>>());
58/// assert_eq!(align_of::<NonNull<str>>(), align_of::<Option<NonNull<str>>>());
59/// ```
60///
61/// [covariant]: https://doc.rust-lang.org/reference/subtyping.html
62/// [`PhantomData`]: crate::marker::PhantomData
63/// [`UnsafeCell<T>`]: crate::cell::UnsafeCell
64/// [null pointer optimization]: crate::option#representation
65#[stable(feature = "nonnull", since = "1.25.0")]
66#[repr(transparent)]
67#[rustc_layout_scalar_valid_range_start(1)]
68#[rustc_nonnull_optimization_guaranteed]
69#[rustc_diagnostic_item = "NonNull"]
70pub struct NonNull<T: ?Sized> {
71 // Remember to use `.as_ptr()` instead of `.pointer`, as field projecting to
72 // this is banned by <https://github.com/rust-lang/compiler-team/issues/807>.
73 pointer: *const T,
74}
75
76/// `NonNull` pointers are not `Send` because the data they reference may be aliased.
77// N.B., this impl is unnecessary, but should provide better error messages.
78#[stable(feature = "nonnull", since = "1.25.0")]
79impl<T: ?Sized> !Send for NonNull<T> {}
80
81/// `NonNull` pointers are not `Sync` because the data they reference may be aliased.
82// N.B., this impl is unnecessary, but should provide better error messages.
83#[stable(feature = "nonnull", since = "1.25.0")]
84impl<T: ?Sized> !Sync for NonNull<T> {}
85
86impl<T: Sized> NonNull<T> {
87 /// Creates a pointer with the given address and no [provenance][crate::ptr#provenance].
88 ///
89 /// For more details, see the equivalent method on a raw pointer, [`ptr::without_provenance_mut`].
90 ///
91 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
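    ///
    /// # Examples
    ///
    /// A minimal sketch (the address value is arbitrary):
    ///
    /// ```
    /// #![feature(nonnull_provenance)]
    /// use std::num::NonZero;
    /// use std::ptr::NonNull;
    ///
    /// let addr = NonZero::new(0x400usize).unwrap();
    /// // No provenance: the resulting pointer must never be dereferenced.
    /// let ptr = NonNull::<u8>::without_provenance(addr);
    /// assert_eq!(ptr.addr(), addr);
    /// ```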
92 #[unstable(feature = "nonnull_provenance", issue = "135243")]
93 #[must_use]
94 #[inline]
95 pub const fn without_provenance(addr: NonZero<usize>) -> Self {
96 let pointer = crate::ptr::without_provenance(addr.get());
97 // SAFETY: we know `addr` is non-zero.
98 unsafe { NonNull { pointer } }
99 }
100
101 /// Creates a new `NonNull` that is dangling, but well-aligned.
102 ///
103 /// This is useful for initializing types which lazily allocate, like
104 /// `Vec::new` does.
105 ///
106 /// Note that the pointer value may potentially represent a valid pointer to
107 /// a `T`, which means this must not be used as a "not yet initialized"
108 /// sentinel value. Types that lazily allocate must track initialization by
109 /// some other means.
110 ///
111 /// # Examples
112 ///
113 /// ```
114 /// use std::ptr::NonNull;
115 ///
116 /// let ptr = NonNull::<u32>::dangling();
117 /// // Important: don't try to access the value of `ptr` without
118 /// // initializing it first! The pointer is not null but isn't valid either!
119 /// ```
120 #[stable(feature = "nonnull", since = "1.25.0")]
121 #[rustc_const_stable(feature = "const_nonnull_dangling", since = "1.36.0")]
122 #[must_use]
123 #[inline]
124 pub const fn dangling() -> Self {
125 let align = crate::ptr::Alignment::of::<T>();
126 NonNull::without_provenance(align.as_nonzero())
127 }
128
129 /// Converts an address back to a mutable pointer, picking up some previously 'exposed'
130 /// [provenance][crate::ptr#provenance].
131 ///
132 /// For more details, see the equivalent method on a raw pointer, [`ptr::with_exposed_provenance_mut`].
133 ///
134 /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
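    ///
    /// # Examples
    ///
    /// A minimal sketch of a round trip through an exposed address:
    ///
    /// ```
    /// #![feature(nonnull_provenance)]
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 7u32;
    /// let ptr = NonNull::from(&mut x);
    /// // Expose the provenance and keep only the address.
    /// let addr = ptr.expose_provenance();
    /// // Later, reconstruct a usable pointer from that address.
    /// let ptr2 = NonNull::<u32>::with_exposed_provenance(addr);
    /// assert_eq!(unsafe { ptr2.read() }, 7);
    /// ```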
135 #[unstable(feature = "nonnull_provenance", issue = "135243")]
136 #[inline]
137 pub fn with_exposed_provenance(addr: NonZero<usize>) -> Self {
138 // SAFETY: we know `addr` is non-zero.
139 unsafe {
140 let ptr = crate::ptr::with_exposed_provenance_mut(addr.get());
141 NonNull::new_unchecked(ptr)
142 }
143 }
144
145 /// Returns a shared reference to the value. In contrast to [`as_ref`], this does not require
146 /// that the value be initialized.
147 ///
148 /// For the mutable counterpart see [`as_uninit_mut`].
149 ///
150 /// [`as_ref`]: NonNull::as_ref
151 /// [`as_uninit_mut`]: NonNull::as_uninit_mut
152 ///
153 /// # Safety
154 ///
155 /// When calling this method, you have to ensure that
156 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
157 /// Note that because the created reference is to `MaybeUninit<T>`, the
158 /// source pointer can point to uninitialized memory.
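    ///
    /// # Examples
    ///
    /// A minimal sketch (the value is initialized up front only to keep the example short):
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    /// use std::ptr::NonNull;
    ///
    /// let x = 42u32;
    /// let ptr = NonNull::from(&x);
    /// // SAFETY: `ptr` comes from a live reference, so it is convertible to a reference.
    /// let uninit = unsafe { ptr.as_uninit_ref() };
    /// // SAFETY: the pointee was initialized above.
    /// assert_eq!(unsafe { *uninit.assume_init_ref() }, 42);
    /// ```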
159 #[inline]
160 #[must_use]
161 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
162 pub const unsafe fn as_uninit_ref<'a>(self) -> &'a MaybeUninit<T> {
163 // SAFETY: the caller must guarantee that `self` meets all the
164 // requirements for a reference.
165 unsafe { &*self.cast().as_ptr() }
166 }
167
168 /// Returns a unique reference to the value. In contrast to [`as_mut`], this does not require
169 /// that the value be initialized.
170 ///
171 /// For the shared counterpart see [`as_uninit_ref`].
172 ///
173 /// [`as_mut`]: NonNull::as_mut
174 /// [`as_uninit_ref`]: NonNull::as_uninit_ref
175 ///
176 /// # Safety
177 ///
178 /// When calling this method, you have to ensure that
179 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
180 /// Note that because the created reference is to `MaybeUninit<T>`, the
181 /// source pointer can point to uninitialized memory.
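    ///
    /// # Examples
    ///
    /// A minimal sketch that initializes a value through the `MaybeUninit` view:
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    /// use std::mem::MaybeUninit;
    /// use std::ptr::NonNull;
    ///
    /// let mut slot = MaybeUninit::<u32>::uninit();
    /// let ptr = NonNull::from(&mut slot).cast::<u32>();
    /// // SAFETY: `ptr` points to memory valid for a `u32`, even though it is uninitialized.
    /// unsafe { ptr.as_uninit_mut().write(5) };
    /// // SAFETY: the slot was just initialized.
    /// assert_eq!(unsafe { slot.assume_init() }, 5);
    /// ```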
182 #[inline]
183 #[must_use]
184 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
185 pub const unsafe fn as_uninit_mut<'a>(self) -> &'a mut MaybeUninit<T> {
186 // SAFETY: the caller must guarantee that `self` meets all the
187 // requirements for a reference.
188 unsafe { &mut *self.cast().as_ptr() }
189 }
190}
191
192impl<T: ?Sized> NonNull<T> {
193 /// Creates a new `NonNull`.
194 ///
195 /// # Safety
196 ///
197 /// `ptr` must be non-null.
198 ///
199 /// # Examples
200 ///
201 /// ```
202 /// use std::ptr::NonNull;
203 ///
204 /// let mut x = 0u32;
205 /// let ptr = unsafe { NonNull::new_unchecked(&mut x as *mut _) };
206 /// ```
207 ///
208 /// *Incorrect* usage of this function:
209 ///
210 /// ```rust,no_run
211 /// use std::ptr::NonNull;
212 ///
213 /// // NEVER DO THAT!!! This is undefined behavior. ⚠️
214 /// let ptr = unsafe { NonNull::<u32>::new_unchecked(std::ptr::null_mut()) };
215 /// ```
216 #[stable(feature = "nonnull", since = "1.25.0")]
217 #[rustc_const_stable(feature = "const_nonnull_new_unchecked", since = "1.25.0")]
218 #[inline]
219 pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
220 // SAFETY: the caller must guarantee that `ptr` is non-null.
221 unsafe {
222 assert_unsafe_precondition!(
223 check_language_ub,
224 "NonNull::new_unchecked requires that the pointer is non-null",
225 (ptr: *mut () = ptr as *mut ()) => !ptr.is_null()
226 );
227 NonNull { pointer: ptr as _ }
228 }
229 }
230
231 /// Creates a new `NonNull` if `ptr` is non-null.
232 ///
233 /// # Panics during const evaluation
234 ///
235 /// This method will panic during const evaluation if the pointer cannot be
236 /// determined to be null or not. See [`is_null`] for more information.
237 ///
238 /// [`is_null`]: ../primitive.pointer.html#method.is_null-1
239 ///
240 /// # Examples
241 ///
242 /// ```
243 /// use std::ptr::NonNull;
244 ///
245 /// let mut x = 0u32;
246 /// let ptr = NonNull::<u32>::new(&mut x as *mut _).expect("ptr is null!");
247 ///
248 /// if let Some(ptr) = NonNull::<u32>::new(std::ptr::null_mut()) {
249 /// unreachable!();
250 /// }
251 /// ```
252 #[stable(feature = "nonnull", since = "1.25.0")]
253 #[rustc_const_stable(feature = "const_nonnull_new", since = "1.85.0")]
254 #[inline]
255 pub const fn new(ptr: *mut T) -> Option<Self> {
256 if !ptr.is_null() {
257 // SAFETY: The pointer is already checked and is not null
258 Some(unsafe { Self::new_unchecked(ptr) })
259 } else {
260 None
261 }
262 }
263
264 /// Converts a reference to a `NonNull` pointer.
265 #[unstable(feature = "non_null_from_ref", issue = "130823")]
266 #[inline]
267 pub const fn from_ref(r: &T) -> Self {
268 // SAFETY: A reference cannot be null.
269 unsafe { NonNull { pointer: r as *const T } }
270 }
271
272 /// Converts a mutable reference to a `NonNull` pointer.
273 #[unstable(feature = "non_null_from_ref", issue = "130823")]
274 #[inline]
275 pub const fn from_mut(r: &mut T) -> Self {
276 // SAFETY: A mutable reference cannot be null.
277 unsafe { NonNull { pointer: r as *mut T } }
278 }
279
280 /// Performs the same functionality as [`std::ptr::from_raw_parts`], except that a
281 /// `NonNull` pointer is returned, as opposed to a raw `*const` pointer.
282 ///
283 /// See the documentation of [`std::ptr::from_raw_parts`] for more details.
284 ///
285 /// [`std::ptr::from_raw_parts`]: crate::ptr::from_raw_parts
286 #[unstable(feature = "ptr_metadata", issue = "81513")]
287 #[inline]
288 pub const fn from_raw_parts(
289 data_pointer: NonNull<impl super::Thin>,
290 metadata: <T as super::Pointee>::Metadata,
291 ) -> NonNull<T> {
292 // SAFETY: The result of `ptr::from_raw_parts_mut` is non-null because `data_pointer` is.
293 unsafe {
294 NonNull::new_unchecked(super::from_raw_parts_mut(data_pointer.as_ptr(), metadata))
295 }
296 }
297
298 /// Decompose a (possibly wide) pointer into its data pointer and metadata components.
299 ///
300 /// The pointer can be later reconstructed with [`NonNull::from_raw_parts`].
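    ///
    /// # Examples
    ///
    /// A minimal sketch of a round trip through the raw parts of a slice pointer:
    ///
    /// ```
    /// #![feature(ptr_metadata)]
    /// use std::ptr::NonNull;
    ///
    /// let mut arr = [1i32, 2, 3];
    /// let slice: NonNull<[i32]> = NonNull::from(&mut arr[..]);
    /// let (data, len) = slice.to_raw_parts();
    /// assert_eq!(len, 3);
    /// // Reassemble the same wide pointer from its parts.
    /// let rebuilt: NonNull<[i32]> = NonNull::from_raw_parts(data, len);
    /// assert_eq!(rebuilt, slice);
    /// ```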
301 #[unstable(feature = "ptr_metadata", issue = "81513")]
302 #[must_use = "this returns the result of the operation, \
303 without modifying the original"]
304 #[inline]
305 pub const fn to_raw_parts(self) -> (NonNull<()>, <T as super::Pointee>::Metadata) {
306 (self.cast(), super::metadata(self.as_ptr()))
307 }
308
309 /// Gets the "address" portion of the pointer.
310 ///
311 /// For more details, see the equivalent method on a raw pointer, [`pointer::addr`].
312 ///
313 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
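    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let a = [0u8; 2];
    /// let p0 = NonNull::from(&a[0]);
    /// let p1 = NonNull::from(&a[1]);
    /// // Adjacent `u8` elements are one byte apart.
    /// assert_eq!(p1.addr().get() - p0.addr().get(), 1);
    /// ```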
314 #[must_use]
315 #[inline]
316 #[stable(feature = "strict_provenance", since = "1.84.0")]
317 pub fn addr(self) -> NonZero<usize> {
318 // SAFETY: The pointer is guaranteed by the type to be non-null,
319 // meaning that the address will be non-zero.
320 unsafe { NonZero::new_unchecked(self.as_ptr().addr()) }
321 }
322
323 /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
324 /// [`with_exposed_provenance`][NonNull::with_exposed_provenance] and returns the "address" portion.
325 ///
326 /// For more details, see the equivalent method on a raw pointer, [`pointer::expose_provenance`].
327 ///
328 /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
329 #[unstable(feature = "nonnull_provenance", issue = "135243")]
330 pub fn expose_provenance(self) -> NonZero<usize> {
331 // SAFETY: The pointer is guaranteed by the type to be non-null,
332 // meaning that the address will be non-zero.
333 unsafe { NonZero::new_unchecked(self.as_ptr().expose_provenance()) }
334 }
335
336 /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
337 /// `self`.
338 ///
339 /// For more details, see the equivalent method on a raw pointer, [`pointer::with_addr`].
340 ///
341 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
342 #[must_use]
343 #[inline]
344 #[stable(feature = "strict_provenance", since = "1.84.0")]
345 pub fn with_addr(self, addr: NonZero<usize>) -> Self {
346 // SAFETY: The result of `pointer::with_addr` is non-null because `addr` is guaranteed to be non-zero.
347 unsafe { NonNull::new_unchecked(self.as_ptr().with_addr(addr.get()) as *mut _) }
348 }
349
350 /// Creates a new pointer by mapping `self`'s address to a new one, preserving the
351 /// [provenance][crate::ptr#provenance] of `self`.
352 ///
353 /// For more details, see the equivalent method on a raw pointer, [`pointer::map_addr`].
354 ///
355 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
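    ///
    /// # Examples
    ///
    /// A minimal sketch that moves the address forward by one byte while keeping the
    /// provenance of `self` (the result is only inspected, never dereferenced):
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let arr = [0u8; 4];
    /// let ptr = NonNull::from(&arr[0]);
    /// let next = ptr.map_addr(|a| a.checked_add(1).unwrap());
    /// assert_eq!(next.addr().get(), ptr.addr().get() + 1);
    /// ```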
356 #[must_use]
357 #[inline]
358 #[stable(feature = "strict_provenance", since = "1.84.0")]
359 pub fn map_addr(self, f: impl FnOnce(NonZero<usize>) -> NonZero<usize>) -> Self {
360 self.with_addr(f(self.addr()))
361 }
362
363 /// Acquires the underlying `*mut` pointer.
364 ///
365 /// # Examples
366 ///
367 /// ```
368 /// use std::ptr::NonNull;
369 ///
370 /// let mut x = 0u32;
371 /// let ptr = NonNull::new(&mut x).expect("ptr is null!");
372 ///
373 /// let x_value = unsafe { *ptr.as_ptr() };
374 /// assert_eq!(x_value, 0);
375 ///
376 /// unsafe { *ptr.as_ptr() += 2; }
377 /// let x_value = unsafe { *ptr.as_ptr() };
378 /// assert_eq!(x_value, 2);
379 /// ```
380 #[stable(feature = "nonnull", since = "1.25.0")]
381 #[rustc_const_stable(feature = "const_nonnull_as_ptr", since = "1.32.0")]
382 #[rustc_never_returns_null_ptr]
383 #[must_use]
384 #[inline(always)]
385 pub const fn as_ptr(self) -> *mut T {
386 // This is a transmute for the same reasons as `NonZero::get`.
387
388 // SAFETY: `NonNull` is `transparent` over a `*const T`, and `*const T`
389 // and `*mut T` have the same layout, so transitively we can transmute
390 // our `NonNull` to a `*mut T` directly.
391 unsafe { mem::transmute::<Self, *mut T>(self) }
392 }
393
394 /// Returns a shared reference to the value. If the value may be uninitialized, [`as_uninit_ref`]
395 /// must be used instead.
396 ///
397 /// For the mutable counterpart see [`as_mut`].
398 ///
399 /// [`as_uninit_ref`]: NonNull::as_uninit_ref
400 /// [`as_mut`]: NonNull::as_mut
401 ///
402 /// # Safety
403 ///
404 /// When calling this method, you have to ensure that
405 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
406 ///
407 /// # Examples
408 ///
409 /// ```
410 /// use std::ptr::NonNull;
411 ///
412 /// let mut x = 0u32;
413 /// let ptr = NonNull::new(&mut x as *mut _).expect("ptr is null!");
414 ///
415 /// let ref_x = unsafe { ptr.as_ref() };
416 /// println!("{ref_x}");
417 /// ```
418 ///
419 /// [the module documentation]: crate::ptr#safety
420 #[stable(feature = "nonnull", since = "1.25.0")]
421 #[rustc_const_stable(feature = "const_nonnull_as_ref", since = "1.73.0")]
422 #[must_use]
423 #[inline(always)]
424 pub const unsafe fn as_ref<'a>(&self) -> &'a T {
425 // SAFETY: the caller must guarantee that `self` meets all the
426 // requirements for a reference.
427 // `cast_const` avoids a mutable raw pointer deref.
428 unsafe { &*self.as_ptr().cast_const() }
429 }
430
431 /// Returns a unique reference to the value. If the value may be uninitialized, [`as_uninit_mut`]
432 /// must be used instead.
433 ///
434 /// For the shared counterpart see [`as_ref`].
435 ///
436 /// [`as_uninit_mut`]: NonNull::as_uninit_mut
437 /// [`as_ref`]: NonNull::as_ref
438 ///
439 /// # Safety
440 ///
441 /// When calling this method, you have to ensure that
442 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
///
443 /// # Examples
444 ///
445 /// ```
446 /// use std::ptr::NonNull;
447 ///
448 /// let mut x = 0u32;
449 /// let mut ptr = NonNull::new(&mut x).expect("null pointer");
450 ///
451 /// let x_ref = unsafe { ptr.as_mut() };
452 /// assert_eq!(*x_ref, 0);
453 /// *x_ref += 2;
454 /// assert_eq!(*x_ref, 2);
455 /// ```
456 ///
457 /// [the module documentation]: crate::ptr#safety
458 #[stable(feature = "nonnull", since = "1.25.0")]
459 #[rustc_const_stable(feature = "const_ptr_as_ref", since = "1.83.0")]
460 #[must_use]
461 #[inline(always)]
462 pub const unsafe fn as_mut<'a>(&mut self) -> &'a mut T {
463 // SAFETY: the caller must guarantee that `self` meets all the
464 // requirements for a mutable reference.
465 unsafe { &mut *self.as_ptr() }
466 }
467
468 /// Casts to a pointer of another type.
469 ///
470 /// # Examples
471 ///
472 /// ```
473 /// use std::ptr::NonNull;
474 ///
475 /// let mut x = 0u32;
476 /// let ptr = NonNull::new(&mut x as *mut _).expect("null pointer");
477 ///
478 /// let casted_ptr = ptr.cast::<i8>();
479 /// let raw_ptr: *mut i8 = casted_ptr.as_ptr();
480 /// ```
481 #[stable(feature = "nonnull_cast", since = "1.27.0")]
482 #[rustc_const_stable(feature = "const_nonnull_cast", since = "1.36.0")]
483 #[must_use = "this returns the result of the operation, \
484 without modifying the original"]
485 #[inline]
486 pub const fn cast<U>(self) -> NonNull<U> {
487 // SAFETY: `self` is a `NonNull` pointer which is necessarily non-null
488 unsafe { NonNull { pointer: self.as_ptr() as *mut U } }
489 }
490
491 /// Adds an offset to a pointer.
492 ///
493 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
494 /// offset of `3 * size_of::<T>()` bytes.
495 ///
496 /// # Safety
497 ///
498 /// If any of the following conditions are violated, the result is Undefined Behavior:
499 ///
500 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
501 ///
502 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
503 /// [allocated object], and the entire memory range between `self` and the result must be in
504 /// bounds of that allocated object. In particular, this range must not "wrap around" the edge
505 /// of the address space.
506 ///
507 /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
508 /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
509 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
510 /// safe.
511 ///
512 /// [allocated object]: crate::ptr#allocated-object
513 ///
514 /// # Examples
515 ///
516 /// ```
517 /// use std::ptr::NonNull;
518 ///
519 /// let mut s = [1, 2, 3];
520 /// let ptr: NonNull<u32> = NonNull::new(s.as_mut_ptr()).unwrap();
521 ///
522 /// unsafe {
523 /// println!("{}", ptr.offset(1).read());
524 /// println!("{}", ptr.offset(2).read());
525 /// }
526 /// ```
527 #[inline(always)]
528 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
529 #[must_use = "returns a new pointer rather than modifying its argument"]
530 #[stable(feature = "non_null_convenience", since = "1.80.0")]
531 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
532 pub const unsafe fn offset(self, count: isize) -> Self
533 where
534 T: Sized,
535 {
536 // SAFETY: the caller must uphold the safety contract for `offset`.
537 // Additionally safety contract of `offset` guarantees that the resulting pointer is
538 // pointing to an allocation, there can't be an allocation at null, thus it's safe to
539 // construct `NonNull`.
540 unsafe { NonNull { pointer: intrinsics::offset(self.as_ptr(), count) } }
541 }
542
543 /// Calculates the offset from a pointer in bytes.
544 ///
545 /// `count` is in units of **bytes**.
546 ///
547 /// This is purely a convenience for casting to a `u8` pointer and
548 /// using [offset][pointer::offset] on it. See that method for documentation
549 /// and safety requirements.
550 ///
551 /// For non-`Sized` pointees this operation changes only the data pointer,
552 /// leaving the metadata untouched.
553 #[must_use]
554 #[inline(always)]
555 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
556 #[stable(feature = "non_null_convenience", since = "1.80.0")]
557 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
558 pub const unsafe fn byte_offset(self, count: isize) -> Self {
559 // SAFETY: the caller must uphold the safety contract for `offset` and `byte_offset` has
560 // the same safety contract.
561 // Additionally safety contract of `offset` guarantees that the resulting pointer is
562 // pointing to an allocation, there can't be an allocation at null, thus it's safe to
563 // construct `NonNull`.
564 unsafe { NonNull { pointer: self.as_ptr().byte_offset(count) } }
565 }
566
567 /// Adds an offset to a pointer (convenience for `.offset(count as isize)`).
568 ///
569 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
570 /// offset of `3 * size_of::<T>()` bytes.
571 ///
572 /// # Safety
573 ///
574 /// If any of the following conditions are violated, the result is Undefined Behavior:
575 ///
576 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
577 ///
578 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
579 /// [allocated object], and the entire memory range between `self` and the result must be in
580 /// bounds of that allocated object. In particular, this range must not "wrap around" the edge
581 /// of the address space.
582 ///
583 /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
584 /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
585 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
586 /// safe.
587 ///
588 /// [allocated object]: crate::ptr#allocated-object
589 ///
590 /// # Examples
591 ///
592 /// ```
593 /// use std::ptr::NonNull;
594 ///
595 /// let s: &str = "123";
596 /// let ptr: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap();
597 ///
598 /// unsafe {
599 /// println!("{}", ptr.add(1).read() as char);
600 /// println!("{}", ptr.add(2).read() as char);
601 /// }
602 /// ```
603 #[inline(always)]
604 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
605 #[must_use = "returns a new pointer rather than modifying its argument"]
606 #[stable(feature = "non_null_convenience", since = "1.80.0")]
607 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
608 pub const unsafe fn add(self, count: usize) -> Self
609 where
610 T: Sized,
611 {
612 // SAFETY: the caller must uphold the safety contract for `offset`.
613 // Additionally safety contract of `offset` guarantees that the resulting pointer is
614 // pointing to an allocation, there can't be an allocation at null, thus it's safe to
615 // construct `NonNull`.
616 unsafe { NonNull { pointer: intrinsics::offset(self.as_ptr(), count) } }
617 }
618
619 /// Calculates the offset from a pointer in bytes (convenience for `.byte_offset(count as isize)`).
620 ///
621 /// `count` is in units of bytes.
622 ///
623 /// This is purely a convenience for casting to a `u8` pointer and
624 /// using [`add`][NonNull::add] on it. See that method for documentation
625 /// and safety requirements.
626 ///
627 /// For non-`Sized` pointees this operation changes only the data pointer,
628 /// leaving the metadata untouched.
629 #[must_use]
630 #[inline(always)]
631 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
632 #[stable(feature = "non_null_convenience", since = "1.80.0")]
633 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
634 pub const unsafe fn byte_add(self, count: usize) -> Self {
635 // SAFETY: the caller must uphold the safety contract for `add` and `byte_add` has the same
636 // safety contract.
637 // Additionally safety contract of `add` guarantees that the resulting pointer is pointing
638 // to an allocation, there can't be an allocation at null, thus it's safe to construct
639 // `NonNull`.
640 unsafe { NonNull { pointer: self.as_ptr().byte_add(count) } }
641 }
642
643 /// Subtracts an offset from a pointer (convenience for
644 /// `.offset((count as isize).wrapping_neg())`).
645 ///
646 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
647 /// offset of `3 * size_of::<T>()` bytes.
648 ///
649 /// # Safety
650 ///
651 /// If any of the following conditions are violated, the result is Undefined Behavior:
652 ///
653 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
654 ///
655 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
656 /// [allocated object], and the entire memory range between `self` and the result must be in
657 /// bounds of that allocated object. In particular, this range must not "wrap around" the edge
658 /// of the address space.
659 ///
660 /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
661 /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
662 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
663 /// safe.
664 ///
665 /// [allocated object]: crate::ptr#allocated-object
666 ///
667 /// # Examples
668 ///
669 /// ```
670 /// use std::ptr::NonNull;
671 ///
672 /// let s: &str = "123";
673 ///
674 /// unsafe {
675 /// let end: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap().add(3);
676 /// println!("{}", end.sub(1).read() as char);
677 /// println!("{}", end.sub(2).read() as char);
678 /// }
679 /// ```
680 #[inline(always)]
681 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
682 #[must_use = "returns a new pointer rather than modifying its argument"]
683 #[stable(feature = "non_null_convenience", since = "1.80.0")]
684 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
685 pub const unsafe fn sub(self, count: usize) -> Self
686 where
687 T: Sized,
688 {
689 if T::IS_ZST {
690 // Pointer arithmetic does nothing when the pointee is a ZST.
691 self
692 } else {
693 // SAFETY: the caller must uphold the safety contract for `offset`.
694 // Because the pointee is *not* a ZST, that means that `count` is
695 // at most `isize::MAX`, and thus the negation cannot overflow.
696 unsafe { self.offset((count as isize).unchecked_neg()) }
697 }
698 }
699
700 /// Calculates the offset from a pointer in bytes (convenience for
701 /// `.byte_offset((count as isize).wrapping_neg())`).
702 ///
703 /// `count` is in units of bytes.
704 ///
705 /// This is purely a convenience for casting to a `u8` pointer and
706 /// using [`sub`][NonNull::sub] on it. See that method for documentation
707 /// and safety requirements.
708 ///
709 /// For non-`Sized` pointees this operation changes only the data pointer,
710 /// leaving the metadata untouched.
711 #[must_use]
712 #[inline(always)]
713 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
714 #[stable(feature = "non_null_convenience", since = "1.80.0")]
715 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
716 pub const unsafe fn byte_sub(self, count: usize) -> Self {
717 // SAFETY: the caller must uphold the safety contract for `sub` and `byte_sub` has the same
718 // safety contract.
719 // Additionally safety contract of `sub` guarantees that the resulting pointer is pointing
720 // to an allocation, there can't be an allocation at null, thus it's safe to construct
721 // `NonNull`.
722 unsafe { NonNull { pointer: self.as_ptr().byte_sub(count) } }
723 }
724
725 /// Calculates the distance between two pointers within the same allocation. The returned value is in
726 /// units of T: the distance in bytes divided by `size_of::<T>()`.
727 ///
728 /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
729 /// except that it has a lot more opportunities for UB, in exchange for the compiler
730 /// better understanding what you are doing.
731 ///
732 /// The primary motivation of this method is for computing the `len` of an array/slice
733 /// of `T` that you are currently representing as a "start" and "end" pointer
734 /// (and "end" is "one past the end" of the array).
735 /// In that case, `end.offset_from(start)` gets you the length of the array.
736 ///
737 /// All of the following safety requirements are trivially satisfied for this use case.
738 ///
739 /// [`offset`]: #method.offset
740 ///
741 /// # Safety
742 ///
743 /// If any of the following conditions are violated, the result is Undefined Behavior:
744 ///
745 /// * `self` and `origin` must either
746 ///
747 /// * point to the same address, or
748 /// * both be *derived from* a pointer to the same [allocated object], and the memory range between
749 /// the two pointers must be in bounds of that object. (See below for an example.)
750 ///
751 /// * The distance between the pointers, in bytes, must be an exact multiple
752 /// of the size of `T`.
753 ///
754 /// As a consequence, the absolute distance between the pointers, in bytes, computed on
755 /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
756 /// implied by the in-bounds requirement, and the fact that no allocated object can be larger
757 /// than `isize::MAX` bytes.
758 ///
759 /// The requirement for pointers to be derived from the same allocated object is primarily
760 /// needed for `const`-compatibility: the distance between pointers into *different* allocated
761 /// objects is not known at compile-time. However, the requirement also exists at
762 /// runtime and may be exploited by optimizations. If you wish to compute the difference between
763 /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
764 /// origin as isize) / size_of::<T>()`.
765 // FIXME: recommend `addr()` instead of `as usize` once that is stable.
766 ///
767 /// [`add`]: #method.add
768 /// [allocated object]: crate::ptr#allocated-object
769 ///
770 /// # Panics
771 ///
772 /// This function panics if `T` is a Zero-Sized Type ("ZST").
773 ///
774 /// # Examples
775 ///
776 /// Basic usage:
777 ///
778 /// ```
779 /// use std::ptr::NonNull;
780 ///
781 /// let a = [0; 5];
782 /// let ptr1: NonNull<u32> = NonNull::from(&a[1]);
783 /// let ptr2: NonNull<u32> = NonNull::from(&a[3]);
784 /// unsafe {
785 /// assert_eq!(ptr2.offset_from(ptr1), 2);
786 /// assert_eq!(ptr1.offset_from(ptr2), -2);
787 /// assert_eq!(ptr1.offset(2), ptr2);
788 /// assert_eq!(ptr2.offset(-2), ptr1);
789 /// }
790 /// ```
791 ///
792 /// *Incorrect* usage:
793 ///
794 /// ```rust,no_run
795 /// use std::ptr::NonNull;
796 ///
797 /// let ptr1 = NonNull::new(Box::into_raw(Box::new(0u8))).unwrap();
798 /// let ptr2 = NonNull::new(Box::into_raw(Box::new(1u8))).unwrap();
799 /// let diff = (ptr2.addr().get() as isize).wrapping_sub(ptr1.addr().get() as isize);
800 /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
801 /// let diff_plus_1 = diff.wrapping_add(1);
802 /// let ptr2_other = NonNull::new(ptr1.as_ptr().wrapping_byte_offset(diff_plus_1)).unwrap();
803 /// assert_eq!(ptr2.addr(), ptr2_other.addr());
804 /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
805 /// // computing their offset is undefined behavior, even though
806 /// // they point to addresses that are in-bounds of the same object!
807 ///
808 /// let one = unsafe { ptr2_other.offset_from(ptr2) }; // Undefined Behavior! ⚠️
809 /// ```
810 #[inline]
811 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
812 #[stable(feature = "non_null_convenience", since = "1.80.0")]
813 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
814 pub const unsafe fn offset_from(self, origin: NonNull<T>) -> isize
815 where
816 T: Sized,
817 {
818 // SAFETY: the caller must uphold the safety contract for `offset_from`.
819 unsafe { self.as_ptr().offset_from(origin.as_ptr()) }
820 }
821
822 /// Calculates the distance between two pointers within the same allocation. The returned value is in
823 /// units of **bytes**.
824 ///
825 /// This is purely a convenience for casting to a `u8` pointer and
826 /// using [`offset_from`][NonNull::offset_from] on it. See that method for
827 /// documentation and safety requirements.
828 ///
829 /// For non-`Sized` pointees this operation considers only the data pointers,
830 /// ignoring the metadata.
831 #[inline(always)]
832 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
833 #[stable(feature = "non_null_convenience", since = "1.80.0")]
834 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
835 pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: NonNull<U>) -> isize {
836 // SAFETY: the caller must uphold the safety contract for `byte_offset_from`.
837 unsafe { self.as_ptr().byte_offset_from(origin.as_ptr()) }
838 }
839
840 // N.B. `wrapping_offset`, `wrapping_add`, etc. are not implemented because they can wrap to null
841
842 /// Calculates the distance between two pointers within the same allocation, *where it's known that
843 /// `self` is equal to or greater than `origin`*. The returned value is in
844 /// units of T: the distance in bytes is divided by `size_of::<T>()`.
845 ///
846 /// This computes the same value that [`offset_from`](#method.offset_from)
847 /// would compute, but with the added precondition that the offset is
848 /// guaranteed to be non-negative. This method is equivalent to
849 /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
850 /// but it provides slightly more information to the optimizer, which can
851 /// sometimes allow it to optimize slightly better with some backends.
852 ///
853 /// This method can be thought of as recovering the `count` that was passed
854 /// to [`add`](#method.add) (or, with the parameters in the other order,
855 /// to [`sub`](#method.sub)). The following are all equivalent, assuming
856 /// that their safety preconditions are met:
857 /// ```rust
858 /// # unsafe fn blah(ptr: std::ptr::NonNull<u32>, origin: std::ptr::NonNull<u32>, count: usize) -> bool { unsafe {
859 /// ptr.offset_from_unsigned(origin) == count
860 /// # &&
861 /// origin.add(count) == ptr
862 /// # &&
863 /// ptr.sub(count) == origin
864 /// # } }
865 /// ```
866 ///
867 /// # Safety
868 ///
869 /// - The distance between the pointers must be non-negative (`self >= origin`)
870 ///
871 /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
872 /// apply to this method as well; see it for the full details.
873 ///
874 /// Importantly, despite the return type of this method being able to represent
875 /// a larger offset, it's still *not permitted* to pass pointers which differ
876 /// by more than `isize::MAX` *bytes*. As such, the result of this method will
877 /// always be less than or equal to `isize::MAX as usize`.
878 ///
879 /// # Panics
880 ///
881 /// This function panics if `T` is a Zero-Sized Type ("ZST").
882 ///
883 /// # Examples
884 ///
885 /// ```
886 /// use std::ptr::NonNull;
887 ///
888 /// let a = [0; 5];
889 /// let ptr1: NonNull<u32> = NonNull::from(&a[1]);
890 /// let ptr2: NonNull<u32> = NonNull::from(&a[3]);
891 /// unsafe {
892 /// assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
893 /// assert_eq!(ptr1.add(2), ptr2);
894 /// assert_eq!(ptr2.sub(2), ptr1);
895 /// assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
896 /// }
897 ///
898 /// // This would be incorrect, as the pointers are not correctly ordered:
899 /// // ptr1.offset_from_unsigned(ptr2)
900 /// ```
901 #[inline]
902 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
903 #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
904 #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
905 pub const unsafe fn offset_from_unsigned(self, subtracted: NonNull<T>) -> usize
906 where
907 T: Sized,
908 {
909 // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`.
910 unsafe { self.as_ptr().offset_from_unsigned(subtracted.as_ptr()) }
911 }
912
913 /// Calculates the distance between two pointers within the same allocation, *where it's known that
914 /// `self` is equal to or greater than `origin`*. The returned value is in
915 /// units of **bytes**.
916 ///
917 /// This is purely a convenience for casting to a `u8` pointer and
918 /// using [`offset_from_unsigned`][NonNull::offset_from_unsigned] on it.
919 /// See that method for documentation and safety requirements.
920 ///
921 /// For non-`Sized` pointees this operation considers only the data pointers,
922 /// ignoring the metadata.
923 #[inline(always)]
924 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
925 #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
926 #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
927 pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: NonNull<U>) -> usize {
928 // SAFETY: the caller must uphold the safety contract for `byte_offset_from_unsigned`.
929 unsafe { self.as_ptr().byte_offset_from_unsigned(origin.as_ptr()) }
930 }
931
932 /// Reads the value from `self` without moving it. This leaves the
933 /// memory in `self` unchanged.
934 ///
935 /// See [`ptr::read`] for safety concerns and examples.
936 ///
937 /// [`ptr::read`]: crate::ptr::read()
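    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let x = 12u32;
    /// let ptr = NonNull::from(&x);
    /// // SAFETY: `ptr` points to a valid, initialized `u32`.
    /// assert_eq!(unsafe { ptr.read() }, 12);
    /// ```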
938 #[inline]
939 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
940 #[stable(feature = "non_null_convenience", since = "1.80.0")]
941 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
942 pub const unsafe fn read(self) -> T
943 where
944 T: Sized,
945 {
946 // SAFETY: the caller must uphold the safety contract for `read`.
947 unsafe { ptr::read(self.as_ptr()) }
948 }
949
950 /// Performs a volatile read of the value from `self` without moving it. This
951 /// leaves the memory in `self` unchanged.
952 ///
953 /// Volatile operations are intended to act on I/O memory, and are guaranteed
954 /// to not be elided or reordered by the compiler across other volatile
955 /// operations.
956 ///
957 /// See [`ptr::read_volatile`] for safety concerns and examples.
958 ///
959 /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
960 #[inline]
961 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
962 #[stable(feature = "non_null_convenience", since = "1.80.0")]
963 pub unsafe fn read_volatile(self) -> T
964 where
965 T: Sized,
966 {
967 // SAFETY: the caller must uphold the safety contract for `read_volatile`.
968 unsafe { ptr::read_volatile(self.as_ptr()) }
969 }
970
971 /// Reads the value from `self` without moving it. This leaves the
972 /// memory in `self` unchanged.
973 ///
974 /// Unlike `read`, the pointer may be unaligned.
975 ///
976 /// See [`ptr::read_unaligned`] for safety concerns and examples.
977 ///
978 /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
979 #[inline]
980 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
981 #[stable(feature = "non_null_convenience", since = "1.80.0")]
982 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
983 pub const unsafe fn read_unaligned(self) -> T
984 where
985 T: Sized,
986 {
987 // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
988 unsafe { ptr::read_unaligned(self.as_ptr()) }
989 }
990
991 /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
992 /// and destination may overlap.
993 ///
994 /// NOTE: this has the *same* argument order as [`ptr::copy`].
995 ///
996 /// See [`ptr::copy`] for safety concerns and examples.
997 ///
998 /// [`ptr::copy`]: crate::ptr::copy()
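    ///
    /// # Examples
    ///
    /// A minimal sketch copying two elements between arrays:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let src = [1u32, 2];
    /// let mut dst = [0u32; 2];
    /// let src_ptr = NonNull::new(src.as_ptr().cast_mut()).unwrap();
    /// let dst_ptr = NonNull::new(dst.as_mut_ptr()).unwrap();
    /// // SAFETY: both pointers are valid for two `u32`s; `src` is only read through
    /// // `src_ptr`, and the regions do not overlap (overlap would also be fine here).
    /// unsafe { src_ptr.copy_to(dst_ptr, 2) };
    /// assert_eq!(dst, [1, 2]);
    /// ```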
999 #[inline(always)]
1000 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1001 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1002 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1003 pub const unsafe fn copy_to(self, dest: NonNull<T>, count: usize)
1004 where
1005 T: Sized,
1006 {
1007 // SAFETY: the caller must uphold the safety contract for `copy`.
1008 unsafe { ptr::copy(self.as_ptr(), dest.as_ptr(), count) }
1009 }
1010
1011 /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1012 /// and destination may *not* overlap.
1013 ///
1014 /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
1015 ///
1016 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1017 ///
1018 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1019 #[inline(always)]
1020 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1021 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1022 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1023 pub const unsafe fn copy_to_nonoverlapping(self, dest: NonNull<T>, count: usize)
1024 where
1025 T: Sized,
1026 {
1027 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1028 unsafe { ptr::copy_nonoverlapping(self.as_ptr(), dest.as_ptr(), count) }
1029 }
1030
1031 /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1032 /// and destination may overlap.
1033 ///
1034 /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
1035 ///
1036 /// See [`ptr::copy`] for safety concerns and examples.
1037 ///
1038 /// [`ptr::copy`]: crate::ptr::copy()
1039 #[inline(always)]
1040 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1041 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1042 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1043 pub const unsafe fn copy_from(self, src: NonNull<T>, count: usize)
1044 where
1045 T: Sized,
1046 {
1047 // SAFETY: the caller must uphold the safety contract for `copy`.
1048 unsafe { ptr::copy(src.as_ptr(), self.as_ptr(), count) }
1049 }
1050
1051 /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1052 /// and destination may *not* overlap.
1053 ///
1054 /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
1055 ///
1056 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1057 ///
1058 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1059 #[inline(always)]
1060 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1061 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1062 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1063 pub const unsafe fn copy_from_nonoverlapping(self, src: NonNull<T>, count: usize)
1064 where
1065 T: Sized,
1066 {
1067 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1068 unsafe { ptr::copy_nonoverlapping(src.as_ptr(), self.as_ptr(), count) }
1069 }
1070
1071 /// Executes the destructor (if any) of the pointed-to value.
1072 ///
1073 /// See [`ptr::drop_in_place`] for safety concerns and examples.
1074 ///
1075 /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place()
1076 #[inline(always)]
1077 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1078 pub unsafe fn drop_in_place(self) {
1079 // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
1080 unsafe { ptr::drop_in_place(self.as_ptr()) }
1081 }
1082
1083 /// Overwrites a memory location with the given value without reading or
1084 /// dropping the old value.
1085 ///
1086 /// See [`ptr::write`] for safety concerns and examples.
1087 ///
1088 /// [`ptr::write`]: crate::ptr::write()
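    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNull::from(&mut x);
    /// // SAFETY: `ptr` is valid for writes and properly aligned.
    /// unsafe { ptr.write(9) };
    /// assert_eq!(x, 9);
    /// ```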
1089 #[inline(always)]
1090 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1091 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1092 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1093 pub const unsafe fn write(self, val: T)
1094 where
1095 T: Sized,
1096 {
1097 // SAFETY: the caller must uphold the safety contract for `write`.
1098 unsafe { ptr::write(self.as_ptr(), val) }
1099 }
1100
1101 /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
1102 /// bytes of memory starting at `self` to `val`.
1103 ///
1104 /// See [`ptr::write_bytes`] for safety concerns and examples.
1105 ///
1106 /// [`ptr::write_bytes`]: crate::ptr::write_bytes()
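    ///
    /// # Examples
    ///
    /// A minimal sketch zeroing a small byte buffer:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut buf = [0xff_u8; 4];
    /// let len = buf.len();
    /// let ptr = NonNull::new(buf.as_mut_ptr()).unwrap();
    /// // SAFETY: `ptr` is valid for writes of `len` bytes.
    /// unsafe { ptr.write_bytes(0, len) };
    /// assert_eq!(buf, [0; 4]);
    /// ```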
1107 #[inline(always)]
1108 #[doc(alias = "memset")]
1109 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1110 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1111 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1112 pub const unsafe fn write_bytes(self, val: u8, count: usize)
1113 where
1114 T: Sized,
1115 {
1116 // SAFETY: the caller must uphold the safety contract for `write_bytes`.
1117 unsafe { ptr::write_bytes(self.as_ptr(), val, count) }
1118 }
1119
1120 /// Performs a volatile write of a memory location with the given value without
1121 /// reading or dropping the old value.
1122 ///
1123 /// Volatile operations are intended to act on I/O memory, and are guaranteed
1124 /// to not be elided or reordered by the compiler across other volatile
1125 /// operations.
1126 ///
1127 /// See [`ptr::write_volatile`] for safety concerns and examples.
1128 ///
1129 /// [`ptr::write_volatile`]: crate::ptr::write_volatile()
1130 #[inline(always)]
1131 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1132 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1133 pub unsafe fn write_volatile(self, val: T)
1134 where
1135 T: Sized,
1136 {
1137 // SAFETY: the caller must uphold the safety contract for `write_volatile`.
1138 unsafe { ptr::write_volatile(self.as_ptr(), val) }
1139 }
1140
1141 /// Overwrites a memory location with the given value without reading or
1142 /// dropping the old value.
1143 ///
1144 /// Unlike `write`, the pointer may be unaligned.
1145 ///
1146 /// See [`ptr::write_unaligned`] for safety concerns and examples.
1147 ///
1148 /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned()
1149 #[inline(always)]
1150 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1151 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1152 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1153 pub const unsafe fn write_unaligned(self, val: T)
1154 where
1155 T: Sized,
1156 {
1157 // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
1158 unsafe { ptr::write_unaligned(self.as_ptr(), val) }
1159 }
1160
1161 /// Replaces the value at `self` with `src`, returning the old
1162 /// value, without dropping either.
1163 ///
1164 /// See [`ptr::replace`] for safety concerns and examples.
1165 ///
1166 /// [`ptr::replace`]: crate::ptr::replace()
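    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 1u32;
    /// let ptr = NonNull::from(&mut x);
    /// // SAFETY: `ptr` is valid for reads and writes and points to an initialized value.
    /// let old = unsafe { ptr.replace(2) };
    /// assert_eq!(old, 1);
    /// assert_eq!(x, 2);
    /// ```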
1167 #[inline(always)]
1168 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1169 #[rustc_const_stable(feature = "const_inherent_ptr_replace", since = "CURRENT_RUSTC_VERSION")]
1170 pub const unsafe fn replace(self, src: T) -> T
1171 where
1172 T: Sized,
1173 {
1174 // SAFETY: the caller must uphold the safety contract for `replace`.
1175 unsafe { ptr::replace(self.as_ptr(), src) }
1176 }
1177
1178 /// Swaps the values at two mutable locations of the same type, without
1179 /// deinitializing either. They may overlap, unlike `mem::swap` which is
1180 /// otherwise equivalent.
1181 ///
1182 /// See [`ptr::swap`] for safety concerns and examples.
1183 ///
1184 /// [`ptr::swap`]: crate::ptr::swap()
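    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut a = 1u32;
    /// let mut b = 2u32;
    /// let pa = NonNull::from(&mut a);
    /// let pb = NonNull::from(&mut b);
    /// // SAFETY: both pointers are valid for reads and writes of a `u32`.
    /// unsafe { pa.swap(pb) };
    /// assert_eq!((a, b), (2, 1));
    /// ```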
1185 #[inline(always)]
1186 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1187 #[rustc_const_stable(feature = "const_swap", since = "1.85.0")]
1188 pub const unsafe fn swap(self, with: NonNull<T>)
1189 where
1190 T: Sized,
1191 {
1192 // SAFETY: the caller must uphold the safety contract for `swap`.
1193 unsafe { ptr::swap(self.as_ptr(), with.as_ptr()) }
1194 }
1195
1196 /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
1197 /// `align`.
1198 ///
1199 /// If it is not possible to align the pointer, the implementation returns
1200 /// `usize::MAX`.
1201 ///
1202 /// The offset is expressed in number of `T` elements, and not bytes.
1203 ///
1204 /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
1205 /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
1206 /// the returned offset is correct in all terms other than alignment.
1207 ///
1208 /// When this is called during compile-time evaluation (which is unstable), the implementation
1209 /// may return `usize::MAX` in cases where that can never happen at runtime. This is because the
1210 /// actual alignment of pointers is not known yet during compile-time, so an offset with
1211 /// guaranteed alignment can sometimes not be computed. For example, a buffer declared as `[u8;
1212 /// N]` might be allocated at an odd or an even address, but at compile-time this is not yet
1213 /// known, so the execution has to be correct for either choice. It is therefore impossible to
1214 /// find an offset that is guaranteed to be 2-aligned. (This behavior is subject to change, as usual
1215 /// for unstable APIs.)
1216 ///
1217 /// # Panics
1218 ///
1219 /// The function panics if `align` is not a power-of-two.
1220 ///
1221 /// # Examples
1222 ///
1223 /// Accessing adjacent `u8` as `u16`
1224 ///
1225 /// ```
1226 /// use std::ptr::NonNull;
1227 ///
1228 /// # unsafe {
1229 /// let x = [5_u8, 6, 7, 8, 9];
1230 /// let ptr = NonNull::new(x.as_ptr() as *mut u8).unwrap();
1231 /// let offset = ptr.align_offset(align_of::<u16>());
1232 ///
1233 /// if offset < x.len() - 1 {
1234 /// let u16_ptr = ptr.add(offset).cast::<u16>();
1235 /// assert!(u16_ptr.read() == u16::from_ne_bytes([5, 6]) || u16_ptr.read() == u16::from_ne_bytes([6, 7]));
1236 /// } else {
1237 /// // while the pointer can be aligned via `offset`, it would point
1238 /// // outside the allocation
1239 /// }
1240 /// # }
1241 /// ```
1242 #[inline]
1243 #[must_use]
1244 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1245 pub fn align_offset(self, align: usize) -> usize
1246 where
1247 T: Sized,
1248 {
1249 if !align.is_power_of_two() {
1250 panic!("align_offset: align is not a power-of-two");
1251 }
1252
1253 {
1254 // SAFETY: `align` has been checked to be a power of 2 above.
1255 unsafe { ptr::align_offset(self.as_ptr(), align) }
1256 }
1257 }
1258
1259 /// Returns whether the pointer is properly aligned for `T`.
1260 ///
1261 /// # Examples
1262 ///
1263 /// ```
1264 /// use std::ptr::NonNull;
1265 ///
1266 /// // On some platforms, the alignment of i32 is less than 4.
1267 /// #[repr(align(4))]
1268 /// struct AlignedI32(i32);
1269 ///
1270 /// let data = AlignedI32(42);
1271 /// let ptr = NonNull::<AlignedI32>::from(&data);
1272 ///
1273 /// assert!(ptr.is_aligned());
1274 /// assert!(!NonNull::new(ptr.as_ptr().wrapping_byte_add(1)).unwrap().is_aligned());
1275 /// ```
1276 #[inline]
1277 #[must_use]
1278 #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
1279 pub fn is_aligned(self) -> bool
1280 where
1281 T: Sized,
1282 {
1283 self.as_ptr().is_aligned()
1284 }
1285
1286 /// Returns whether the pointer is aligned to `align`.
1287 ///
1288 /// For non-`Sized` pointees this operation considers only the data pointer,
1289 /// ignoring the metadata.
1290 ///
1291 /// # Panics
1292 ///
1293 /// The function panics if `align` is not a power-of-two (this includes 0).
1294 ///
1295 /// # Examples
1296 ///
1297 /// ```
1298 /// #![feature(pointer_is_aligned_to)]
1299 ///
1300 /// // On some platforms, the alignment of i32 is less than 4.
1301 /// #[repr(align(4))]
1302 /// struct AlignedI32(i32);
1303 ///
1304 /// let data = AlignedI32(42);
1305 /// let ptr = &data as *const AlignedI32;
1306 ///
1307 /// assert!(ptr.is_aligned_to(1));
1308 /// assert!(ptr.is_aligned_to(2));
1309 /// assert!(ptr.is_aligned_to(4));
1310 ///
1311 /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
1312 /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
1313 ///
1314 /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
1315 /// ```
1316 #[inline]
1317 #[must_use]
1318 #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
1319 pub fn is_aligned_to(self, align: usize) -> bool {
1320 self.as_ptr().is_aligned_to(align)
1321 }
1322}

impl<T> NonNull<[T]> {
    /// Creates a non-null raw slice from a thin pointer and a length.
    ///
    /// The `len` argument is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, but dereferencing the return value is unsafe.
    /// See the documentation of [`slice::from_raw_parts`] for slice safety requirements.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr::NonNull;
    ///
    /// // create a slice pointer when starting out with a pointer to the first element
    /// let mut x = [5, 6, 7];
    /// let nonnull_pointer = NonNull::new(x.as_mut_ptr()).unwrap();
    /// let slice = NonNull::slice_from_raw_parts(nonnull_pointer, 3);
    /// assert_eq!(unsafe { slice.as_ref()[2] }, 7);
    /// ```
    ///
    /// (Note that this example artificially demonstrates a use of this method,
    /// but `let slice = NonNull::from(&x[..]);` would be a better way to write code like this.)
    #[stable(feature = "nonnull_slice_from_raw_parts", since = "1.70.0")]
    #[rustc_const_stable(feature = "const_slice_from_raw_parts_mut", since = "1.83.0")]
    #[must_use]
    #[inline]
    pub const fn slice_from_raw_parts(data: NonNull<T>, len: usize) -> Self {
        // SAFETY: `data` is a `NonNull` pointer which is necessarily non-null
        unsafe { Self::new_unchecked(super::slice_from_raw_parts_mut(data.as_ptr(), len)) }
    }

    /// Returns the length of a non-null raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the non-null raw slice cannot be dereferenced to a slice
    /// because the pointer does not have a valid address.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr::NonNull;
    ///
    /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[stable(feature = "slice_ptr_len_nonnull", since = "1.63.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_len_nonnull", since = "1.63.0")]
    #[must_use]
    #[inline]
    pub const fn len(self) -> usize {
        self.as_ptr().len()
    }

    /// Returns `true` if the non-null raw slice has a length of 0.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr::NonNull;
    ///
    /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
    /// assert!(!slice.is_empty());
    /// ```
    #[stable(feature = "slice_ptr_is_empty_nonnull", since = "1.79.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_is_empty_nonnull", since = "1.79.0")]
    #[must_use]
    #[inline]
    pub const fn is_empty(self) -> bool {
        self.len() == 0
    }

    /// Returns a non-null pointer to the slice's buffer.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_get)]
    /// use std::ptr::NonNull;
    ///
    /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
    /// assert_eq!(slice.as_non_null_ptr(), NonNull::<i8>::dangling());
    /// ```
    #[inline]
    #[must_use]
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    pub const fn as_non_null_ptr(self) -> NonNull<T> {
        self.cast()
    }

    /// Returns a raw pointer to the slice's buffer.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_get)]
    /// use std::ptr::NonNull;
    ///
    /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
    /// assert_eq!(slice.as_mut_ptr(), NonNull::<i8>::dangling().as_ptr());
    /// ```
    #[inline]
    #[must_use]
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[rustc_never_returns_null_ptr]
    pub const fn as_mut_ptr(self) -> *mut T {
        self.as_non_null_ptr().as_ptr()
    }

    /// Returns a shared reference to a slice of possibly uninitialized values. In contrast to
    /// [`as_ref`], this does not require the values to be initialized.
    ///
    /// For the mutable counterpart see [`as_uninit_slice_mut`].
    ///
    /// [`as_ref`]: NonNull::as_ref
    /// [`as_uninit_slice_mut`]: NonNull::as_uninit_slice_mut
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that all of the following is true:
    ///
    /// * The pointer must be [valid] for reads for `ptr.len() * size_of::<T>()` many bytes,
    ///   and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single allocated object!
    ///       Slices can never span across multiple allocated objects.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get mutated (except inside `UnsafeCell`).
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts`].
    ///
    /// [valid]: crate::ptr#safety
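    ///
    /// # Examples
    ///
    /// A small sketch, mirroring the example for [`as_uninit_slice_mut`]: viewing freshly
    /// allocated (and therefore possibly uninitialized) memory as `MaybeUninit` elements.
    ///
    /// ```rust
    /// #![feature(allocator_api, ptr_as_uninit)]
    ///
    /// use std::alloc::{Allocator, Layout, Global};
    /// use std::mem::MaybeUninit;
    /// use std::ptr::NonNull;
    ///
    /// let memory: NonNull<[u8]> = Global.allocate(Layout::new::<[u8; 32]>())?;
    /// // This is safe as `memory` is valid for reads for `memory.len()` many bytes, and the
    /// // possibly uninitialized contents are only exposed as `MaybeUninit<u8>`.
    /// # #[allow(unused_variables)]
    /// let slice: &[MaybeUninit<u8>] = unsafe { memory.as_uninit_slice() };
    /// # // Prevent leaks for Miri.
    /// # unsafe { Global.deallocate(memory.cast(), Layout::new::<[u8; 32]>()); }
    /// # Ok::<_, std::alloc::AllocError>(())
    /// ```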
    #[inline]
    #[must_use]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_slice<'a>(self) -> &'a [MaybeUninit<T>] {
        // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
        unsafe { slice::from_raw_parts(self.cast().as_ptr(), self.len()) }
    }

    /// Returns a unique reference to a slice of possibly uninitialized values. In contrast to
    /// [`as_mut`], this does not require the values to be initialized.
    ///
    /// For the shared counterpart see [`as_uninit_slice`].
    ///
    /// [`as_mut`]: NonNull::as_mut
    /// [`as_uninit_slice`]: NonNull::as_uninit_slice
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that all of the following is true:
    ///
    /// * The pointer must be [valid] for reads and writes for `ptr.len() * size_of::<T>()`
    ///   many bytes, and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single allocated object!
    ///       Slices can never span across multiple allocated objects.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get accessed (read or written) through any other pointer.
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts_mut`].
    ///
    /// [valid]: crate::ptr#safety
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(allocator_api, ptr_as_uninit)]
    ///
    /// use std::alloc::{Allocator, Layout, Global};
    /// use std::mem::MaybeUninit;
    /// use std::ptr::NonNull;
    ///
    /// let memory: NonNull<[u8]> = Global.allocate(Layout::new::<[u8; 32]>())?;
    /// // This is safe as `memory` is valid for reads and writes for `memory.len()` many bytes.
    /// // Note that calling `memory.as_mut()` is not allowed here as the content may be uninitialized.
    /// # #[allow(unused_variables)]
    /// let slice: &mut [MaybeUninit<u8>] = unsafe { memory.as_uninit_slice_mut() };
    /// # // Prevent leaks for Miri.
    /// # unsafe { Global.deallocate(memory.cast(), Layout::new::<[u8; 32]>()); }
    /// # Ok::<_, std::alloc::AllocError>(())
    /// ```
    #[inline]
    #[must_use]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_slice_mut<'a>(self) -> &'a mut [MaybeUninit<T>] {
        // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
        unsafe { slice::from_raw_parts_mut(self.cast().as_ptr(), self.len()) }
    }

    /// Returns a raw pointer to an element or subslice, without doing bounds
    /// checking.
    ///
    /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
    /// is *[undefined behavior]* even if the resulting pointer is not used.
    ///
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(slice_ptr_get)]
    /// use std::ptr::NonNull;
    ///
    /// let x = &mut [1, 2, 4];
    /// let x = NonNull::slice_from_raw_parts(NonNull::new(x.as_mut_ptr()).unwrap(), x.len());
    ///
    /// unsafe {
    ///     assert_eq!(x.get_unchecked_mut(1).as_ptr(), x.as_non_null_ptr().as_ptr().add(1));
    /// }
    /// ```
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[inline]
    pub unsafe fn get_unchecked_mut<I>(self, index: I) -> NonNull<I::Output>
    where
        I: SliceIndex<[T]>,
    {
        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
        // As a consequence, the resulting pointer cannot be null.
        unsafe { NonNull::new_unchecked(self.as_ptr().get_unchecked_mut(index)) }
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> Clone for NonNull<T> {
    #[inline(always)]
    fn clone(&self) -> Self {
        *self
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> Copy for NonNull<T> {}

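// These impls let `NonNull<T>` take part in unsizing coercions (e.g. `NonNull<[T; 4]>` to
// `NonNull<[T]>`, or `NonNull<Concrete>` to `NonNull<dyn Trait>`) and in dynamic dispatch for
// custom method receivers, mirroring the corresponding impls for raw pointers and references.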
#[unstable(feature = "coerce_unsized", issue = "18598")]
impl<T: ?Sized, U: ?Sized> CoerceUnsized<NonNull<U>> for NonNull<T> where T: Unsize<U> {}

#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized, U: ?Sized> DispatchFromDyn<NonNull<U>> for NonNull<T> where T: Unsize<U> {}

#[stable(feature = "pin", since = "1.33.0")]
unsafe impl<T: ?Sized> PinCoerceUnsized for NonNull<T> {}

#[unstable(feature = "pointer_like_trait", issue = "none")]
impl<T> core::marker::PointerLike for NonNull<T> {}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> fmt::Debug for NonNull<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&self.as_ptr(), f)
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> fmt::Pointer for NonNull<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&self.as_ptr(), f)
    }
}

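// Equality, ordering, and hashing all defer to the raw pointer, so for wide pointers the
// metadata (slice length or vtable pointer) takes part as well as the data address. The
// `ambiguous_wide_pointer_comparisons` lint is allowed below to keep these impls consistent
// with the corresponding raw-pointer operations.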
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> Eq for NonNull<T> {}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> PartialEq for NonNull<T> {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn eq(&self, other: &Self) -> bool {
        self.as_ptr() == other.as_ptr()
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> Ord for NonNull<T> {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn cmp(&self, other: &Self) -> Ordering {
        self.as_ptr().cmp(&other.as_ptr())
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> PartialOrd for NonNull<T> {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.as_ptr().partial_cmp(&other.as_ptr())
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> hash::Hash for NonNull<T> {
    #[inline]
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        self.as_ptr().hash(state)
    }
}

#[unstable(feature = "ptr_internals", issue = "none")]
impl<T: ?Sized> From<Unique<T>> for NonNull<T> {
    #[inline]
    fn from(unique: Unique<T>) -> Self {
        unique.as_non_null_ptr()
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> From<&mut T> for NonNull<T> {
    /// Converts a `&mut T` to a `NonNull<T>`.
    ///
    /// This conversion is safe and infallible since references cannot be null.
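    ///
    /// # Examples
    ///
    /// A short sketch of the conversion; writing through the pointer is allowed here
    /// because it was derived from an exclusive reference and nothing else accesses
    /// `x` in between:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNull::from(&mut x);
    ///
    /// unsafe { ptr.as_ptr().write(5) };
    /// assert_eq!(x, 5);
    /// ```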
    #[inline]
    fn from(r: &mut T) -> Self {
        NonNull::from_mut(r)
    }
}

#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> From<&T> for NonNull<T> {
    /// Converts a `&T` to a `NonNull<T>`.
    ///
    /// This conversion is safe and infallible since references cannot be null.
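    ///
    /// # Examples
    ///
    /// A short sketch of the conversion; the resulting pointer may be read through, but
    /// must not be written through, since it was derived from a shared reference:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let x = 42u32;
    /// let ptr = NonNull::from(&x);
    ///
    /// assert_eq!(unsafe { *ptr.as_ptr() }, 42);
    /// ```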
    #[inline]
    fn from(r: &T) -> Self {
        NonNull::from_ref(r)
    }
}