2 use crate::cmp::Ordering::{self, Equal, Greater, Less};
5 use crate::slice::{self, SliceIndex};
8 impl<T: ?Sized> *const T {
/// Returns `true` if the pointer is null.
///
/// Note that unsized types have many possible null pointers, as only the
/// raw data pointer is considered, not their length, vtable, etc.
/// Therefore, two pointers that are null may still not compare equal to
/// each other.
///
/// ## Behavior during const evaluation
///
/// When this function is used during const evaluation, it may return `false` for pointers
/// that turn out to be null at runtime. Specifically, when a pointer to some memory
/// is offset beyond its bounds in such a way that the resulting pointer is null,
/// the function will still return `false`. There is no way for CTFE to know
/// the absolute position of that memory, so we cannot tell if the pointer is
/// null or not.
///
/// # Examples
///
/// ```
/// let s: &str = "Follow the rabbit";
/// let ptr: *const u8 = s.as_ptr();
/// assert!(!ptr.is_null());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_ptr_is_null", issue = "74939")]
pub const fn is_null(self) -> bool {
    // Compare via a cast to a thin pointer, so that fat pointers only
    // consider their "data" part for null-ness.
    (self as *const u8).guaranteed_eq(null())
/// Casts to a pointer of another type.
///
/// The returned pointer refers to the same location, reinterpreted as
/// pointing at a `U`.
#[stable(feature = "ptr_cast", since = "1.38.0")]
#[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
pub const fn cast<U>(self) -> *const U {
/// Decomposes a (possibly wide) pointer into its address and metadata components.
///
/// The pointer can be later reconstructed with [`from_raw_parts`].
#[cfg(not(bootstrap))]
#[unstable(feature = "ptr_metadata", issue = "81513")]
#[rustc_const_unstable(feature = "ptr_metadata", issue = "81513")]
pub const fn to_raw_parts(self) -> (*const (), <T as super::Pointee>::Metadata) {
    (self.cast(), metadata(self))
/// Returns `None` if the pointer is null, or else returns a shared reference to
/// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`]
/// must be used instead.
///
/// [`as_uninit_ref`]: #method.as_uninit_ref
///
/// # Safety
///
/// When calling this method, you have to ensure that *either* the pointer is NULL *or*
/// all of the following is true:
///
/// * The pointer must be properly aligned.
///
/// * It must be "dereferencable" in the sense defined in [the module documentation].
///
/// * The pointer must point to an initialized instance of `T`.
///
/// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
///   In particular, for the duration of this lifetime, the memory the pointer points to must
///   not get mutated (except inside `UnsafeCell`).
///
/// This applies even if the result of this method is unused!
/// (The part about being initialized is not yet fully decided, but until
/// it is, the only safe approach is to ensure that they are indeed initialized.)
///
/// [the module documentation]: crate::ptr#safety
///
/// # Examples
///
/// ```
/// let ptr: *const u8 = &10u8 as *const u8;
///
/// unsafe {
///     if let Some(val_back) = ptr.as_ref() {
///         println!("We got back the value: {}!", val_back);
///     }
/// }
/// ```
///
/// # Null-unchecked version
///
/// If you are sure the pointer can never be null and are looking for some kind of
/// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can
/// dereference the pointer directly.
///
/// ```
/// let ptr: *const u8 = &10u8 as *const u8;
///
/// unsafe {
///     let val_back = &*ptr;
///     println!("We got back the value: {}!", val_back);
/// }
/// ```
#[stable(feature = "ptr_as_ref", since = "1.9.0")]
pub unsafe fn as_ref<'a>(self) -> Option<&'a T> {
    // SAFETY: the caller must guarantee that `self` is valid
    // for a reference if it isn't null.
    if self.is_null() { None } else { unsafe { Some(&*self) } }
/// Returns `None` if the pointer is null, or else returns a shared reference to
/// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
/// that the value has to be initialized.
///
/// [`as_ref`]: #method.as_ref
///
/// # Safety
///
/// When calling this method, you have to ensure that *either* the pointer is NULL *or*
/// all of the following is true:
///
/// * The pointer must be properly aligned.
///
/// * It must be "dereferencable" in the sense defined in [the module documentation].
///
/// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
///   In particular, for the duration of this lifetime, the memory the pointer points to must
///   not get mutated (except inside `UnsafeCell`).
///
/// This applies even if the result of this method is unused!
///
/// [the module documentation]: crate::ptr#safety
///
/// # Examples
///
/// ```
/// #![feature(ptr_as_uninit)]
///
/// let ptr: *const u8 = &10u8 as *const u8;
///
/// unsafe {
///     if let Some(val_back) = ptr.as_uninit_ref() {
///         println!("We got back the value: {}!", val_back.assume_init());
///     }
/// }
/// ```
#[unstable(feature = "ptr_as_uninit", issue = "75402")]
pub unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
    // SAFETY: the caller must guarantee that `self` meets all the
    // requirements for a reference.
    if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
/// Calculates the offset from a pointer.
///
/// `count` is in units of T; e.g., a `count` of 3 represents a pointer
/// offset of `3 * size_of::<T>()` bytes.
///
/// # Safety
///
/// If any of the following conditions are violated, the result is Undefined
/// Behavior:
///
/// * Both the starting and resulting pointer must be either in bounds or one
///   byte past the end of the same allocated object. Note that in Rust,
///   every (stack-allocated) variable is considered a separate allocated object.
///
/// * The computed offset, **in bytes**, cannot overflow an `isize`.
///
/// * The offset being in bounds cannot rely on "wrapping around" the address
///   space. That is, the infinite-precision sum, **in bytes**, must fit in a `usize`.
///
/// The compiler and standard library generally try to ensure allocations
/// never reach a size where an offset is a concern. For instance, `Vec`
/// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
/// `vec.as_ptr().add(vec.len())` is always safe.
///
/// Most platforms fundamentally can't even construct such an allocation.
/// For instance, no known 64-bit platform can ever serve a request
/// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
/// However, some 32-bit and 16-bit platforms may successfully serve a request for
/// more than `isize::MAX` bytes with things like Physical Address
/// Extension. As such, memory acquired directly from allocators or memory
/// mapped files *may* be too large to handle with this function.
///
/// Consider using [`wrapping_offset`] instead if these constraints are
/// difficult to satisfy. The only advantage of this method is that it
/// enables more aggressive compiler optimizations.
///
/// [`wrapping_offset`]: #method.wrapping_offset
///
/// # Examples
///
/// ```
/// let s: &str = "123";
/// let ptr: *const u8 = s.as_ptr();
///
/// unsafe {
///     println!("{}", *ptr.offset(1) as char);
///     println!("{}", *ptr.offset(2) as char);
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
pub const unsafe fn offset(self, count: isize) -> *const T
    // SAFETY: the caller must uphold the safety contract for `offset`.
    unsafe { intrinsics::offset(self, count) }
/// Calculates the offset from a pointer using wrapping arithmetic.
///
/// `count` is in units of T; e.g., a `count` of 3 represents a pointer
/// offset of `3 * size_of::<T>()` bytes.
///
/// # Safety
///
/// This operation itself is always safe, but using the resulting pointer is not.
///
/// The resulting pointer remains attached to the same allocated object that `self` points to.
/// It may *not* be used to access a different allocated object. Note that in Rust, every
/// (stack-allocated) variable is considered a separate allocated object.
///
/// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
/// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
/// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
/// `x` and `y` point into the same allocated object.
///
/// Compared to [`offset`], this method basically delays the requirement of staying within the
/// same allocated object: [`offset`] is immediate Undefined Behavior when crossing object
/// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
/// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
/// can be optimized better and is thus preferable in performance-sensitive code.
///
/// The delayed check only considers the value of the pointer that was dereferenced, not the
/// intermediate values used during the computation of the final result. For example,
/// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
/// words, leaving the allocated object and then re-entering it later is permitted.
///
/// If you need to cross object boundaries, cast the pointer to an integer and
/// do the arithmetic there.
///
/// [`offset`]: #method.offset
///
/// # Examples
///
/// ```
/// // Iterate using a raw pointer in increments of two elements
/// let data = [1u8, 2, 3, 4, 5];
/// let mut ptr: *const u8 = data.as_ptr();
/// let step = 2;
/// let end_rounded_up = ptr.wrapping_offset(6);
///
/// // This loop prints "1, 3, 5, "
/// while ptr != end_rounded_up {
///     unsafe {
///         print!("{}, ", *ptr);
///     }
///     ptr = ptr.wrapping_offset(step);
/// }
/// ```
#[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
pub const fn wrapping_offset(self, count: isize) -> *const T
    // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
    unsafe { intrinsics::arith_offset(self, count) }
/// Calculates the distance between two pointers. The returned value is in
/// units of T: the distance in bytes is divided by `mem::size_of::<T>()`.
///
/// This function is the inverse of [`offset`].
///
/// [`offset`]: #method.offset
///
/// # Safety
///
/// If any of the following conditions are violated, the result is Undefined
/// Behavior:
///
/// * Both the starting and other pointer must be either in bounds or one
///   byte past the end of the same allocated object. Note that in Rust,
///   every (stack-allocated) variable is considered a separate allocated object.
///
/// * Both pointers must be *derived from* a pointer to the same object.
///   (See below for an example.)
///
/// * The distance between the pointers, in bytes, must be an exact multiple
///   of the size of `T`.
///
/// * The distance between the pointers, **in bytes**, cannot overflow an `isize`.
///
/// * The distance being in bounds cannot rely on "wrapping around" the address space.
///
/// Rust types are never larger than `isize::MAX` and Rust allocations never wrap around the
/// address space, so two pointers within some value of any Rust type `T` will always satisfy
/// the last two conditions. The standard library also generally ensures that allocations
/// never reach a size where an offset is a concern. For instance, `Vec` and `Box` ensure they
/// never allocate more than `isize::MAX` bytes, so `ptr_into_vec.offset_from(vec.as_ptr())`
/// always satisfies the last two conditions.
///
/// Most platforms fundamentally can't even construct such a large allocation.
/// For instance, no known 64-bit platform can ever serve a request
/// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
/// However, some 32-bit and 16-bit platforms may successfully serve a request for
/// more than `isize::MAX` bytes with things like Physical Address
/// Extension. As such, memory acquired directly from allocators or memory
/// mapped files *may* be too large to handle with this function.
/// (Note that [`offset`] and [`add`] also have a similar limitation and hence cannot be used on
/// such large allocations either.)
///
/// [`add`]: #method.add
///
/// # Panics
///
/// This function panics if `T` is a Zero-Sized Type ("ZST").
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let a = [0; 5];
/// let ptr1: *const i32 = &a[1];
/// let ptr2: *const i32 = &a[3];
/// unsafe {
///     assert_eq!(ptr2.offset_from(ptr1), 2);
///     assert_eq!(ptr1.offset_from(ptr2), -2);
///     assert_eq!(ptr1.offset(2), ptr2);
///     assert_eq!(ptr2.offset(-2), ptr1);
/// }
/// ```
///
/// *Incorrect* usage:
///
/// ```rust,no_run
/// let ptr1 = Box::into_raw(Box::new(0u8)) as *const u8;
/// let ptr2 = Box::into_raw(Box::new(1u8)) as *const u8;
/// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
/// // Make ptr2_other an "alias" of ptr2, but derived from ptr1.
/// let ptr2_other = (ptr1 as *const u8).wrapping_offset(diff);
/// assert_eq!(ptr2 as usize, ptr2_other as usize);
/// // Since ptr2_other and ptr2 are derived from pointers to different objects,
/// // computing their offset is undefined behavior, even though
/// // they point to the same address!
/// unsafe {
///     let zero = ptr2_other.offset_from(ptr2); // Undefined Behavior
/// }
/// ```
#[stable(feature = "ptr_offset_from", since = "1.47.0")]
#[rustc_const_unstable(feature = "const_ptr_offset_from", issue = "41079")]
pub const unsafe fn offset_from(self, origin: *const T) -> isize
    let pointee_size = mem::size_of::<T>();
    assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
    // SAFETY: the caller must uphold the safety contract for `ptr_offset_from`.
    unsafe { intrinsics::ptr_offset_from(self, origin) }
/// Returns whether two pointers are guaranteed to be equal.
///
/// At runtime this function behaves like `self == other`.
/// However, in some contexts (e.g., compile-time evaluation),
/// it is not always possible to determine equality of two pointers, so this function may
/// spuriously return `false` for pointers that later actually turn out to be equal.
/// But when it returns `true`, the pointers are guaranteed to be equal.
///
/// This function is the mirror of [`guaranteed_ne`], but not its inverse. There are pointer
/// comparisons for which both functions return `false`.
///
/// [`guaranteed_ne`]: #method.guaranteed_ne
///
/// The return value may change depending on the compiler version and unsafe code may not
/// rely on the result of this function for soundness. It is suggested to only use this function
/// for performance optimizations where spurious `false` return values by this function do not
/// affect the outcome, but just the performance.
/// The consequences of using this method to make runtime and compile-time code behave
/// differently have not been explored. This method should not be used to introduce such
/// differences, and it should also not be stabilized before we have a better understanding
/// of this issue.
#[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
#[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
pub const fn guaranteed_eq(self, other: *const T) -> bool
    intrinsics::ptr_guaranteed_eq(self, other)
/// Returns whether two pointers are guaranteed to be unequal.
///
/// At runtime this function behaves like `self != other`.
/// However, in some contexts (e.g., compile-time evaluation),
/// it is not always possible to determine the inequality of two pointers, so this function may
/// spuriously return `false` for pointers that later actually turn out to be unequal.
/// But when it returns `true`, the pointers are guaranteed to be unequal.
///
/// This function is the mirror of [`guaranteed_eq`], but not its inverse. There are pointer
/// comparisons for which both functions return `false`.
///
/// [`guaranteed_eq`]: #method.guaranteed_eq
///
/// The return value may change depending on the compiler version and unsafe code may not
/// rely on the result of this function for soundness. It is suggested to only use this function
/// for performance optimizations where spurious `false` return values by this function do not
/// affect the outcome, but just the performance.
/// The consequences of using this method to make runtime and compile-time code behave
/// differently have not been explored. This method should not be used to introduce such
/// differences, and it should also not be stabilized before we have a better understanding
/// of this issue.
#[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
#[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
pub const fn guaranteed_ne(self, other: *const T) -> bool
    intrinsics::ptr_guaranteed_ne(self, other)
/// Calculates the offset from a pointer (convenience for `.offset(count as isize)`).
///
/// `count` is in units of T; e.g., a `count` of 3 represents a pointer
/// offset of `3 * size_of::<T>()` bytes.
///
/// # Safety
///
/// If any of the following conditions are violated, the result is Undefined
/// Behavior:
///
/// * Both the starting and resulting pointer must be either in bounds or one
///   byte past the end of the same allocated object. Note that in Rust,
///   every (stack-allocated) variable is considered a separate allocated object.
///
/// * The computed offset, **in bytes**, cannot overflow an `isize`.
///
/// * The offset being in bounds cannot rely on "wrapping around" the address
///   space. That is, the infinite-precision sum must fit in a `usize`.
///
/// The compiler and standard library generally try to ensure allocations
/// never reach a size where an offset is a concern. For instance, `Vec`
/// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
/// `vec.as_ptr().add(vec.len())` is always safe.
///
/// Most platforms fundamentally can't even construct such an allocation.
/// For instance, no known 64-bit platform can ever serve a request
/// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
/// However, some 32-bit and 16-bit platforms may successfully serve a request for
/// more than `isize::MAX` bytes with things like Physical Address
/// Extension. As such, memory acquired directly from allocators or memory
/// mapped files *may* be too large to handle with this function.
///
/// Consider using [`wrapping_add`] instead if these constraints are
/// difficult to satisfy. The only advantage of this method is that it
/// enables more aggressive compiler optimizations.
///
/// [`wrapping_add`]: #method.wrapping_add
///
/// # Examples
///
/// ```
/// let s: &str = "123";
/// let ptr: *const u8 = s.as_ptr();
///
/// unsafe {
///     println!("{}", *ptr.add(1) as char);
///     println!("{}", *ptr.add(2) as char);
/// }
/// ```
#[stable(feature = "pointer_methods", since = "1.26.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
pub const unsafe fn add(self, count: usize) -> Self
    // SAFETY: the caller must uphold the safety contract for `offset`.
    unsafe { self.offset(count as isize) }
/// Calculates the offset from a pointer (convenience for
/// `.offset((count as isize).wrapping_neg())`).
///
/// `count` is in units of T; e.g., a `count` of 3 represents a pointer
/// offset of `3 * size_of::<T>()` bytes.
///
/// # Safety
///
/// If any of the following conditions are violated, the result is Undefined
/// Behavior:
///
/// * Both the starting and resulting pointer must be either in bounds or one
///   byte past the end of the same allocated object. Note that in Rust,
///   every (stack-allocated) variable is considered a separate allocated object.
///
/// * The computed offset cannot exceed `isize::MAX` **bytes**.
///
/// * The offset being in bounds cannot rely on "wrapping around" the address
///   space. That is, the infinite-precision sum must fit in a `usize`.
///
/// The compiler and standard library generally try to ensure allocations
/// never reach a size where an offset is a concern. For instance, `Vec`
/// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
/// `vec.as_ptr().add(vec.len()).sub(vec.len())` is always safe.
///
/// Most platforms fundamentally can't even construct such an allocation.
/// For instance, no known 64-bit platform can ever serve a request
/// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
/// However, some 32-bit and 16-bit platforms may successfully serve a request for
/// more than `isize::MAX` bytes with things like Physical Address
/// Extension. As such, memory acquired directly from allocators or memory
/// mapped files *may* be too large to handle with this function.
///
/// Consider using [`wrapping_sub`] instead if these constraints are
/// difficult to satisfy. The only advantage of this method is that it
/// enables more aggressive compiler optimizations.
///
/// [`wrapping_sub`]: #method.wrapping_sub
///
/// # Examples
///
/// ```
/// let s: &str = "123";
///
/// unsafe {
///     let end: *const u8 = s.as_ptr().add(3);
///     println!("{}", *end.sub(1) as char);
///     println!("{}", *end.sub(2) as char);
/// }
/// ```
#[stable(feature = "pointer_methods", since = "1.26.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
pub const unsafe fn sub(self, count: usize) -> Self
    // SAFETY: the caller must uphold the safety contract for `offset`.
    unsafe { self.offset((count as isize).wrapping_neg()) }
/// Calculates the offset from a pointer using wrapping arithmetic.
/// (convenience for `.wrapping_offset(count as isize)`)
///
/// `count` is in units of T; e.g., a `count` of 3 represents a pointer
/// offset of `3 * size_of::<T>()` bytes.
///
/// # Safety
///
/// This operation itself is always safe, but using the resulting pointer is not.
///
/// The resulting pointer remains attached to the same allocated object that `self` points to.
/// It may *not* be used to access a different allocated object. Note that in Rust, every
/// (stack-allocated) variable is considered a separate allocated object.
///
/// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
/// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
/// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
/// `x` and `y` point into the same allocated object.
///
/// Compared to [`add`], this method basically delays the requirement of staying within the
/// same allocated object: [`add`] is immediate Undefined Behavior when crossing object
/// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
/// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
/// can be optimized better and is thus preferable in performance-sensitive code.
///
/// The delayed check only considers the value of the pointer that was dereferenced, not the
/// intermediate values used during the computation of the final result. For example,
/// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
/// allocated object and then re-entering it later is permitted.
///
/// If you need to cross object boundaries, cast the pointer to an integer and
/// do the arithmetic there.
///
/// [`add`]: #method.add
///
/// # Examples
///
/// ```
/// // Iterate using a raw pointer in increments of two elements
/// let data = [1u8, 2, 3, 4, 5];
/// let mut ptr: *const u8 = data.as_ptr();
/// let step = 2;
/// let end_rounded_up = ptr.wrapping_add(6);
///
/// // This loop prints "1, 3, 5, "
/// while ptr != end_rounded_up {
///     unsafe {
///         print!("{}, ", *ptr);
///     }
///     ptr = ptr.wrapping_add(step);
/// }
/// ```
#[stable(feature = "pointer_methods", since = "1.26.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
pub const fn wrapping_add(self, count: usize) -> Self
    self.wrapping_offset(count as isize)
/// Calculates the offset from a pointer using wrapping arithmetic.
/// (convenience for `.wrapping_offset((count as isize).wrapping_neg())`)
///
/// `count` is in units of T; e.g., a `count` of 3 represents a pointer
/// offset of `3 * size_of::<T>()` bytes.
///
/// # Safety
///
/// This operation itself is always safe, but using the resulting pointer is not.
///
/// The resulting pointer remains attached to the same allocated object that `self` points to.
/// It may *not* be used to access a different allocated object. Note that in Rust, every
/// (stack-allocated) variable is considered a separate allocated object.
///
/// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
/// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
/// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
/// `x` and `y` point into the same allocated object.
///
/// Compared to [`sub`], this method basically delays the requirement of staying within the
/// same allocated object: [`sub`] is immediate Undefined Behavior when crossing object
/// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
/// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
/// can be optimized better and is thus preferable in performance-sensitive code.
///
/// The delayed check only considers the value of the pointer that was dereferenced, not the
/// intermediate values used during the computation of the final result. For example,
/// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
/// allocated object and then re-entering it later is permitted.
///
/// If you need to cross object boundaries, cast the pointer to an integer and
/// do the arithmetic there.
///
/// [`sub`]: #method.sub
///
/// # Examples
///
/// ```
/// // Iterate using a raw pointer in increments of two elements (backwards)
/// let data = [1u8, 2, 3, 4, 5];
/// let mut ptr: *const u8 = data.as_ptr();
/// let start_rounded_down = ptr.wrapping_sub(2);
/// ptr = ptr.wrapping_add(4);
/// let step = 2;
/// // This loop prints "5, 3, 1, "
/// while ptr != start_rounded_down {
///     unsafe {
///         print!("{}, ", *ptr);
///     }
///     ptr = ptr.wrapping_sub(step);
/// }
/// ```
#[stable(feature = "pointer_methods", since = "1.26.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
pub const fn wrapping_sub(self, count: usize) -> Self
    self.wrapping_offset((count as isize).wrapping_neg())
/// Sets the pointer value to `ptr`, returning the modified pointer.
///
/// In case `self` is a (fat) pointer to an unsized type, this operation
/// will only affect the pointer part, whereas for (thin) pointers to
/// sized types, this has the same effect as a simple assignment.
///
/// The resulting pointer will have provenance of `val`, i.e., for a fat
/// pointer, this operation is semantically the same as creating a new
/// fat pointer with the data pointer value of `val` but the metadata of
/// `self`.
///
/// # Examples
///
/// This function is primarily useful for allowing byte-wise pointer
/// arithmetic on potentially fat pointers:
///
/// ```
/// #![feature(set_ptr_value)]
/// # use core::fmt::Debug;
/// let arr: [i32; 3] = [1, 2, 3];
/// let mut ptr = &arr[0] as *const dyn Debug;
/// let thin = ptr as *const u8;
/// unsafe {
///     ptr = ptr.set_ptr_value(thin.add(8));
///     # assert_eq!(*(ptr as *const i32), 3);
///     println!("{:?}", &*ptr); // will print "3"
/// }
/// ```
#[unstable(feature = "set_ptr_value", issue = "75091")]
#[must_use = "returns a new pointer rather than modifying its argument"]
pub fn set_ptr_value(mut self, val: *const u8) -> Self {
    let thin = &mut self as *mut *const T as *mut *const u8;
    // SAFETY: In case of a thin pointer, this operation is identical
    // to a simple assignment. In case of a fat pointer, with the current
    // fat pointer layout implementation, the first field of such a
    // pointer is always the data pointer, which is likewise assigned.
    unsafe { *thin = val };
/// Reads the value from `self` without moving it. This leaves the
/// memory in `self` unchanged.
///
/// See [`ptr::read`] for safety concerns and examples.
///
/// [`ptr::read`]: crate::ptr::read()
#[stable(feature = "pointer_methods", since = "1.26.0")]
#[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
pub const unsafe fn read(self) -> T
    // SAFETY: the caller must uphold the safety contract for `read`.
    unsafe { read(self) }
/// Performs a volatile read of the value from `self` without moving it. This
/// leaves the memory in `self` unchanged.
///
/// Volatile operations are intended to act on I/O memory, and are guaranteed
/// to not be elided or reordered by the compiler across other volatile
/// operations.
///
/// See [`ptr::read_volatile`] for safety concerns and examples.
///
/// [`ptr::read_volatile`]: crate::ptr::read_volatile()
#[stable(feature = "pointer_methods", since = "1.26.0")]
pub unsafe fn read_volatile(self) -> T
    // SAFETY: the caller must uphold the safety contract for `read_volatile`.
    unsafe { read_volatile(self) }
/// Reads the value from `self` without moving it. This leaves the
/// memory in `self` unchanged.
///
/// Unlike `read`, the pointer may be unaligned.
///
/// See [`ptr::read_unaligned`] for safety concerns and examples.
///
/// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
#[stable(feature = "pointer_methods", since = "1.26.0")]
#[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
pub const unsafe fn read_unaligned(self) -> T
    // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
    unsafe { read_unaligned(self) }
/// Copies `count * size_of<T>` bytes from `self` to `dest`. The source
/// and destination may overlap.
///
/// NOTE: this has the *same* argument order as [`ptr::copy`].
///
/// See [`ptr::copy`] for safety concerns and examples.
///
/// [`ptr::copy`]: crate::ptr::copy()
#[stable(feature = "pointer_methods", since = "1.26.0")]
pub unsafe fn copy_to(self, dest: *mut T, count: usize)
    // SAFETY: the caller must uphold the safety contract for `copy`.
    unsafe { copy(self, dest, count) }
/// Copies `count * size_of<T>` bytes from `self` to `dest`. The source
/// and destination may *not* overlap.
///
/// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
///
/// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
///
/// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
#[stable(feature = "pointer_methods", since = "1.26.0")]
pub unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
    // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
    unsafe { copy_nonoverlapping(self, dest, count) }
/// Computes the offset that needs to be applied to the pointer in order to make it aligned to
/// `align`.
///
/// If it is not possible to align the pointer, the implementation returns
/// `usize::MAX`. It is permissible for the implementation to *always*
/// return `usize::MAX`. Only your algorithm's performance can depend
/// on getting a usable offset here, not its correctness.
///
/// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
/// used with the `wrapping_add` method.
///
/// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
/// beyond the allocation that the pointer points into. It is up to the caller to ensure that
/// the returned offset is correct in all terms other than alignment.
///
/// # Panics
///
/// The function panics if `align` is not a power-of-two.
///
/// # Examples
///
/// Accessing adjacent `u8` as `u16`
///
/// ```
/// # fn foo(n: usize) {
/// # use std::mem::align_of;
/// # unsafe {
/// let x = [5u8, 6u8, 7u8, 8u8, 9u8];
/// let ptr = x.as_ptr().add(n) as *const u8;
/// let offset = ptr.align_offset(align_of::<u16>());
/// if offset < x.len() - n - 1 {
///     let u16_ptr = ptr.add(offset) as *const u16;
///     assert_ne!(*u16_ptr, 500);
/// } else {
///     // while the pointer can be aligned via `offset`, it would point
///     // outside the allocation
/// }
/// # } }
/// ```
#[stable(feature = "align_offset", since = "1.36.0")]
pub fn align_offset(self, align: usize) -> usize
    if !align.is_power_of_two() {
        panic!("align_offset: align is not a power-of-two");
    // SAFETY: `align` has been checked to be a power of 2 above
    unsafe { align_offset(self, align) }
#[lang = "const_slice_ptr"]
// NOTE(review): this lang-item attribute decorates the inherent `impl` block
// for `*const [T]`; the impl header itself is not visible in this chunk.
/// Returns the length of a raw slice.
///
/// The returned value is the number of **elements**, not the number of bytes.
///
/// This function is safe, even when the raw slice cannot be cast to a slice
/// reference because the pointer is null or unaligned.
///
/// # Examples
///
/// ```rust
/// #![feature(slice_ptr_len)]
/// use std::ptr;
///
/// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
/// assert_eq!(slice.len(), 3);
/// ```
#[unstable(feature = "slice_ptr_len", issue = "71146")]
#[rustc_const_unstable(feature = "const_slice_ptr_len", issue = "71146")]
pub const fn len(self) -> usize {
    // Read the length word out of the fat pointer via the `Repr` union,
    // without dereferencing the data pointer.
    // SAFETY: this is safe because `*const [T]` and `FatPtr<T>` have the same layout.
    // Only `std` can make this guarantee.
    unsafe { Repr { rust: self }.raw }.len
#[cfg(not(bootstrap))]
/// Returns a raw pointer to the slice's buffer.
///
/// This is equivalent to casting `self` to `*const T`, but more type-safe.
///
/// # Examples
///
/// ```rust
/// #![feature(slice_ptr_get)]
/// use std::ptr;
///
/// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
/// assert_eq!(slice.as_ptr(), 0 as *const i8);
/// ```
#[unstable(feature = "slice_ptr_get", issue = "74265")]
#[rustc_const_unstable(feature = "slice_ptr_get", issue = "74265")]
pub const fn as_ptr(self) -> *const T {
    // NOTE(review): the body expression is elided in this chunk; per the doc
    // comment above it should be the thin-pointer cast of `self`.
/// Returns a raw pointer to an element or subslice, without doing bounds
/// checking.
///
/// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
/// is *[undefined behavior]* even if the resulting pointer is not used.
///
/// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
///
/// # Examples
///
/// ```rust
/// #![feature(slice_ptr_get)]
///
/// let x = &[1, 2, 4] as *const [i32];
///
/// unsafe {
///     assert_eq!(x.get_unchecked(1), x.as_ptr().add(1));
/// }
/// ```
#[unstable(feature = "slice_ptr_get", issue = "74265")]
pub unsafe fn get_unchecked<I>(self, index: I) -> *const I::Output
// Delegate to the `SliceIndex`-style implementation on `index`, which computes
// the raw element/subslice pointer without any bounds check.
// SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
unsafe { index.get_unchecked(self) }
/// Returns `None` if the pointer is null, or else returns a shared slice to
/// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
/// that the value has to be initialized.
///
/// [`as_ref`]: #method.as_ref
///
/// # Safety
///
/// When calling this method, you have to ensure that *either* the pointer is NULL *or*
/// all of the following is true:
///
/// * The pointer must be [valid] for reads for `ptr.len() * mem::size_of::<T>()` many bytes,
///   and it must be properly aligned. This means in particular:
///
///     * The entire memory range of this slice must be contained within a single allocated object!
///       Slices can never span across multiple allocated objects.
///
///     * The pointer must be aligned even for zero-length slices. One
///       reason for this is that enum layout optimizations may rely on references
///       (including slices of any length) being aligned and non-null to distinguish
///       them from other data. You can obtain a pointer that is usable as `data`
///       for zero-length slices using [`NonNull::dangling()`].
///
/// * The total size `ptr.len() * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
///   See the safety documentation of [`pointer::offset`].
///
/// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
///   In particular, for the duration of this lifetime, the memory the pointer points to must
///   not get mutated (except inside `UnsafeCell`).
///
/// This applies even if the result of this method is unused!
///
/// See also [`slice::from_raw_parts`][].
///
/// [valid]: crate::ptr#safety
#[unstable(feature = "ptr_as_uninit", issue = "75402")]
pub unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
    // NOTE(review): only the non-null branch is visible here; the null check
    // that produces the documented `None` is elided in this chunk.
    // Reinterpret the element type as `MaybeUninit<T>` (same layout) and
    // rebuild the slice from the data pointer plus the fat pointer's length.
    // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
    Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
// Equality for pointers
//
// Note: `T: ?Sized`, so this covers fat pointers too.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialEq for *const T {
    fn eq(&self, other: &*const T) -> bool {
        // NOTE(review): the comparison body is elided in this chunk.

// Pointer equality is a total equivalence relation, so the `Eq` marker impl
// is sound; it adds no methods of its own.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Eq for *const T {}
// Comparison for pointers
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Ord for *const T {
    fn cmp(&self, other: &*const T) -> Ordering {
        // Presumably a three-way comparison yielding Less/Equal/Greater; the
        // first branch's condition and both ordering arms are elided in this
        // chunk — only the `Equal` test below is visible.
        } else if self == other {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialOrd for *const T {
    fn partial_cmp(&self, other: &*const T) -> Option<Ordering> {
        // Delegates to the total order from `Ord::cmp`; never `None`.
        Some(self.cmp(other))

    // The four comparison operators are overridden below; their bodies are
    // elided in this chunk.
    fn lt(&self, other: &*const T) -> bool {

    fn le(&self, other: &*const T) -> bool {

    fn gt(&self, other: &*const T) -> bool {

    fn ge(&self, other: &*const T) -> bool {