2 use crate::cmp::Ordering::{self, Equal, Greater, Less};
4 use crate::slice::SliceIndex;
7 impl<T: ?Sized> *mut T {
/// Returns `true` if the pointer is null.
///
/// Note that unsized types have many possible null pointers, as only the
/// raw data pointer is considered, not their length, vtable, etc.
/// Therefore, two pointers that are null may still not compare equal to
/// each other.
///
/// # Examples
///
/// ```
/// let mut s = [1, 2, 3];
/// let ptr: *mut u32 = s.as_mut_ptr();
/// assert!(!ptr.is_null());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_null(self) -> bool {
    // Compare via a cast to a thin pointer, so fat pointers are only
    // considering their "data" part for null-ness.
    (self as *mut u8) == null_mut()
}
/// Casts to a pointer of another type.
///
/// This only changes the pointee type; the address is unchanged, so the
/// cast is free at runtime.
#[stable(feature = "ptr_cast", since = "1.38.0")]
#[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
pub const fn cast<U>(self) -> *mut U {
    self as _
}
/// Returns `None` if the pointer is null, or else returns a reference to
/// the value wrapped in `Some`.
///
/// # Safety
///
/// While this method and its mutable counterpart are useful for
/// null-safety, it is important to note that this is still an unsafe
/// operation because the returned value could be pointing to invalid
/// memory.
///
/// When calling this method, you have to ensure that *either* the pointer is NULL *or*
/// all of the following is true:
/// - it is properly aligned
/// - it must point to an initialized instance of T; in particular, the pointer must be
///   "dereferenceable" in the sense defined [here].
///
/// This applies even if the result of this method is unused!
/// (The part about being initialized is not yet fully decided, but until
/// it is, the only safe approach is to ensure that they are indeed initialized.)
///
/// Additionally, the lifetime `'a` returned is arbitrarily chosen and does
/// not necessarily reflect the actual lifetime of the data. *You* must enforce
/// Rust's aliasing rules. In particular, for the duration of this lifetime,
/// the memory the pointer points to must not get mutated (except inside `UnsafeCell`).
///
/// [here]: crate::ptr#safety
///
/// # Examples
///
/// ```
/// let ptr: *mut u8 = &mut 10u8 as *mut u8;
///
/// unsafe {
///     if let Some(val_back) = ptr.as_ref() {
///         println!("We got back the value: {}!", val_back);
///     }
/// }
/// ```
///
/// # Null-unchecked version
///
/// If you are sure the pointer can never be null and are looking for some kind of
/// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can
/// dereference the pointer directly.
///
/// ```
/// let ptr: *mut u8 = &mut 10u8 as *mut u8;
///
/// unsafe {
///     let val_back = &*ptr;
///     println!("We got back the value: {}!", val_back);
/// }
/// ```
#[stable(feature = "ptr_as_ref", since = "1.9.0")]
pub unsafe fn as_ref<'a>(self) -> Option<&'a T> {
    // SAFETY: the caller must guarantee that `self` is valid for a
    // reference if it isn't null.
    if self.is_null() { None } else { unsafe { Some(&*self) } }
}
/// Calculates the offset from a pointer.
///
/// `count` is in units of T; e.g., a `count` of 3 represents a pointer
/// offset of `3 * size_of::<T>()` bytes.
///
/// # Safety
///
/// If any of the following conditions are violated, the result is Undefined
/// Behavior:
///
/// * Both the starting and resulting pointer must be either in bounds or one
///   byte past the end of the same allocated object. Note that in Rust,
///   every (stack-allocated) variable is considered a separate allocated object.
///
/// * The computed offset, **in bytes**, cannot overflow an `isize`.
///
/// * The offset being in bounds cannot rely on "wrapping around" the address
///   space. That is, the infinite-precision sum, **in bytes**, must fit in a usize.
///
/// The compiler and standard library generally try to ensure allocations
/// never reach a size where an offset is a concern. For instance, `Vec`
/// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
/// `vec.as_ptr().add(vec.len())` is always safe.
///
/// Most platforms fundamentally can't even construct such an allocation.
/// For instance, no known 64-bit platform can ever serve a request
/// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
/// However, some 32-bit and 16-bit platforms may successfully serve a request for
/// more than `isize::MAX` bytes with things like Physical Address
/// Extension. As such, memory acquired directly from allocators or memory
/// mapped files *may* be too large to handle with this function.
///
/// Consider using [`wrapping_offset`] instead if these constraints are
/// difficult to satisfy. The only advantage of this method is that it
/// enables more aggressive compiler optimizations.
///
/// [`wrapping_offset`]: #method.wrapping_offset
///
/// # Examples
///
/// ```
/// let mut s = [1, 2, 3];
/// let ptr: *mut u32 = s.as_mut_ptr();
///
/// unsafe {
///     println!("{}", *ptr.offset(1));
///     println!("{}", *ptr.offset(2));
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
pub const unsafe fn offset(self, count: isize) -> *mut T
where
    T: Sized,
{
    // SAFETY: the caller must uphold the safety contract for `offset`.
    // The obtained pointer is valid for writes since the caller must
    // guarantee that it points to the same allocated object as `self`.
    unsafe { intrinsics::offset(self, count) as *mut T }
}
/// Calculates the offset from a pointer using wrapping arithmetic.
/// `count` is in units of T; e.g., a `count` of 3 represents a pointer
/// offset of `3 * size_of::<T>()` bytes.
///
/// # Safety
///
/// The resulting pointer does not need to be in bounds, but it is
/// potentially hazardous to dereference (which requires `unsafe`).
///
/// In particular, the resulting pointer remains attached to the same allocated
/// object that `self` points to. It may *not* be used to access a
/// different allocated object. Note that in Rust,
/// every (stack-allocated) variable is considered a separate allocated object.
///
/// In other words, `x.wrapping_offset(y.wrapping_offset_from(x))` is
/// *not* the same as `y`, and dereferencing it is undefined behavior
/// unless `x` and `y` point into the same allocated object.
///
/// Compared to [`offset`], this method basically delays the requirement of staying
/// within the same allocated object: [`offset`] is immediate Undefined Behavior when
/// crossing object boundaries; `wrapping_offset` produces a pointer but still leads
/// to Undefined Behavior if that pointer is dereferenced. [`offset`] can be optimized
/// better and is thus preferable in performance-sensitive code.
///
/// If you need to cross object boundaries, cast the pointer to an integer and
/// do the arithmetic there.
///
/// [`offset`]: #method.offset
///
/// # Examples
///
/// ```
/// // Iterate using a raw pointer in increments of two elements
/// let mut data = [1u8, 2, 3, 4, 5];
/// let mut ptr: *mut u8 = data.as_mut_ptr();
/// let step = 2;
/// let end_rounded_up = ptr.wrapping_offset(6);
///
/// while ptr != end_rounded_up {
///     unsafe {
///         *ptr = 0;
///     }
///     ptr = ptr.wrapping_offset(step);
/// }
/// assert_eq!(&data, &[0, 2, 0, 4, 0]);
/// ```
#[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
pub const fn wrapping_offset(self, count: isize) -> *mut T
where
    T: Sized,
{
    // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
    unsafe { intrinsics::arith_offset(self, count) as *mut T }
}
/// Returns `None` if the pointer is null, or else returns a mutable
/// reference to the value wrapped in `Some`.
///
/// # Safety
///
/// As with [`as_ref`], this is unsafe because it cannot verify the validity
/// of the returned pointer, nor can it ensure that the lifetime `'a`
/// returned is indeed a valid lifetime for the contained data.
///
/// When calling this method, you have to ensure that *either* the pointer is NULL *or*
/// all of the following is true:
/// - it is properly aligned
/// - it must point to an initialized instance of T; in particular, the pointer must be
///   "dereferenceable" in the sense defined [here].
///
/// This applies even if the result of this method is unused!
/// (The part about being initialized is not yet fully decided, but until
/// it is, the only safe approach is to ensure that they are indeed initialized.)
///
/// Additionally, the lifetime `'a` returned is arbitrarily chosen and does
/// not necessarily reflect the actual lifetime of the data. *You* must enforce
/// Rust's aliasing rules. In particular, for the duration of this lifetime,
/// the memory this pointer points to must not get accessed (read or written)
/// through any other pointer.
///
/// [here]: crate::ptr#safety
/// [`as_ref`]: #method.as_ref
///
/// # Examples
///
/// ```
/// let mut s = [1, 2, 3];
/// let ptr: *mut u32 = s.as_mut_ptr();
/// let first_value = unsafe { ptr.as_mut().unwrap() };
/// *first_value = 4;
/// println!("{:?}", s); // It'll print: "[4, 2, 3]".
/// ```
///
/// # Null-unchecked version
///
/// If you are sure the pointer can never be null and are looking for some kind of
/// `as_mut_unchecked` that returns the `&mut T` instead of `Option<&mut T>`, know that
/// you can dereference the pointer directly.
///
/// ```
/// let mut s = [1, 2, 3];
/// let ptr: *mut u32 = s.as_mut_ptr();
/// let first_value = unsafe { &mut *ptr };
/// *first_value = 4;
/// println!("{:?}", s); // It'll print: "[4, 2, 3]".
/// ```
#[stable(feature = "ptr_as_ref", since = "1.9.0")]
pub unsafe fn as_mut<'a>(self) -> Option<&'a mut T> {
    // SAFETY: the caller must guarantee that `self` is valid for
    // a mutable reference if it isn't null.
    if self.is_null() { None } else { unsafe { Some(&mut *self) } }
}
/// Returns whether two pointers are guaranteed to be equal.
///
/// At runtime this function behaves like `self == other`.
/// However, in some contexts (e.g., compile-time evaluation),
/// it is not always possible to determine equality of two pointers, so this function may
/// spuriously return `false` for pointers that later actually turn out to be equal.
/// But when it returns `true`, the pointers are guaranteed to be equal.
///
/// This function is the mirror of [`guaranteed_ne`], but not its inverse. There are pointer
/// comparisons for which both functions return `false`.
///
/// [`guaranteed_ne`]: #method.guaranteed_ne
///
/// The return value may change depending on the compiler version and unsafe code may not
/// rely on the result of this function for soundness. It is suggested to only use this function
/// for performance optimizations where spurious `false` return values by this function do not
/// affect the outcome, but just the performance.
/// The consequences of using this method to make runtime and compile-time code behave
/// differently have not been explored. This method should not be used to introduce such
/// differences, and it should also not be stabilized before we have a better understanding
/// of this behavior.
#[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
#[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
pub const fn guaranteed_eq(self, other: *mut T) -> bool
where
    T: Sized,
{
    // The intrinsic performs no memory access, so this is a safe call.
    intrinsics::ptr_guaranteed_eq(self as *const _, other as *const _)
}
320 /// Returns whether two pointers are guaranteed to be unequal.
322 /// At runtime this function behaves like `self != other`.
323 /// However, in some contexts (e.g., compile-time evaluation),
324 /// it is not always possible to determine the inequality of two pointers, so this function may
325 /// spuriously return `false` for pointers that later actually turn out to be unequal.
326 /// But when it returns `true`, the pointers are guaranteed to be unequal.
328 /// This function is the mirror of [`guaranteed_eq`], but not its inverse. There are pointer
329 /// comparisons for which both functions return `false`.
331 /// [`guaranteed_eq`]: #method.guaranteed_eq
333 /// The return value may change depending on the compiler version and unsafe code may not
334 /// rely on the result of this function for soundness. It is suggested to only use this function
335 /// for performance optimizations where spurious `false` return values by this function do not
336 /// affect the outcome, but just the performance.
337 /// The consequences of using this method to make runtime and compile-time code behave
338 /// differently have not been explored. This method should not be used to introduce such
339 /// differences, and it should also not be stabilized before we have a better understanding
341 #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
342 #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
344 pub const unsafe fn guaranteed_ne(self, other: *mut T) -> bool
348 intrinsics::ptr_guaranteed_ne(self as *const _, other as *const _)
/// Calculates the distance between two pointers. The returned value is in
/// units of T: the distance in bytes is divided by `mem::size_of::<T>()`.
///
/// This function is the inverse of [`offset`].
///
/// [`offset`]: #method.offset-1
/// [`wrapping_offset_from`]: #method.wrapping_offset_from-1
///
/// # Safety
///
/// If any of the following conditions are violated, the result is Undefined
/// Behavior:
///
/// * Both the starting and other pointer must be either in bounds or one
///   byte past the end of the same allocated object. Note that in Rust,
///   every (stack-allocated) variable is considered a separate allocated object.
///
/// * The distance between the pointers, **in bytes**, cannot overflow an `isize`.
///
/// * The distance between the pointers, in bytes, must be an exact multiple
///   of the size of `T`.
///
/// * The distance being in bounds cannot rely on "wrapping around" the address space.
///
/// The compiler and standard library generally try to ensure allocations
/// never reach a size where an offset is a concern. For instance, `Vec`
/// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
/// `ptr_into_vec.offset_from(vec.as_ptr())` is always safe.
///
/// Most platforms fundamentally can't even construct such an allocation.
/// For instance, no known 64-bit platform can ever serve a request
/// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
/// However, some 32-bit and 16-bit platforms may successfully serve a request for
/// more than `isize::MAX` bytes with things like Physical Address
/// Extension. As such, memory acquired directly from allocators or memory
/// mapped files *may* be too large to handle with this function.
///
/// Consider using [`wrapping_offset_from`] instead if these constraints are
/// difficult to satisfy. The only advantage of this method is that it
/// enables more aggressive compiler optimizations.
///
/// # Panics
///
/// This function panics if `T` is a Zero-Sized Type ("ZST").
///
/// # Examples
///
/// ```
/// #![feature(ptr_offset_from)]
///
/// let mut a = [0; 5];
/// let ptr1: *mut i32 = &mut a[1];
/// let ptr2: *mut i32 = &mut a[3];
/// unsafe {
///     assert_eq!(ptr2.offset_from(ptr1), 2);
///     assert_eq!(ptr1.offset_from(ptr2), -2);
///     assert_eq!(ptr1.offset(2), ptr2);
///     assert_eq!(ptr2.offset(-2), ptr1);
/// }
/// ```
#[unstable(feature = "ptr_offset_from", issue = "41079")]
#[rustc_const_unstable(feature = "const_ptr_offset_from", issue = "41079")]
pub const unsafe fn offset_from(self, origin: *const T) -> isize
where
    T: Sized,
{
    // SAFETY: the caller must uphold the safety contract for `offset_from`.
    unsafe { (self as *const T).offset_from(origin) }
}
/// Calculates the distance between two pointers. The returned value is in
/// units of T: the distance in bytes is divided by `mem::size_of::<T>()`.
///
/// If the address difference between the two pointers is not a multiple of
/// `mem::size_of::<T>()` then the result of the division is rounded towards
/// zero.
///
/// Though this method is safe for any two pointers, note that its result
/// will be mostly useless if the two pointers aren't into the same allocated
/// object, for example if they point to two different local variables.
///
/// # Panics
///
/// This function panics if `T` is a zero-sized type.
///
/// # Examples
///
/// ```
/// #![feature(ptr_wrapping_offset_from)]
///
/// let mut a = [0; 5];
/// let ptr1: *mut i32 = &mut a[1];
/// let ptr2: *mut i32 = &mut a[3];
/// assert_eq!(ptr2.wrapping_offset_from(ptr1), 2);
/// assert_eq!(ptr1.wrapping_offset_from(ptr2), -2);
/// assert_eq!(ptr1.wrapping_offset(2), ptr2);
/// assert_eq!(ptr2.wrapping_offset(-2), ptr1);
///
/// let ptr1: *mut i32 = 3 as _;
/// let ptr2: *mut i32 = 13 as _;
/// assert_eq!(ptr2.wrapping_offset_from(ptr1), 2);
/// ```
#[unstable(feature = "ptr_wrapping_offset_from", issue = "41079")]
#[rustc_deprecated(
    since = "1.46.0",
    reason = "Pointer distances across allocation \
              boundaries are not typically meaningful. \
              Use integer subtraction if you really need this."
)]
pub fn wrapping_offset_from(self, origin: *const T) -> isize
where
    T: Sized,
{
    // Delegates to the (likewise deprecated) `*const T` implementation.
    #[allow(deprecated_in_future, deprecated)]
    (self as *const T).wrapping_offset_from(origin)
}
/// Calculates the offset from a pointer (convenience for `.offset(count as isize)`).
///
/// `count` is in units of T; e.g., a `count` of 3 represents a pointer
/// offset of `3 * size_of::<T>()` bytes.
///
/// # Safety
///
/// If any of the following conditions are violated, the result is Undefined
/// Behavior:
///
/// * Both the starting and resulting pointer must be either in bounds or one
///   byte past the end of the same allocated object. Note that in Rust,
///   every (stack-allocated) variable is considered a separate allocated object.
///
/// * The computed offset, **in bytes**, cannot overflow an `isize`.
///
/// * The offset being in bounds cannot rely on "wrapping around" the address
///   space. That is, the infinite-precision sum must fit in a `usize`.
///
/// The compiler and standard library generally try to ensure allocations
/// never reach a size where an offset is a concern. For instance, `Vec`
/// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
/// `vec.as_ptr().add(vec.len())` is always safe.
///
/// Most platforms fundamentally can't even construct such an allocation.
/// For instance, no known 64-bit platform can ever serve a request
/// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
/// However, some 32-bit and 16-bit platforms may successfully serve a request for
/// more than `isize::MAX` bytes with things like Physical Address
/// Extension. As such, memory acquired directly from allocators or memory
/// mapped files *may* be too large to handle with this function.
///
/// Consider using [`wrapping_add`] instead if these constraints are
/// difficult to satisfy. The only advantage of this method is that it
/// enables more aggressive compiler optimizations.
///
/// [`wrapping_add`]: #method.wrapping_add
///
/// # Examples
///
/// ```
/// let s: &str = "123";
/// let ptr: *const u8 = s.as_ptr();
///
/// unsafe {
///     println!("{}", *ptr.add(1) as char);
///     println!("{}", *ptr.add(2) as char);
/// }
/// ```
#[stable(feature = "pointer_methods", since = "1.26.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
pub const unsafe fn add(self, count: usize) -> Self
where
    T: Sized,
{
    // SAFETY: the caller must uphold the safety contract for `offset`.
    unsafe { self.offset(count as isize) }
}
/// Calculates the offset from a pointer (convenience for
/// `.offset((count as isize).wrapping_neg())`).
///
/// `count` is in units of T; e.g., a `count` of 3 represents a pointer
/// offset of `3 * size_of::<T>()` bytes.
///
/// # Safety
///
/// If any of the following conditions are violated, the result is Undefined
/// Behavior:
///
/// * Both the starting and resulting pointer must be either in bounds or one
///   byte past the end of the same allocated object. Note that in Rust,
///   every (stack-allocated) variable is considered a separate allocated object.
///
/// * The computed offset cannot exceed `isize::MAX` **bytes**.
///
/// * The offset being in bounds cannot rely on "wrapping around" the address
///   space. That is, the infinite-precision sum must fit in a usize.
///
/// The compiler and standard library generally try to ensure allocations
/// never reach a size where an offset is a concern. For instance, `Vec`
/// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
/// `vec.as_ptr().add(vec.len()).sub(vec.len())` is always safe.
///
/// Most platforms fundamentally can't even construct such an allocation.
/// For instance, no known 64-bit platform can ever serve a request
/// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
/// However, some 32-bit and 16-bit platforms may successfully serve a request for
/// more than `isize::MAX` bytes with things like Physical Address
/// Extension. As such, memory acquired directly from allocators or memory
/// mapped files *may* be too large to handle with this function.
///
/// Consider using [`wrapping_sub`] instead if these constraints are
/// difficult to satisfy. The only advantage of this method is that it
/// enables more aggressive compiler optimizations.
///
/// [`wrapping_sub`]: #method.wrapping_sub
///
/// # Examples
///
/// ```
/// let s: &str = "123";
///
/// unsafe {
///     let end: *const u8 = s.as_ptr().add(3);
///     println!("{}", *end.sub(1) as char);
///     println!("{}", *end.sub(2) as char);
/// }
/// ```
#[stable(feature = "pointer_methods", since = "1.26.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
pub const unsafe fn sub(self, count: usize) -> Self
where
    T: Sized,
{
    // SAFETY: the caller must uphold the safety contract for `offset`.
    unsafe { self.offset((count as isize).wrapping_neg()) }
}
/// Calculates the offset from a pointer using wrapping arithmetic.
/// (convenience for `.wrapping_offset(count as isize)`)
///
/// `count` is in units of T; e.g., a `count` of 3 represents a pointer
/// offset of `3 * size_of::<T>()` bytes.
///
/// # Safety
///
/// The resulting pointer does not need to be in bounds, but it is
/// potentially hazardous to dereference (which requires `unsafe`).
///
/// In particular, the resulting pointer remains attached to the same allocated
/// object that `self` points to. It may *not* be used to access a
/// different allocated object. Note that in Rust,
/// every (stack-allocated) variable is considered a separate allocated object.
///
/// Compared to [`add`], this method basically delays the requirement of staying
/// within the same allocated object: [`add`] is immediate Undefined Behavior when
/// crossing object boundaries; `wrapping_add` produces a pointer but still leads
/// to Undefined Behavior if that pointer is dereferenced. [`add`] can be optimized
/// better and is thus preferable in performance-sensitive code.
///
/// If you need to cross object boundaries, cast the pointer to an integer and
/// do the arithmetic there.
///
/// [`add`]: #method.add
///
/// # Examples
///
/// ```
/// // Iterate using a raw pointer in increments of two elements
/// let data = [1u8, 2, 3, 4, 5];
/// let mut ptr: *const u8 = data.as_ptr();
/// let step = 2;
/// let end_rounded_up = ptr.wrapping_add(6);
///
/// // This loop prints "1, 3, 5, "
/// while ptr != end_rounded_up {
///     unsafe {
///         print!("{}, ", *ptr);
///     }
///     ptr = ptr.wrapping_add(step);
/// }
/// ```
#[stable(feature = "pointer_methods", since = "1.26.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
pub const fn wrapping_add(self, count: usize) -> Self
where
    T: Sized,
{
    self.wrapping_offset(count as isize)
}
/// Calculates the offset from a pointer using wrapping arithmetic.
/// (convenience for `.wrapping_offset((count as isize).wrapping_neg())`)
///
/// `count` is in units of T; e.g., a `count` of 3 represents a pointer
/// offset of `3 * size_of::<T>()` bytes.
///
/// # Safety
///
/// The resulting pointer does not need to be in bounds, but it is
/// potentially hazardous to dereference (which requires `unsafe`).
///
/// In particular, the resulting pointer remains attached to the same allocated
/// object that `self` points to. It may *not* be used to access a
/// different allocated object. Note that in Rust,
/// every (stack-allocated) variable is considered a separate allocated object.
///
/// Compared to [`sub`], this method basically delays the requirement of staying
/// within the same allocated object: [`sub`] is immediate Undefined Behavior when
/// crossing object boundaries; `wrapping_sub` produces a pointer but still leads
/// to Undefined Behavior if that pointer is dereferenced. [`sub`] can be optimized
/// better and is thus preferable in performance-sensitive code.
///
/// If you need to cross object boundaries, cast the pointer to an integer and
/// do the arithmetic there.
///
/// [`sub`]: #method.sub
///
/// # Examples
///
/// ```
/// // Iterate using a raw pointer in increments of two elements (backwards)
/// let data = [1u8, 2, 3, 4, 5];
/// let mut ptr: *const u8 = data.as_ptr();
/// let start_rounded_down = ptr.wrapping_sub(2);
/// ptr = ptr.wrapping_add(4);
/// let step = 2;
/// // This loop prints "5, 3, 1, "
/// while ptr != start_rounded_down {
///     unsafe {
///         print!("{}, ", *ptr);
///     }
///     ptr = ptr.wrapping_sub(step);
/// }
/// ```
#[stable(feature = "pointer_methods", since = "1.26.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
pub const fn wrapping_sub(self, count: usize) -> Self
where
    T: Sized,
{
    self.wrapping_offset((count as isize).wrapping_neg())
}
/// Sets the pointer value to `ptr`.
///
/// In case `self` is a (fat) pointer to an unsized type, this operation
/// will only affect the pointer part, whereas for (thin) pointers to
/// sized types, this has the same effect as a simple assignment.
///
/// # Examples
///
/// This function is primarily useful for allowing byte-wise pointer
/// arithmetic on potentially fat pointers:
///
/// ```
/// #![feature(set_ptr_value)]
/// # use core::fmt::Debug;
/// let mut arr: [i32; 3] = [1, 2, 3];
/// let mut ptr = &mut arr[0] as *mut dyn Debug;
/// let thin = ptr as *mut u8;
/// ptr = ptr.set_ptr_value(unsafe { thin.add(8).cast() });
/// assert_eq!(unsafe { *(ptr as *mut i32) }, 3);
/// ```
#[unstable(feature = "set_ptr_value", issue = "75091")]
pub fn set_ptr_value(mut self, val: *mut ()) -> Self {
    let thin = &mut self as *mut *mut T as *mut *mut ();
    // SAFETY: In case of a thin pointer, this operation is identical
    // to a simple assignment. In case of a fat pointer, with the current
    // fat pointer layout implementation, the first field of such a
    // pointer is always the data pointer, which is likewise assigned.
    unsafe { *thin = val };
    self
}
/// Reads the value from `self` without moving it. This leaves the
/// memory in `self` unchanged.
///
/// See [`ptr::read`] for safety concerns and examples.
///
/// [`ptr::read`]: ./ptr/fn.read.html
#[stable(feature = "pointer_methods", since = "1.26.0")]
pub unsafe fn read(self) -> T
where
    T: Sized,
{
    // SAFETY: the caller must uphold the safety contract for `read`.
    unsafe { read(self) }
}
/// Performs a volatile read of the value from `self` without moving it. This
/// leaves the memory in `self` unchanged.
///
/// Volatile operations are intended to act on I/O memory, and are guaranteed
/// to not be elided or reordered by the compiler across other volatile
/// operations.
///
/// See [`ptr::read_volatile`] for safety concerns and examples.
///
/// [`ptr::read_volatile`]: ./ptr/fn.read_volatile.html
#[stable(feature = "pointer_methods", since = "1.26.0")]
pub unsafe fn read_volatile(self) -> T
where
    T: Sized,
{
    // SAFETY: the caller must uphold the safety contract for `read_volatile`.
    unsafe { read_volatile(self) }
}
/// Reads the value from `self` without moving it. This leaves the
/// memory in `self` unchanged.
///
/// Unlike `read`, the pointer may be unaligned.
///
/// See [`ptr::read_unaligned`] for safety concerns and examples.
///
/// [`ptr::read_unaligned`]: ./ptr/fn.read_unaligned.html
#[stable(feature = "pointer_methods", since = "1.26.0")]
pub unsafe fn read_unaligned(self) -> T
where
    T: Sized,
{
    // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
    unsafe { read_unaligned(self) }
}
/// Copies `count * size_of<T>` bytes from `self` to `dest`. The source
/// and destination may overlap.
///
/// NOTE: this has the *same* argument order as [`ptr::copy`].
///
/// See [`ptr::copy`] for safety concerns and examples.
///
/// [`ptr::copy`]: ./ptr/fn.copy.html
#[stable(feature = "pointer_methods", since = "1.26.0")]
pub unsafe fn copy_to(self, dest: *mut T, count: usize)
where
    T: Sized,
{
    // SAFETY: the caller must uphold the safety contract for `copy`.
    unsafe { copy(self, dest, count) }
}
/// Copies `count * size_of<T>` bytes from `self` to `dest`. The source
/// and destination may *not* overlap.
///
/// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
///
/// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
///
/// [`ptr::copy_nonoverlapping`]: ./ptr/fn.copy_nonoverlapping.html
#[stable(feature = "pointer_methods", since = "1.26.0")]
pub unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
where
    T: Sized,
{
    // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
    unsafe { copy_nonoverlapping(self, dest, count) }
}
/// Copies `count * size_of<T>` bytes from `src` to `self`. The source
/// and destination may overlap.
///
/// NOTE: this has the *opposite* argument order of [`ptr::copy`].
///
/// See [`ptr::copy`] for safety concerns and examples.
///
/// [`ptr::copy`]: ./ptr/fn.copy.html
#[stable(feature = "pointer_methods", since = "1.26.0")]
pub unsafe fn copy_from(self, src: *const T, count: usize)
where
    T: Sized,
{
    // SAFETY: the caller must uphold the safety contract for `copy`.
    unsafe { copy(src, self, count) }
}
/// Copies `count * size_of<T>` bytes from `src` to `self`. The source
/// and destination may *not* overlap.
///
/// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
///
/// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
///
/// [`ptr::copy_nonoverlapping`]: ./ptr/fn.copy_nonoverlapping.html
#[stable(feature = "pointer_methods", since = "1.26.0")]
pub unsafe fn copy_from_nonoverlapping(self, src: *const T, count: usize)
where
    T: Sized,
{
    // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
    unsafe { copy_nonoverlapping(src, self, count) }
}
/// Executes the destructor (if any) of the pointed-to value.
///
/// See [`ptr::drop_in_place`] for safety concerns and examples.
///
/// [`ptr::drop_in_place`]: ./ptr/fn.drop_in_place.html
#[stable(feature = "pointer_methods", since = "1.26.0")]
pub unsafe fn drop_in_place(self) {
    // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
    unsafe { drop_in_place(self) }
}
/// Overwrites a memory location with the given value without reading or
/// dropping the old value.
///
/// See [`ptr::write`] for safety concerns and examples.
///
/// [`ptr::write`]: ./ptr/fn.write.html
#[stable(feature = "pointer_methods", since = "1.26.0")]
pub unsafe fn write(self, val: T)
where
    T: Sized,
{
    // SAFETY: the caller must uphold the safety contract for `write`.
    unsafe { write(self, val) }
}
/// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
/// bytes of memory starting at `self` to `val`.
///
/// See [`ptr::write_bytes`] for safety concerns and examples.
///
/// [`ptr::write_bytes`]: ./ptr/fn.write_bytes.html
#[stable(feature = "pointer_methods", since = "1.26.0")]
pub unsafe fn write_bytes(self, val: u8, count: usize)
where
    T: Sized,
{
    // SAFETY: the caller must uphold the safety contract for `write_bytes`.
    unsafe { write_bytes(self, val, count) }
}
    /// Performs a volatile write of a memory location with the given value without
    /// reading or dropping the old value.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::write_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::write_volatile`]: ./ptr/fn.write_volatile.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    pub unsafe fn write_volatile(self, val: T)
        // SAFETY: the caller must uphold the safety contract for `write_volatile`.
        unsafe { write_volatile(self, val) }
    /// Overwrites a memory location with the given value without reading or
    /// dropping the old value.
    ///
    /// Unlike `write`, the pointer may be unaligned.
    ///
    /// See [`ptr::write_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::write_unaligned`]: ./ptr/fn.write_unaligned.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    pub unsafe fn write_unaligned(self, val: T)
        // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
        unsafe { write_unaligned(self, val) }
    /// Replaces the value at `self` with `src`, returning the old
    /// value, without dropping either.
    ///
    /// See [`ptr::replace`] for safety concerns and examples.
    ///
    /// [`ptr::replace`]: ./ptr/fn.replace.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    pub unsafe fn replace(self, src: T) -> T
        // SAFETY: the caller must uphold the safety contract for `replace`.
        unsafe { replace(self, src) }
    /// Swaps the values at two mutable locations of the same type, without
    /// deinitializing either. They may overlap, unlike `mem::swap` which is
    /// otherwise equivalent.
    ///
    /// See [`ptr::swap`] for safety concerns and examples.
    ///
    /// [`ptr::swap`]: ./ptr/fn.swap.html
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    pub unsafe fn swap(self, with: *mut T)
        // SAFETY: the caller must uphold the safety contract for `swap`.
        unsafe { swap(self, with) }
    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
    /// `align`.
    ///
    /// If it is not possible to align the pointer, the implementation returns
    /// `usize::MAX`. It is permissible for the implementation to *always*
    /// return `usize::MAX`. Only your algorithm's performance can depend
    /// on getting a usable offset here, not its correctness.
    ///
    /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
    /// used with the `wrapping_add` method.
    ///
    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
    /// the returned offset is correct in all terms other than alignment.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two.
    ///
    /// # Examples
    ///
    /// Accessing adjacent `u8` as `u16`
    ///
    /// ```
    /// # fn foo(n: usize) {
    /// # use std::mem::align_of;
    /// # unsafe {
    /// let x = [5u8, 6u8, 7u8, 8u8, 9u8];
    /// let ptr = &x[n] as *const u8;
    /// let offset = ptr.align_offset(align_of::<u16>());
    /// if offset < x.len() - n - 1 {
    ///     let u16_ptr = ptr.add(offset) as *const u16;
    ///     assert_ne!(*u16_ptr, 500);
    /// } else {
    ///     // while the pointer can be aligned via `offset`, it would point
    ///     // outside the allocation
    /// }
    /// # }
    /// # }
    /// ```
    #[stable(feature = "align_offset", since = "1.36.0")]
    pub fn align_offset(self, align: usize) -> usize
        if !align.is_power_of_two() {
            panic!("align_offset: align is not a power-of-two");
        // SAFETY: `align` has been checked to be a power of 2 above
        unsafe { align_offset(self, align) }
// Lang item marking the impl of inherent methods on raw slice pointers
// (`*mut [T]`), so the compiler can locate them.
#[lang = "mut_slice_ptr"]
    /// Returns the length of a raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the raw slice cannot be cast to a slice
    /// reference because the pointer is null or unaligned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_len)]
    /// use std::ptr;
    ///
    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[unstable(feature = "slice_ptr_len", issue = "71146")]
    #[rustc_const_unstable(feature = "const_slice_ptr_len", issue = "71146")]
    pub const fn len(self) -> usize {
        // SAFETY: this is safe because `*const [T]` and `FatPtr<T>` have the same layout.
        // Only `std` can make this guarantee.
        unsafe { Repr { rust_mut: self }.raw }.len
    /// Returns a raw pointer to the slice's buffer.
    ///
    /// This is equivalent to casting `self` to `*mut T`, but more type-safe.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_get)]
    /// use std::ptr;
    ///
    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
    /// assert_eq!(slice.as_mut_ptr(), 0 as *mut i8);
    /// ```
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[rustc_const_unstable(feature = "slice_ptr_get", issue = "74265")]
    pub const fn as_mut_ptr(self) -> *mut T {
    /// Returns a raw pointer to an element or subslice, without doing bounds
    /// checking.
    ///
    /// # Safety
    ///
    /// Calling this method with an out-of-bounds index or when `self` is not dereferencable
    /// is *[undefined behavior]* even if the resulting pointer is not used.
    ///
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(slice_ptr_get)]
    ///
    /// let x = &mut [1, 2, 4] as *mut [i32];
    ///
    /// unsafe {
    ///     assert_eq!(x.get_unchecked_mut(1), x.as_mut_ptr().add(1));
    /// }
    /// ```
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    pub unsafe fn get_unchecked_mut<I>(self, index: I) -> *mut I::Output
        // SAFETY: the caller ensures that `self` is dereferencable and `index` in-bounds.
        unsafe { index.get_unchecked_mut(self) }
// Equality for pointers
//
// Compares the pointer values themselves (the address, and for fat pointers
// the metadata as well), never the pointed-to values.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialEq for *mut T {
    fn eq(&self, other: &*mut T) -> bool {
// Pointer comparison is a total equivalence relation on the pointer value
// itself, so `Eq` is a marker impl with no methods.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Eq for *mut T {}
// Total order on raw pointers, ordered by address.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Ord for *mut T {
    fn cmp(&self, other: &*mut T) -> Ordering {
        // Three-way branch on `<` / `==` / (otherwise) `>` over the pointer value.
        } else if self == other {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialOrd for *mut T {
    // Delegates to the total order `cmp`, so this never returns `None`.
    fn partial_cmp(&self, other: &*mut T) -> Option<Ordering> {
        Some(self.cmp(other))
    // The individual comparison operators compare pointer values directly.
    fn lt(&self, other: &*mut T) -> bool {
    fn le(&self, other: &*mut T) -> bool {
    fn gt(&self, other: &*mut T) -> bool {
    fn ge(&self, other: &*mut T) -> bool {