2 use crate::cmp::Ordering::{self, Equal, Greater, Less};
4 use crate::slice::{self, SliceIndex};
6 #[cfg_attr(bootstrap, lang = "mut_ptr")]
7 impl<T: ?Sized> *mut T {
/// Returns `true` if the pointer is null.
///
/// Note that unsized types have many possible null pointers, as only the
/// raw data pointer is considered, not their length, vtable, etc.
/// Therefore, two pointers that are null may still not compare equal to
/// each other.
///
/// ## Behavior during const evaluation
///
/// When this function is used during const evaluation, it may return `false` for pointers
/// that turn out to be null at runtime. Specifically, when a pointer to some memory
/// is offset beyond its bounds in such a way that the resulting pointer is null,
/// the function will still return `false`. There is no way for CTFE to know
/// the absolute position of that memory, so we cannot tell if the pointer is
/// null or not.
///
/// # Examples
///
/// ```
/// let mut s = [1, 2, 3];
/// let ptr: *mut u32 = s.as_mut_ptr();
/// assert!(!ptr.is_null());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_ptr_is_null", issue = "74939")]
pub const fn is_null(self) -> bool {
// Compare via a cast to a thin pointer, so fat pointers are only
// considering their "data" part for null-ness.
(self as *mut u8).guaranteed_eq(null_mut())
/// Casts to a pointer of another type.
///
/// This only changes the pointee type; address and provenance are unchanged.
#[stable(feature = "ptr_cast", since = "1.38.0")]
#[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
pub const fn cast<U>(self) -> *mut U {
/// Use the pointer value in a new pointer of another type.
///
/// In case `val` is a (fat) pointer to an unsized type, this operation
/// will ignore the pointer part, whereas for (thin) pointers to sized
/// types, this has the same effect as a simple cast.
///
/// The resulting pointer will have provenance of `self`, i.e., for a fat
/// pointer, this operation is semantically the same as creating a new
/// fat pointer with the data pointer value of `self` but the metadata of
/// `val`.
///
/// # Examples
///
/// This function is primarily useful for allowing byte-wise pointer
/// arithmetic on potentially fat pointers:
///
/// ```
/// #![feature(set_ptr_value)]
/// # use core::fmt::Debug;
/// let mut arr: [i32; 3] = [1, 2, 3];
/// let mut ptr = arr.as_mut_ptr() as *mut dyn Debug;
/// let thin = ptr as *mut u8;
/// unsafe {
///     ptr = thin.add(8).with_metadata_of(ptr);
///     # assert_eq!(*(ptr as *mut i32), 3);
///     println!("{:?}", &*ptr); // will print "3"
/// }
/// ```
#[unstable(feature = "set_ptr_value", issue = "75091")]
#[must_use = "returns a new pointer rather than modifying its argument"]
pub fn with_metadata_of<U>(self, mut val: *mut U) -> *mut U
let target = &mut val as *mut *mut U as *mut *mut u8;
// SAFETY: In case of a thin pointer, this operation is identical
// to a simple assignment. In case of a fat pointer, with the current
// fat pointer layout implementation, the first field of such a
// pointer is always the data pointer, which is likewise assigned.
unsafe { *target = self as *mut u8 };
/// Changes constness without changing the type.
///
/// This is a bit safer than `as` because it wouldn't silently change the type if the code is
/// refactored.
///
/// While not strictly required (`*mut T` coerces to `*const T`), this is provided for symmetry
/// with `as_mut()` on `*const T` and may have documentation value if used instead of implicit
/// coercion.
#[unstable(feature = "ptr_const_cast", issue = "92675")]
#[rustc_const_unstable(feature = "ptr_const_cast", issue = "92675")]
pub const fn as_const(self) -> *const T {
/// Casts a pointer to its raw bits.
///
/// This is equivalent to `as usize`, but is more specific to enhance readability.
/// The inverse method is [`from_bits`](#method.from_bits-1).
///
/// In particular, `*p as usize` and `p as usize` will both compile for
/// pointers to numeric types but do very different things, so using this
/// helps emphasize that reading the bits was intentional.
///
/// # Examples
///
/// ```
/// #![feature(ptr_to_from_bits)]
/// let mut array = [13, 42];
/// let mut it = array.iter_mut();
/// let p0: *mut i32 = it.next().unwrap();
/// assert_eq!(<*mut _>::from_bits(p0.to_bits()), p0);
/// let p1: *mut i32 = it.next().unwrap();
/// assert_eq!(p1.to_bits() - p0.to_bits(), 4);
/// ```
#[unstable(feature = "ptr_to_from_bits", issue = "91126")]
pub fn to_bits(self) -> usize
/// Creates a pointer from its raw bits.
///
/// This is equivalent to `as *mut T`, but is more specific to enhance readability.
/// The inverse method is [`to_bits`](#method.to_bits-1).
///
/// # Examples
///
/// ```
/// #![feature(ptr_to_from_bits)]
/// use std::ptr::NonNull;
/// let dangling: *mut u8 = NonNull::dangling().as_ptr();
/// assert_eq!(<*mut u8>::from_bits(1), dangling);
/// ```
#[unstable(feature = "ptr_to_from_bits", issue = "91126")]
pub fn from_bits(bits: usize) -> Self
/// Gets the "address" portion of the pointer.
///
/// This is equivalent to `self as usize`, which semantically discards
/// *provenance* and *address-space* information. To properly restore that information,
/// use [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr].
///
/// On most platforms this will produce a value with the same bytes as the original
/// pointer, because all the bytes are dedicated to describing the address.
/// Platforms which need to store additional information in the pointer may
/// perform a change of representation to produce a value containing only the address
/// portion of the pointer. What that means is up to the platform to define.
///
/// This API and its claimed semantics are part of the Strict Provenance experiment,
/// see the [module documentation][crate::ptr] for details.
#[unstable(feature = "strict_provenance", issue = "95228")]
pub fn addr(self) -> usize
// FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
/// Creates a new pointer with the given address.
///
/// This performs the same operation as an `addr as ptr` cast, but copies
/// the *address-space* and *provenance* of `self` to the new pointer.
/// This allows us to dynamically preserve and propagate this important
/// information in a way that is otherwise impossible with a unary cast.
///
/// This is equivalent to using [`wrapping_offset`][pointer::wrapping_offset] to offset
/// `self` to the given address, and therefore has all the same capabilities and restrictions.
///
/// This API and its claimed semantics are part of the Strict Provenance experiment,
/// see the [module documentation][crate::ptr] for details.
#[unstable(feature = "strict_provenance", issue = "95228")]
pub fn with_addr(self, addr: usize) -> Self
// FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
//
// In the mean-time, this operation is defined to be "as if" it was
// a wrapping_offset, so we can emulate it as such. This should properly
// restore pointer provenance even under today's compiler.
let self_addr = self.addr() as isize;
let dest_addr = addr as isize;
let offset = dest_addr.wrapping_sub(self_addr);
// This is the canonical desugaring of this operation
self.cast::<u8>().wrapping_offset(offset).cast::<T>()
/// Creates a new pointer by mapping `self`'s address to a new one.
///
/// This is a convenience for [`with_addr`][pointer::with_addr], see that method for details.
///
/// This API and its claimed semantics are part of the Strict Provenance experiment,
/// see the [module documentation][crate::ptr] for details.
#[unstable(feature = "strict_provenance", issue = "95228")]
pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self
self.with_addr(f(self.addr()))
/// Decompose a (possibly wide) pointer into its address and metadata components.
///
/// The pointer can be later reconstructed with [`from_raw_parts_mut`].
#[unstable(feature = "ptr_metadata", issue = "81513")]
#[rustc_const_unstable(feature = "ptr_metadata", issue = "81513")]
pub const fn to_raw_parts(self) -> (*mut (), <T as super::Pointee>::Metadata) {
(self.cast(), super::metadata(self))
/// Returns `None` if the pointer is null, or else returns a shared reference to
/// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`]
/// must be used instead.
///
/// For the mutable counterpart see [`as_mut`].
///
/// [`as_uninit_ref`]: #method.as_uninit_ref-1
/// [`as_mut`]: #method.as_mut
///
/// # Safety
///
/// When calling this method, you have to ensure that *either* the pointer is null *or*
/// all of the following is true:
///
/// * The pointer must be properly aligned.
///
/// * It must be "dereferenceable" in the sense defined in [the module documentation].
///
/// * The pointer must point to an initialized instance of `T`.
///
/// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
///   In particular, for the duration of this lifetime, the memory the pointer points to must
///   not get mutated (except inside `UnsafeCell`).
///
/// This applies even if the result of this method is unused!
/// (The part about being initialized is not yet fully decided, but until
/// it is, the only safe approach is to ensure that they are indeed initialized.)
///
/// [the module documentation]: crate::ptr#safety
///
/// # Examples
///
/// ```
/// let ptr: *mut u8 = &mut 10u8 as *mut u8;
///
/// unsafe {
///     if let Some(val_back) = ptr.as_ref() {
///         println!("We got back the value: {val_back}!");
///     }
/// }
/// ```
///
/// # Null-unchecked version
///
/// If you are sure the pointer can never be null and are looking for some kind of
/// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can
/// dereference the pointer directly.
///
/// ```
/// let ptr: *mut u8 = &mut 10u8 as *mut u8;
///
/// unsafe {
///     let val_back = &*ptr;
///     println!("We got back the value: {val_back}!");
/// }
/// ```
#[stable(feature = "ptr_as_ref", since = "1.9.0")]
#[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> {
// SAFETY: the caller must guarantee that `self` is valid for a
// reference if it isn't null.
if self.is_null() { None } else { unsafe { Some(&*self) } }
/// Returns `None` if the pointer is null, or else returns a shared reference to
/// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
/// that the value has to be initialized.
///
/// For the mutable counterpart see [`as_uninit_mut`].
///
/// [`as_ref`]: #method.as_ref-1
/// [`as_uninit_mut`]: #method.as_uninit_mut
///
/// # Safety
///
/// When calling this method, you have to ensure that *either* the pointer is null *or*
/// all of the following is true:
///
/// * The pointer must be properly aligned.
///
/// * It must be "dereferenceable" in the sense defined in [the module documentation].
///
/// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
///   In particular, for the duration of this lifetime, the memory the pointer points to must
///   not get mutated (except inside `UnsafeCell`).
///
/// This applies even if the result of this method is unused!
///
/// [the module documentation]: crate::ptr#safety
///
/// # Examples
///
/// ```
/// #![feature(ptr_as_uninit)]
///
/// let ptr: *mut u8 = &mut 10u8 as *mut u8;
///
/// unsafe {
///     if let Some(val_back) = ptr.as_uninit_ref() {
///         println!("We got back the value: {}!", val_back.assume_init());
///     }
/// }
/// ```
#[unstable(feature = "ptr_as_uninit", issue = "75402")]
#[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
pub const unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
// SAFETY: the caller must guarantee that `self` meets all the
// requirements for a reference.
if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
/// Calculates the offset from a pointer.
///
/// `count` is in units of T; e.g., a `count` of 3 represents a pointer
/// offset of `3 * size_of::<T>()` bytes.
///
/// # Safety
///
/// If any of the following conditions are violated, the result is Undefined
/// Behavior:
///
/// * Both the starting and resulting pointer must be either in bounds or one
///   byte past the end of the same [allocated object].
///
/// * The computed offset, **in bytes**, cannot overflow an `isize`.
///
/// * The offset being in bounds cannot rely on "wrapping around" the address
///   space. That is, the infinite-precision sum, **in bytes** must fit in a usize.
///
/// The compiler and standard library generally tries to ensure allocations
/// never reach a size where an offset is a concern. For instance, `Vec`
/// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
/// `vec.as_ptr().add(vec.len())` is always safe.
///
/// Most platforms fundamentally can't even construct such an allocation.
/// For instance, no known 64-bit platform can ever serve a request
/// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
/// However, some 32-bit and 16-bit platforms may successfully serve a request for
/// more than `isize::MAX` bytes with things like Physical Address
/// Extension. As such, memory acquired directly from allocators or memory
/// mapped files *may* be too large to handle with this function.
///
/// Consider using [`wrapping_offset`] instead if these constraints are
/// difficult to satisfy. The only advantage of this method is that it
/// enables more aggressive compiler optimizations.
///
/// [`wrapping_offset`]: #method.wrapping_offset
/// [allocated object]: crate::ptr#allocated-object
///
/// # Examples
///
/// ```
/// let mut s = [1, 2, 3];
/// let ptr: *mut u32 = s.as_mut_ptr();
///
/// unsafe {
///     println!("{}", *ptr.offset(1));
///     println!("{}", *ptr.offset(2));
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
pub const unsafe fn offset(self, count: isize) -> *mut T
// SAFETY: the caller must uphold the safety contract for `offset`.
// The obtained pointer is valid for writes since the caller must
// guarantee that it points to the same allocated object as `self`.
unsafe { intrinsics::offset(self, count) as *mut T }
/// Calculates the offset from a pointer using wrapping arithmetic.
/// `count` is in units of T; e.g., a `count` of 3 represents a pointer
/// offset of `3 * size_of::<T>()` bytes.
///
/// # Safety
///
/// This operation itself is always safe, but using the resulting pointer is not.
///
/// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
/// be used to read or write other allocated objects.
///
/// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
/// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
/// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
/// `x` and `y` point into the same allocated object.
///
/// Compared to [`offset`], this method basically delays the requirement of staying within the
/// same allocated object: [`offset`] is immediate Undefined Behavior when crossing object
/// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
/// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
/// can be optimized better and is thus preferable in performance-sensitive code.
///
/// The delayed check only considers the value of the pointer that was dereferenced, not the
/// intermediate values used during the computation of the final result. For example,
/// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
/// words, leaving the allocated object and then re-entering it later is permitted.
///
/// [`offset`]: #method.offset
/// [allocated object]: crate::ptr#allocated-object
///
/// # Examples
///
/// ```
/// // Iterate using a raw pointer in increments of two elements
/// let mut data = [1u8, 2, 3, 4, 5];
/// let mut ptr: *mut u8 = data.as_mut_ptr();
/// let step = 2;
/// let end_rounded_up = ptr.wrapping_offset(6);
///
/// while ptr != end_rounded_up {
///     unsafe {
///         *ptr = 0;
///     }
///     ptr = ptr.wrapping_offset(step);
/// }
/// assert_eq!(&data, &[0, 2, 0, 4, 0]);
/// ```
#[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
pub const fn wrapping_offset(self, count: isize) -> *mut T
// SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
unsafe { intrinsics::arith_offset(self, count) as *mut T }
/// Returns `None` if the pointer is null, or else returns a unique reference to
/// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_mut`]
/// must be used instead.
///
/// For the shared counterpart see [`as_ref`].
///
/// [`as_uninit_mut`]: #method.as_uninit_mut
/// [`as_ref`]: #method.as_ref-1
///
/// # Safety
///
/// When calling this method, you have to ensure that *either* the pointer is null *or*
/// all of the following is true:
///
/// * The pointer must be properly aligned.
///
/// * It must be "dereferenceable" in the sense defined in [the module documentation].
///
/// * The pointer must point to an initialized instance of `T`.
///
/// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
///   In particular, for the duration of this lifetime, the memory the pointer points to must
///   not get accessed (read or written) through any other pointer.
///
/// This applies even if the result of this method is unused!
/// (The part about being initialized is not yet fully decided, but until
/// it is, the only safe approach is to ensure that they are indeed initialized.)
///
/// [the module documentation]: crate::ptr#safety
///
/// # Examples
///
/// ```
/// let mut s = [1, 2, 3];
/// let ptr: *mut u32 = s.as_mut_ptr();
/// let first_value = unsafe { ptr.as_mut().unwrap() };
/// *first_value = 4;
/// # assert_eq!(s, [4, 2, 3]);
/// println!("{s:?}"); // It'll print: "[4, 2, 3]".
/// ```
///
/// # Null-unchecked version
///
/// If you are sure the pointer can never be null and are looking for some kind of
/// `as_mut_unchecked` that returns the `&mut T` instead of `Option<&mut T>`, know that
/// you can dereference the pointer directly.
///
/// ```
/// let mut s = [1, 2, 3];
/// let ptr: *mut u32 = s.as_mut_ptr();
/// let first_value = unsafe { &mut *ptr };
/// *first_value = 4;
/// # assert_eq!(s, [4, 2, 3]);
/// println!("{s:?}"); // It'll print: "[4, 2, 3]".
/// ```
#[stable(feature = "ptr_as_ref", since = "1.9.0")]
#[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
pub const unsafe fn as_mut<'a>(self) -> Option<&'a mut T> {
// SAFETY: the caller must guarantee that `self` is valid for
// a mutable reference if it isn't null.
if self.is_null() { None } else { unsafe { Some(&mut *self) } }
/// Returns `None` if the pointer is null, or else returns a unique reference to
/// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
/// that the value has to be initialized.
///
/// For the shared counterpart see [`as_uninit_ref`].
///
/// [`as_mut`]: #method.as_mut
/// [`as_uninit_ref`]: #method.as_uninit_ref-1
///
/// # Safety
///
/// When calling this method, you have to ensure that *either* the pointer is null *or*
/// all of the following is true:
///
/// * The pointer must be properly aligned.
///
/// * It must be "dereferenceable" in the sense defined in [the module documentation].
///
/// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
///   In particular, for the duration of this lifetime, the memory the pointer points to must
///   not get accessed (read or written) through any other pointer.
///
/// This applies even if the result of this method is unused!
///
/// [the module documentation]: crate::ptr#safety
#[unstable(feature = "ptr_as_uninit", issue = "75402")]
#[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
pub const unsafe fn as_uninit_mut<'a>(self) -> Option<&'a mut MaybeUninit<T>>
// SAFETY: the caller must guarantee that `self` meets all the
// requirements for a reference.
if self.is_null() { None } else { Some(unsafe { &mut *(self as *mut MaybeUninit<T>) }) }
/// Returns whether two pointers are guaranteed to be equal.
///
/// At runtime this function behaves like `self == other`.
/// However, in some contexts (e.g., compile-time evaluation),
/// it is not always possible to determine equality of two pointers, so this function may
/// spuriously return `false` for pointers that later actually turn out to be equal.
/// But when it returns `true`, the pointers are guaranteed to be equal.
///
/// This function is the mirror of [`guaranteed_ne`], but not its inverse. There are pointer
/// comparisons for which both functions return `false`.
///
/// [`guaranteed_ne`]: #method.guaranteed_ne
///
/// The return value may change depending on the compiler version and unsafe code might not
/// rely on the result of this function for soundness. It is suggested to only use this function
/// for performance optimizations where spurious `false` return values by this function do not
/// affect the outcome, but just the performance.
/// The consequences of using this method to make runtime and compile-time code behave
/// differently have not been explored. This method should not be used to introduce such
/// differences, and it should also not be stabilized before we have a better understanding
/// of this issue.
#[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
#[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
pub const fn guaranteed_eq(self, other: *mut T) -> bool
intrinsics::ptr_guaranteed_eq(self as *const _, other as *const _)
624 /// Returns whether two pointers are guaranteed to be unequal.
626 /// At runtime this function behaves like `self != other`.
627 /// However, in some contexts (e.g., compile-time evaluation),
628 /// it is not always possible to determine the inequality of two pointers, so this function may
629 /// spuriously return `false` for pointers that later actually turn out to be unequal.
630 /// But when it returns `true`, the pointers are guaranteed to be unequal.
632 /// This function is the mirror of [`guaranteed_eq`], but not its inverse. There are pointer
633 /// comparisons for which both functions return `false`.
635 /// [`guaranteed_eq`]: #method.guaranteed_eq
637 /// The return value may change depending on the compiler version and unsafe code might not
638 /// rely on the result of this function for soundness. It is suggested to only use this function
639 /// for performance optimizations where spurious `false` return values by this function do not
640 /// affect the outcome, but just the performance.
641 /// The consequences of using this method to make runtime and compile-time code behave
642 /// differently have not been explored. This method should not be used to introduce such
643 /// differences, and it should also not be stabilized before we have a better understanding
645 #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
646 #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
648 pub const unsafe fn guaranteed_ne(self, other: *mut T) -> bool
652 intrinsics::ptr_guaranteed_ne(self as *const _, other as *const _)
/// Calculates the distance between two pointers. The returned value is in
/// units of T: the distance in bytes is divided by `mem::size_of::<T>()`.
///
/// This function is the inverse of [`offset`].
///
/// [`offset`]: #method.offset-1
///
/// # Safety
///
/// If any of the following conditions are violated, the result is Undefined
/// Behavior:
///
/// * Both the starting and other pointer must be either in bounds or one
///   byte past the end of the same [allocated object].
///
/// * Both pointers must be *derived from* a pointer to the same object.
///   (See below for an example.)
///
/// * The distance between the pointers, in bytes, must be an exact multiple
///   of the size of `T`.
///
/// * The distance between the pointers, **in bytes**, cannot overflow an `isize`.
///
/// * The distance being in bounds cannot rely on "wrapping around" the address space.
///
/// Rust types are never larger than `isize::MAX` and Rust allocations never wrap around the
/// address space, so two pointers within some value of any Rust type `T` will always satisfy
/// the last two conditions. The standard library also generally ensures that allocations
/// never reach a size where an offset is a concern. For instance, `Vec` and `Box` ensure they
/// never allocate more than `isize::MAX` bytes, so `ptr_into_vec.offset_from(vec.as_ptr())`
/// always satisfies the last two conditions.
///
/// Most platforms fundamentally can't even construct such a large allocation.
/// For instance, no known 64-bit platform can ever serve a request
/// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
/// However, some 32-bit and 16-bit platforms may successfully serve a request for
/// more than `isize::MAX` bytes with things like Physical Address
/// Extension. As such, memory acquired directly from allocators or memory
/// mapped files *may* be too large to handle with this function.
/// (Note that [`offset`] and [`add`] also have a similar limitation and hence cannot be used on
/// such large allocations either.)
///
/// [`add`]: #method.add
/// [allocated object]: crate::ptr#allocated-object
///
/// # Panics
///
/// This function panics if `T` is a Zero-Sized Type ("ZST").
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let mut a = [0; 5];
/// let ptr1: *mut i32 = &mut a[1];
/// let ptr2: *mut i32 = &mut a[3];
///
/// unsafe {
///     assert_eq!(ptr2.offset_from(ptr1), 2);
///     assert_eq!(ptr1.offset_from(ptr2), -2);
///     assert_eq!(ptr1.offset(2), ptr2);
///     assert_eq!(ptr2.offset(-2), ptr1);
/// }
/// ```
///
/// *Incorrect* usage:
///
/// ```rust,no_run
/// let ptr1 = Box::into_raw(Box::new(0u8));
/// let ptr2 = Box::into_raw(Box::new(1u8));
/// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
/// // Make ptr2_other an "alias" of ptr2, but derived from ptr1.
/// let ptr2_other = (ptr1 as *mut u8).wrapping_offset(diff);
/// assert_eq!(ptr2 as usize, ptr2_other as usize);
/// // Since ptr2_other and ptr2 are derived from pointers to different objects,
/// // computing their offset is undefined behavior, even though
/// // they point to the same address!
/// unsafe {
///     let zero = ptr2_other.offset_from(ptr2); // Undefined Behavior
/// }
/// ```
#[stable(feature = "ptr_offset_from", since = "1.47.0")]
#[rustc_const_unstable(feature = "const_ptr_offset_from", issue = "92980")]
pub const unsafe fn offset_from(self, origin: *const T) -> isize
// SAFETY: the caller must uphold the safety contract for `offset_from`.
unsafe { (self as *const T).offset_from(origin) }
/// Calculates the offset from a pointer (convenience for `.offset(count as isize)`).
///
/// `count` is in units of T; e.g., a `count` of 3 represents a pointer
/// offset of `3 * size_of::<T>()` bytes.
///
/// # Safety
///
/// If any of the following conditions are violated, the result is Undefined
/// Behavior:
///
/// * Both the starting and resulting pointer must be either in bounds or one
///   byte past the end of the same [allocated object].
///
/// * The computed offset, **in bytes**, cannot overflow an `isize`.
///
/// * The offset being in bounds cannot rely on "wrapping around" the address
///   space. That is, the infinite-precision sum must fit in a `usize`.
///
/// The compiler and standard library generally tries to ensure allocations
/// never reach a size where an offset is a concern. For instance, `Vec`
/// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
/// `vec.as_ptr().add(vec.len())` is always safe.
///
/// Most platforms fundamentally can't even construct such an allocation.
/// For instance, no known 64-bit platform can ever serve a request
/// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
/// However, some 32-bit and 16-bit platforms may successfully serve a request for
/// more than `isize::MAX` bytes with things like Physical Address
/// Extension. As such, memory acquired directly from allocators or memory
/// mapped files *may* be too large to handle with this function.
///
/// Consider using [`wrapping_add`] instead if these constraints are
/// difficult to satisfy. The only advantage of this method is that it
/// enables more aggressive compiler optimizations.
///
/// [`wrapping_add`]: #method.wrapping_add
/// [allocated object]: crate::ptr#allocated-object
///
/// # Examples
///
/// ```
/// let s: &str = "123";
/// let ptr: *const u8 = s.as_ptr();
///
/// unsafe {
///     println!("{}", *ptr.add(1) as char);
///     println!("{}", *ptr.add(2) as char);
/// }
/// ```
#[stable(feature = "pointer_methods", since = "1.26.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
pub const unsafe fn add(self, count: usize) -> Self
// SAFETY: the caller must uphold the safety contract for `offset`.
unsafe { self.offset(count as isize) }
/// Calculates the offset from a pointer (convenience for
/// `.offset((count as isize).wrapping_neg())`).
///
/// `count` is in units of T; e.g., a `count` of 3 represents a pointer
/// offset of `3 * size_of::<T>()` bytes.
///
/// # Safety
///
/// If any of the following conditions are violated, the result is Undefined
/// Behavior:
///
/// * Both the starting and resulting pointer must be either in bounds or one
///   byte past the end of the same [allocated object].
///
/// * The computed offset cannot exceed `isize::MAX` **bytes**.
///
/// * The offset being in bounds cannot rely on "wrapping around" the address
///   space. That is, the infinite-precision sum must fit in a usize.
///
/// The compiler and standard library generally tries to ensure allocations
/// never reach a size where an offset is a concern. For instance, `Vec`
/// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
/// `vec.as_ptr().add(vec.len()).sub(vec.len())` is always safe.
///
/// Most platforms fundamentally can't even construct such an allocation.
/// For instance, no known 64-bit platform can ever serve a request
/// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
/// However, some 32-bit and 16-bit platforms may successfully serve a request for
/// more than `isize::MAX` bytes with things like Physical Address
/// Extension. As such, memory acquired directly from allocators or memory
/// mapped files *may* be too large to handle with this function.
///
/// Consider using [`wrapping_sub`] instead if these constraints are
/// difficult to satisfy. The only advantage of this method is that it
/// enables more aggressive compiler optimizations.
///
/// [`wrapping_sub`]: #method.wrapping_sub
/// [allocated object]: crate::ptr#allocated-object
///
/// # Examples
///
/// ```
/// let s: &str = "123";
///
/// unsafe {
///     let end: *const u8 = s.as_ptr().add(3);
///     println!("{}", *end.sub(1) as char);
///     println!("{}", *end.sub(2) as char);
/// }
/// ```
#[stable(feature = "pointer_methods", since = "1.26.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
pub const unsafe fn sub(self, count: usize) -> Self
// SAFETY: the caller must uphold the safety contract for `offset`.
unsafe { self.offset((count as isize).wrapping_neg()) }
874 /// Calculates the offset from a pointer using wrapping arithmetic.
875 /// (convenience for `.wrapping_offset(count as isize)`)
877 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
878 /// offset of `3 * size_of::<T>()` bytes.
882 /// This operation itself is always safe, but using the resulting pointer is not.
884 /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
885 /// be used to read or write other allocated objects.
887 /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
888 /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
889 /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
890 /// `x` and `y` point into the same allocated object.
892 /// Compared to [`add`], this method basically delays the requirement of staying within the
893 /// same allocated object: [`add`] is immediate Undefined Behavior when crossing object
894 /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
895 /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
896 /// can be optimized better and is thus preferable in performance-sensitive code.
898 /// The delayed check only considers the value of the pointer that was dereferenced, not the
899 /// intermediate values used during the computation of the final result. For example,
900 /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
901 /// allocated object and then re-entering it later is permitted.
903 /// [`add`]: #method.add
904 /// [allocated object]: crate::ptr#allocated-object
911 /// // Iterate using a raw pointer in increments of two elements
912 /// let data = [1u8, 2, 3, 4, 5];
913 /// let mut ptr: *const u8 = data.as_ptr();
/// let step = 2;
915 /// let end_rounded_up = ptr.wrapping_add(6);
917 /// // This loop prints "1, 3, 5, "
918 /// while ptr != end_rounded_up {
920 /// print!("{}, ", *ptr);
922 /// ptr = ptr.wrapping_add(step);
925 #[stable(feature = "pointer_methods", since = "1.26.0")]
926 #[must_use = "returns a new pointer rather than modifying its argument"]
927 #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
929 pub const fn wrapping_add(self, count: usize) -> Self
933 self.wrapping_offset(count as isize)
936 /// Calculates the offset from a pointer using wrapping arithmetic.
937 /// (convenience for `.wrapping_offset((count as isize).wrapping_neg())`)
939 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
940 /// offset of `3 * size_of::<T>()` bytes.
944 /// This operation itself is always safe, but using the resulting pointer is not.
946 /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
947 /// be used to read or write other allocated objects.
949 /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
950 /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
951 /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
952 /// `x` and `y` point into the same allocated object.
954 /// Compared to [`sub`], this method basically delays the requirement of staying within the
955 /// same allocated object: [`sub`] is immediate Undefined Behavior when crossing object
956 /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
957 /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
958 /// can be optimized better and is thus preferable in performance-sensitive code.
960 /// The delayed check only considers the value of the pointer that was dereferenced, not the
961 /// intermediate values used during the computation of the final result. For example,
962 /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
963 /// allocated object and then re-entering it later is permitted.
965 /// [`sub`]: #method.sub
966 /// [allocated object]: crate::ptr#allocated-object
973 /// // Iterate using a raw pointer in increments of two elements (backwards)
974 /// let data = [1u8, 2, 3, 4, 5];
975 /// let mut ptr: *const u8 = data.as_ptr();
976 /// let start_rounded_down = ptr.wrapping_sub(2);
977 /// ptr = ptr.wrapping_add(4);
/// let step = 2;
979 /// // This loop prints "5, 3, 1, "
980 /// while ptr != start_rounded_down {
982 /// print!("{}, ", *ptr);
984 /// ptr = ptr.wrapping_sub(step);
987 #[stable(feature = "pointer_methods", since = "1.26.0")]
988 #[must_use = "returns a new pointer rather than modifying its argument"]
989 #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
991 pub const fn wrapping_sub(self, count: usize) -> Self
995 self.wrapping_offset((count as isize).wrapping_neg())
998 /// Reads the value from `self` without moving it. This leaves the
999 /// memory in `self` unchanged.
1001 /// See [`ptr::read`] for safety concerns and examples.
1003 /// [`ptr::read`]: crate::ptr::read()
1004 #[stable(feature = "pointer_methods", since = "1.26.0")]
1005 #[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
1007 pub const unsafe fn read(self) -> T
1011 // SAFETY: the caller must uphold the safety contract for `read`.
1012 unsafe { read(self) }
1015 /// Performs a volatile read of the value from `self` without moving it. This
1016 /// leaves the memory in `self` unchanged.
1018 /// Volatile operations are intended to act on I/O memory, and are guaranteed
1019 /// to not be elided or reordered by the compiler across other volatile
/// operations.
1022 /// See [`ptr::read_volatile`] for safety concerns and examples.
1024 /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
1025 #[stable(feature = "pointer_methods", since = "1.26.0")]
1027 pub unsafe fn read_volatile(self) -> T
1031 // SAFETY: the caller must uphold the safety contract for `read_volatile`.
1032 unsafe { read_volatile(self) }
1035 /// Reads the value from `self` without moving it. This leaves the
1036 /// memory in `self` unchanged.
1038 /// Unlike [`read`](#method.read), the pointer may be unaligned.
1040 /// See [`ptr::read_unaligned`] for safety concerns and examples.
1042 /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
1043 #[stable(feature = "pointer_methods", since = "1.26.0")]
1044 #[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
1046 pub const unsafe fn read_unaligned(self) -> T
1050 // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
1051 unsafe { read_unaligned(self) }
1054 /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1055 /// and destination may overlap.
1057 /// NOTE: this has the *same* argument order as [`ptr::copy`].
1059 /// See [`ptr::copy`] for safety concerns and examples.
1061 /// [`ptr::copy`]: crate::ptr::copy()
1062 #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
1063 #[stable(feature = "pointer_methods", since = "1.26.0")]
1065 pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
1069 // SAFETY: the caller must uphold the safety contract for `copy`.
1070 unsafe { copy(self, dest, count) }
1073 /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1074 /// and destination may *not* overlap.
1076 /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
1078 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1080 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1081 #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
1082 #[stable(feature = "pointer_methods", since = "1.26.0")]
1084 pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
1088 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1089 unsafe { copy_nonoverlapping(self, dest, count) }
1092 /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1093 /// and destination may overlap.
1095 /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
1097 /// See [`ptr::copy`] for safety concerns and examples.
1099 /// [`ptr::copy`]: crate::ptr::copy()
1100 #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
1101 #[stable(feature = "pointer_methods", since = "1.26.0")]
1103 pub const unsafe fn copy_from(self, src: *const T, count: usize)
1107 // SAFETY: the caller must uphold the safety contract for `copy`.
1108 unsafe { copy(src, self, count) }
1111 /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1112 /// and destination may *not* overlap.
1114 /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
1116 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1118 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1119 #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
1120 #[stable(feature = "pointer_methods", since = "1.26.0")]
1122 pub const unsafe fn copy_from_nonoverlapping(self, src: *const T, count: usize)
1126 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1127 unsafe { copy_nonoverlapping(src, self, count) }
1130 /// Executes the destructor (if any) of the pointed-to value.
1132 /// See [`ptr::drop_in_place`] for safety concerns and examples.
1134 /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place()
1135 #[stable(feature = "pointer_methods", since = "1.26.0")]
1137 pub unsafe fn drop_in_place(self) {
// Delegates to the free function `ptr::drop_in_place`.
1138 // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
1139 unsafe { drop_in_place(self) }
1142 /// Overwrites a memory location with the given value without reading or
1143 /// dropping the old value.
1145 /// See [`ptr::write`] for safety concerns and examples.
1147 /// [`ptr::write`]: crate::ptr::write()
1148 #[stable(feature = "pointer_methods", since = "1.26.0")]
1149 #[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")]
1151 pub const unsafe fn write(self, val: T)
// Delegates to the free function `ptr::write`.
1155 // SAFETY: the caller must uphold the safety contract for `write`.
1156 unsafe { write(self, val) }
1159 /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
1160 /// bytes of memory starting at `self` to `val`.
1162 /// See [`ptr::write_bytes`] for safety concerns and examples.
1164 /// [`ptr::write_bytes`]: crate::ptr::write_bytes()
1165 #[stable(feature = "pointer_methods", since = "1.26.0")]
1166 #[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")]
1168 pub const unsafe fn write_bytes(self, val: u8, count: usize)
// Delegates to the free function `ptr::write_bytes`.
1172 // SAFETY: the caller must uphold the safety contract for `write_bytes`.
1173 unsafe { write_bytes(self, val, count) }
1176 /// Performs a volatile write of a memory location with the given value without
1177 /// reading or dropping the old value.
1179 /// Volatile operations are intended to act on I/O memory, and are guaranteed
1180 /// to not be elided or reordered by the compiler across other volatile
/// operations.
1183 /// See [`ptr::write_volatile`] for safety concerns and examples.
1185 /// [`ptr::write_volatile`]: crate::ptr::write_volatile()
1186 #[stable(feature = "pointer_methods", since = "1.26.0")]
1188 pub unsafe fn write_volatile(self, val: T)
1192 // SAFETY: the caller must uphold the safety contract for `write_volatile`.
1193 unsafe { write_volatile(self, val) }
1196 /// Overwrites a memory location with the given value without reading or
1197 /// dropping the old value.
1199 /// Unlike [`write`](#method.write), the pointer may be unaligned.
1201 /// See [`ptr::write_unaligned`] for safety concerns and examples.
1203 /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned()
1204 #[stable(feature = "pointer_methods", since = "1.26.0")]
1205 #[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")]
1207 pub const unsafe fn write_unaligned(self, val: T)
1211 // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
1212 unsafe { write_unaligned(self, val) }
1215 /// Replaces the value at `self` with `src`, returning the old
1216 /// value, without dropping either.
1218 /// See [`ptr::replace`] for safety concerns and examples.
1220 /// [`ptr::replace`]: crate::ptr::replace()
1221 #[stable(feature = "pointer_methods", since = "1.26.0")]
1223 pub unsafe fn replace(self, src: T) -> T
// Delegates to the free function `ptr::replace`.
1227 // SAFETY: the caller must uphold the safety contract for `replace`.
1228 unsafe { replace(self, src) }
1231 /// Swaps the values at two mutable locations of the same type, without
1232 /// deinitializing either. They may overlap, unlike `mem::swap` which is
1233 /// otherwise equivalent.
1235 /// See [`ptr::swap`] for safety concerns and examples.
1237 /// [`ptr::swap`]: crate::ptr::swap()
1238 #[stable(feature = "pointer_methods", since = "1.26.0")]
1239 #[rustc_const_unstable(feature = "const_swap", issue = "83163")]
1241 pub const unsafe fn swap(self, with: *mut T)
// Delegates to the free function `ptr::swap`.
1245 // SAFETY: the caller must uphold the safety contract for `swap`.
1246 unsafe { swap(self, with) }
1249 /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
1252 /// If it is not possible to align the pointer, the implementation returns
1253 /// `usize::MAX`. It is permissible for the implementation to *always*
1254 /// return `usize::MAX`. Only your algorithm's performance can depend
1255 /// on getting a usable offset here, not its correctness.
1257 /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
1258 /// used with the `wrapping_add` method.
1260 /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
1261 /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
1262 /// the returned offset is correct in all terms other than alignment.
1266 /// The function panics if `align` is not a power-of-two.
1270 /// Accessing adjacent `u8` as `u16`
1273 /// # fn foo(n: usize) {
1274 /// # use std::mem::align_of;
1276 /// let x = [5u8, 6u8, 7u8, 8u8, 9u8];
1277 /// let ptr = x.as_ptr().add(n) as *const u8;
1278 /// let offset = ptr.align_offset(align_of::<u16>());
1279 /// if offset < x.len() - n - 1 {
1280 /// let u16_ptr = ptr.add(offset) as *const u16;
1281 /// assert_ne!(*u16_ptr, 500);
1283 /// // while the pointer can be aligned via `offset`, it would point
1284 /// // outside the allocation
1288 #[stable(feature = "align_offset", since = "1.36.0")]
1289 #[rustc_const_unstable(feature = "const_align_offset", issue = "90962")]
1290 pub const fn align_offset(self, align: usize) -> usize
1294 if !align.is_power_of_two() {
1295 panic!("align_offset: align is not a power-of-two");
1298 fn rt_impl<T>(p: *mut T, align: usize) -> usize {
1299 // SAFETY: `align` has been checked to be a power of 2 above
1300 unsafe { align_offset(p, align) }
1303 const fn ctfe_impl<T>(_: *mut T, _: usize) -> usize {
1308 // It is permissible for `align_offset` to always return `usize::MAX`,
1309 // algorithm correctness cannot depend on `align_offset` returning non-max values.
1311 // As such the behavior can't change after replacing `align_offset` with `usize::MAX`, only performance can.
1312 unsafe { intrinsics::const_eval_select((self, align), ctfe_impl, rt_impl) }
1316 #[cfg_attr(bootstrap, lang = "mut_slice_ptr")]
1318 /// Returns the length of a raw slice.
1320 /// The returned value is the number of **elements**, not the number of bytes.
1322 /// This function is safe, even when the raw slice cannot be cast to a slice
1323 /// reference because the pointer is null or unaligned.
1328 /// #![feature(slice_ptr_len)]
1331 /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
1332 /// assert_eq!(slice.len(), 3);
1335 #[unstable(feature = "slice_ptr_len", issue = "71146")]
1336 #[rustc_const_unstable(feature = "const_slice_ptr_len", issue = "71146")]
1337 pub const fn len(self) -> usize {
// The element count comes from the fat pointer's metadata, not from the
// pointed-to memory — that is why this is safe even for null/unaligned
// pointers (see doc above).
1341 /// Returns a raw pointer to the slice's buffer.
1343 /// This is equivalent to casting `self` to `*mut T`, but more type-safe.
1348 /// #![feature(slice_ptr_get)]
1351 /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
1352 /// assert_eq!(slice.as_mut_ptr(), ptr::null_mut());
1355 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1356 #[rustc_const_unstable(feature = "slice_ptr_get", issue = "74265")]
1357 pub const fn as_mut_ptr(self) -> *mut T {
// Discards the length metadata, keeping only the data pointer
// (equivalent to `self as *mut T`, per the doc above).
1361 /// Returns a raw pointer to an element or subslice, without doing bounds
1364 /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
1365 /// is *[undefined behavior]* even if the resulting pointer is not used.
1367 /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1372 /// #![feature(slice_ptr_get)]
1374 /// let x = &mut [1, 2, 4] as *mut [i32];
1377 /// assert_eq!(x.get_unchecked_mut(1), x.as_mut_ptr().add(1));
1380 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1381 #[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
1383 pub const unsafe fn get_unchecked_mut<I>(self, index: I) -> *mut I::Output
1385 I: ~const SliceIndex<[T]>,
1387 // SAFETY: the caller ensures that `self` is dereferenceable and `index` is in-bounds.
1388 unsafe { index.get_unchecked_mut(self) }
1391 /// Returns `None` if the pointer is null, or else returns a shared slice to
1392 /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
1393 /// that the value has to be initialized.
1395 /// For the mutable counterpart see [`as_uninit_slice_mut`].
1397 /// [`as_ref`]: #method.as_ref-1
1398 /// [`as_uninit_slice_mut`]: #method.as_uninit_slice_mut
1402 /// When calling this method, you have to ensure that *either* the pointer is null *or*
1403 /// all of the following is true:
1405 /// * The pointer must be [valid] for reads for `ptr.len() * mem::size_of::<T>()` many bytes,
1406 /// and it must be properly aligned. This means in particular:
1408 /// * The entire memory range of this slice must be contained within a single [allocated object]!
1409 /// Slices can never span across multiple allocated objects.
1411 /// * The pointer must be aligned even for zero-length slices. One
1412 /// reason for this is that enum layout optimizations may rely on references
1413 /// (including slices of any length) being aligned and non-null to distinguish
1414 /// them from other data. You can obtain a pointer that is usable as `data`
1415 /// for zero-length slices using [`NonNull::dangling()`].
1417 /// * The total size `ptr.len() * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1418 /// See the safety documentation of [`pointer::offset`].
1420 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1421 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
1422 /// In particular, for the duration of this lifetime, the memory the pointer points to must
1423 /// not get mutated (except inside `UnsafeCell`).
1425 /// This applies even if the result of this method is unused!
1427 /// See also [`slice::from_raw_parts`][].
1429 /// [valid]: crate::ptr#safety
1430 /// [allocated object]: crate::ptr#allocated-object
1432 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1433 #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
1434 pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
1438 // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
// `self.len()` only reads the fat-pointer metadata, so it never dereferences.
1439 Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
1443 /// Returns `None` if the pointer is null, or else returns a unique slice to
1444 /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
1445 /// that the value has to be initialized.
1447 /// For the shared counterpart see [`as_uninit_slice`].
1449 /// [`as_mut`]: #method.as_mut
1450 /// [`as_uninit_slice`]: #method.as_uninit_slice-1
1454 /// When calling this method, you have to ensure that *either* the pointer is null *or*
1455 /// all of the following is true:
1457 /// * The pointer must be [valid] for reads and writes for `ptr.len() * mem::size_of::<T>()`
1458 /// many bytes, and it must be properly aligned. This means in particular:
1460 /// * The entire memory range of this slice must be contained within a single [allocated object]!
1461 /// Slices can never span across multiple allocated objects.
1463 /// * The pointer must be aligned even for zero-length slices. One
1464 /// reason for this is that enum layout optimizations may rely on references
1465 /// (including slices of any length) being aligned and non-null to distinguish
1466 /// them from other data. You can obtain a pointer that is usable as `data`
1467 /// for zero-length slices using [`NonNull::dangling()`].
1469 /// * The total size `ptr.len() * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1470 /// See the safety documentation of [`pointer::offset`].
1472 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1473 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
1474 /// In particular, for the duration of this lifetime, the memory the pointer points to must
1475 /// not get accessed (read or written) through any other pointer.
1477 /// This applies even if the result of this method is unused!
1479 /// See also [`slice::from_raw_parts_mut`][].
1481 /// [valid]: crate::ptr#safety
1482 /// [allocated object]: crate::ptr#allocated-object
1484 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1485 #[rustc_const_unstable(feature = "const_ptr_as_ref", issue = "91822")]
1486 pub const unsafe fn as_uninit_slice_mut<'a>(self) -> Option<&'a mut [MaybeUninit<T>]> {
1490 // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
// `self.len()` only reads the fat-pointer metadata, so it never dereferences.
1491 Some(unsafe { slice::from_raw_parts_mut(self as *mut MaybeUninit<T>, self.len()) })
1496 // Equality comparisons for raw `*mut T` pointers.
1497 #[stable(feature = "rust1", since = "1.0.0")]
1498 impl<T: ?Sized> PartialEq for *mut T {
1500 fn eq(&self, other: &*mut T) -> bool {
1505 #[stable(feature = "rust1", since = "1.0.0")]
// Marker impl: `Eq` adds no methods on top of `PartialEq`.
1506 impl<T: ?Sized> Eq for *mut T {}
1508 #[stable(feature = "rust1", since = "1.0.0")]
// Total ordering for raw pointers.
1509 impl<T: ?Sized> Ord for *mut T {
1511 fn cmp(&self, other: &*mut T) -> Ordering {
1514 } else if self == other {
1522 #[stable(feature = "rust1", since = "1.0.0")]
1523 impl<T: ?Sized> PartialOrd for *mut T {
1525 fn partial_cmp(&self, other: &*mut T) -> Option<Ordering> {
1526 Some(self.cmp(other))
1530 fn lt(&self, other: &*mut T) -> bool {
1535 fn le(&self, other: &*mut T) -> bool {
1540 fn gt(&self, other: &*mut T) -> bool {
1545 fn ge(&self, other: &*mut T) -> bool {