1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 // FIXME: talk about offset, copy_memory, copy_nonoverlapping_memory
13 //! Raw, unsafe pointers, `*const T`, and `*mut T`.
15 //! *[See also the pointer primitive types](../../std/primitive.pointer.html).*
17 #![stable(feature = "rust1", since = "1.0.0")]
21 use ops::CoerceUnsized;
24 use marker::{PhantomData, Unsize};
28 use cmp::Ordering::{self, Less, Equal, Greater};
30 // FIXME #19649: intrinsic docs don't render, so these have no docs :(
32 #[stable(feature = "rust1", since = "1.0.0")]
33 pub use intrinsics::copy_nonoverlapping;
35 #[stable(feature = "rust1", since = "1.0.0")]
36 pub use intrinsics::copy;
38 #[stable(feature = "rust1", since = "1.0.0")]
39 pub use intrinsics::write_bytes;
// NOTE(review): the embedded original line numbers jump (41→43→45…), so this
// extraction dropped lines — including this function's closing brace. The code
// below is intentionally left byte-identical; verify against upstream.
41 /// Executes the destructor (if any) of the pointed-to value.
43 /// This has two use cases:
45 /// * It is *required* to use `drop_in_place` to drop unsized types like
46 /// trait objects, because they can't be read out onto the stack and
49 /// * It is friendlier to the optimizer to do this over `ptr::read` when
50 /// dropping manually allocated memory (e.g. when writing Box/Rc/Vec),
51 /// as the compiler doesn't need to prove that it's sound to elide the
54 /// # Undefined Behavior
56 /// This has all the same safety problems as `ptr::read` with respect to
57 /// invalid pointers, types, and double drops.
58 #[stable(feature = "drop_in_place", since = "1.8.0")]
59 #[lang="drop_in_place"]
60 #[allow(unconditional_recursion)]
61 pub unsafe fn drop_in_place<T: ?Sized>(to_drop: *mut T) {
62 // Code here does not matter - this is replaced by the
63 // real drop glue by the compiler.
64 drop_in_place(to_drop);
// NOTE(review): doc-comment lines were dropped by the extraction (67→74 jump);
// the one-line body itself is complete.
67 /// Creates a null raw pointer.
74 /// let p: *const i32 = ptr::null();
75 /// assert!(p.is_null());
78 #[stable(feature = "rust1", since = "1.0.0")]
79 pub const fn null<T>() -> *const T { 0 as *const T }
// NOTE(review): doc-comment lines were dropped by the extraction (81→88 jump);
// the one-line body itself is complete.
81 /// Creates a null mutable raw pointer.
88 /// let p: *mut i32 = ptr::null_mut();
89 /// assert!(p.is_null());
92 #[stable(feature = "rust1", since = "1.0.0")]
93 pub const fn null_mut<T>() -> *mut T { 0 as *mut T }
95 /// Swaps the values at two mutable locations of the same type, without
96 /// deinitializing either. They may overlap, unlike `mem::swap` which is
97 /// otherwise equivalent.
101 /// This function copies the memory through the raw pointers passed to it
104 /// Ensure that these pointers are valid before calling `swap`.
106 #[stable(feature = "rust1", since = "1.0.0")]
107 pub unsafe fn swap<T>(x: *mut T, y: *mut T) {
108 // Give ourselves some scratch space to work with
109 let mut tmp: T = mem::uninitialized();
112 copy_nonoverlapping(x, &mut tmp, 1);
113 copy(y, x, 1); // `x` and `y` may overlap
114 copy_nonoverlapping(&tmp, y, 1);
116 // y and t now point to the same thing, but we need to completely forget `tmp`
117 // because it's no longer relevant.
// NOTE(review): the extraction dropped lines here (121→125, 153→157 jumps),
// including this function's closing brace; code left byte-identical.
121 /// Swaps a sequence of values at two mutable locations of the same type.
125 /// The two arguments must each point to the beginning of `count` locations
126 /// of valid memory, and the two memory ranges must not overlap.
133 /// #![feature(swap_nonoverlapping)]
137 /// let mut x = [1, 2, 3, 4];
138 /// let mut y = [7, 8, 9];
141 /// ptr::swap_nonoverlapping(x.as_mut_ptr(), y.as_mut_ptr(), 2);
144 /// assert_eq!(x, [7, 8, 3, 4]);
145 /// assert_eq!(y, [1, 2, 9]);
148 #[unstable(feature = "swap_nonoverlapping", issue = "42818")]
149 pub unsafe fn swap_nonoverlapping<T>(x: *mut T, y: *mut T, count: usize) {
150 let x = x as *mut u8;
151 let y = y as *mut u8;
152 let len = mem::size_of::<T>() * count;
153 swap_nonoverlapping_bytes(x, y, len)
// NOTE(review): heavily truncated by the extraction — the `#[repr(simd)]`
// attribute that the cfg_attr on line 165/166 should apply, the `let mut i`
// initialization, the loop increment, the `rem` computation, and the closing
// braces are all missing (line-number jumps 166→168, 189→194, 195→198).
// Do not treat this body as compilable; verify against upstream.
157 unsafe fn swap_nonoverlapping_bytes(x: *mut u8, y: *mut u8, len: usize) {
158 // The approach here is to utilize simd to swap x & y efficiently. Testing reveals
159 // that swapping either 32 bytes or 64 bytes at a time is most efficient for intel
160 // Haswell E processors. LLVM is more able to optimize if we give a struct a
161 // #[repr(simd)], even if we don't actually use this struct directly.
163 // FIXME repr(simd) broken on emscripten and redox
164 // It's also broken on big-endian powerpc64 and s390x. #42778
165 #[cfg_attr(not(any(target_os = "emscripten", target_os = "redox",
166 target_endian = "big")),
168 struct Block(u64, u64, u64, u64);
169 struct UnalignedBlock(u64, u64, u64, u64);
171 let block_size = mem::size_of::<Block>();
173 // Loop through x & y, copying them `Block` at a time
174 // The optimizer should unroll the loop fully for most types
175 // N.B. We can't use a for loop as the `range` impl calls `mem::swap` recursively
177 while i + block_size <= len {
178 // Create some uninitialized memory as scratch space
179 // Declaring `t` here avoids aligning the stack when this loop is unused
180 let mut t: Block = mem::uninitialized();
181 let t = &mut t as *mut _ as *mut u8;
182 let x = x.offset(i as isize);
183 let y = y.offset(i as isize);
185 // Swap a block of bytes of x & y, using t as a temporary buffer
186 // This should be optimized into efficient SIMD operations where available
187 copy_nonoverlapping(x, t, block_size);
188 copy_nonoverlapping(y, x, block_size);
189 copy_nonoverlapping(t, y, block_size);
194 // Swap any remaining bytes
195 let mut t: UnalignedBlock = mem::uninitialized();
198 let t = &mut t as *mut _ as *mut u8;
199 let x = x.offset(i as isize);
200 let y = y.offset(i as isize);
202 copy_nonoverlapping(x, t, rem);
203 copy_nonoverlapping(y, x, rem);
204 copy_nonoverlapping(t, y, rem);
208 /// Replaces the value at `dest` with `src`, returning the old
209 /// value, without dropping either.
213 /// This is only unsafe because it accepts a raw pointer.
214 /// Otherwise, this operation is identical to `mem::replace`.
216 #[stable(feature = "rust1", since = "1.0.0")]
217 pub unsafe fn replace<T>(dest: *mut T, mut src: T) -> T {
218 mem::swap(&mut *dest, &mut src); // cannot overlap
222 /// Reads the value from `src` without moving it. This leaves the
223 /// memory in `src` unchanged.
227 /// Beyond accepting a raw pointer, this is unsafe because it semantically
228 /// moves the value out of `src` without preventing further usage of `src`.
229 /// If `T` is not `Copy`, then care must be taken to ensure that the value at
230 /// `src` is not used before the data is overwritten again (e.g. with `write`,
231 /// `zero_memory`, or `copy_memory`). Note that `*src = foo` counts as a use
232 /// because it will attempt to drop the value previously at `*src`.
234 /// The pointer must be aligned; use `read_unaligned` if that is not the case.
242 /// let y = &x as *const i32;
245 /// assert_eq!(std::ptr::read(y), 12);
249 #[stable(feature = "rust1", since = "1.0.0")]
250 pub unsafe fn read<T>(src: *const T) -> T {
251 let mut tmp: T = mem::uninitialized();
252 copy_nonoverlapping(src, &mut tmp, 1);
256 /// Reads the value from `src` without moving it. This leaves the
257 /// memory in `src` unchanged.
259 /// Unlike `read`, the pointer may be unaligned.
263 /// Beyond accepting a raw pointer, this is unsafe because it semantically
264 /// moves the value out of `src` without preventing further usage of `src`.
265 /// If `T` is not `Copy`, then care must be taken to ensure that the value at
266 /// `src` is not used before the data is overwritten again (e.g. with `write`,
267 /// `zero_memory`, or `copy_memory`). Note that `*src = foo` counts as a use
268 /// because it will attempt to drop the value previously at `*src`.
276 /// let y = &x as *const i32;
279 /// assert_eq!(std::ptr::read_unaligned(y), 12);
283 #[stable(feature = "ptr_unaligned", since = "1.17.0")]
284 pub unsafe fn read_unaligned<T>(src: *const T) -> T {
285 let mut tmp: T = mem::uninitialized();
286 copy_nonoverlapping(src as *const u8,
287 &mut tmp as *mut T as *mut u8,
288 mem::size_of::<T>());
// NOTE(review): the extraction dropped this function's closing brace and some
// example lines (309→317 jump); the intrinsic body is left byte-identical.
292 /// Overwrites a memory location with the given value without reading or
293 /// dropping the old value.
297 /// This operation is marked unsafe because it accepts a raw pointer.
299 /// It does not drop the contents of `dst`. This is safe, but it could leak
300 /// allocations or resources, so care must be taken not to overwrite an object
301 /// that should be dropped.
303 /// Additionally, it does not drop `src`. Semantically, `src` is moved into the
304 /// location pointed to by `dst`.
306 /// This is appropriate for initializing uninitialized memory, or overwriting
307 /// memory that has previously been `read` from.
309 /// The pointer must be aligned; use `write_unaligned` if that is not the case.
317 /// let y = &mut x as *mut i32;
321 /// std::ptr::write(y, z);
322 /// assert_eq!(std::ptr::read(y), 12);
326 #[stable(feature = "rust1", since = "1.0.0")]
327 pub unsafe fn write<T>(dst: *mut T, src: T) {
328 intrinsics::move_val_init(&mut *dst, src)
331 /// Overwrites a memory location with the given value without reading or
332 /// dropping the old value.
334 /// Unlike `write`, the pointer may be unaligned.
338 /// This operation is marked unsafe because it accepts a raw pointer.
340 /// It does not drop the contents of `dst`. This is safe, but it could leak
341 /// allocations or resources, so care must be taken not to overwrite an object
342 /// that should be dropped.
344 /// Additionally, it does not drop `src`. Semantically, `src` is moved into the
345 /// location pointed to by `dst`.
347 /// This is appropriate for initializing uninitialized memory, or overwriting
348 /// memory that has previously been `read` from.
356 /// let y = &mut x as *mut i32;
360 /// std::ptr::write_unaligned(y, z);
361 /// assert_eq!(std::ptr::read_unaligned(y), 12);
365 #[stable(feature = "ptr_unaligned", since = "1.17.0")]
366 pub unsafe fn write_unaligned<T>(dst: *mut T, src: T) {
367 copy_nonoverlapping(&src as *const T as *const u8,
369 mem::size_of::<T>());
// NOTE(review): closing brace and some doc/example lines dropped by the
// extraction (377→382, 401→409 jumps); intrinsic body left byte-identical.
373 /// Performs a volatile read of the value from `src` without moving it. This
374 /// leaves the memory in `src` unchanged.
376 /// Volatile operations are intended to act on I/O memory, and are guaranteed
377 /// to not be elided or reordered by the compiler across other volatile
382 /// Rust does not currently have a rigorously and formally defined memory model,
383 /// so the precise semantics of what "volatile" means here is subject to change
384 /// over time. That being said, the semantics will almost always end up pretty
385 /// similar to [C11's definition of volatile][c11].
387 /// The compiler shouldn't change the relative order or number of volatile
388 /// memory operations. However, volatile memory operations on zero-sized types
389 /// (e.g. if a zero-sized type is passed to `read_volatile`) are no-ops
390 /// and may be ignored.
392 /// [c11]: http://www.open-std.org/jtc1/sc22/wg14/www/docs/n1570.pdf
396 /// Beyond accepting a raw pointer, this is unsafe because it semantically
397 /// moves the value out of `src` without preventing further usage of `src`.
398 /// If `T` is not `Copy`, then care must be taken to ensure that the value at
399 /// `src` is not used before the data is overwritten again (e.g. with `write`,
400 /// `zero_memory`, or `copy_memory`). Note that `*src = foo` counts as a use
401 /// because it will attempt to drop the value previously at `*src`.
409 /// let y = &x as *const i32;
412 /// assert_eq!(std::ptr::read_volatile(y), 12);
416 #[stable(feature = "volatile", since = "1.9.0")]
417 pub unsafe fn read_volatile<T>(src: *const T) -> T {
418 intrinsics::volatile_load(src)
// NOTE(review): closing brace and some doc/example lines dropped by the
// extraction (425→430, 451→459 jumps); intrinsic body left byte-identical.
421 /// Performs a volatile write of a memory location with the given value without
422 /// reading or dropping the old value.
424 /// Volatile operations are intended to act on I/O memory, and are guaranteed
425 /// to not be elided or reordered by the compiler across other volatile
430 /// Rust does not currently have a rigorously and formally defined memory model,
431 /// so the precise semantics of what "volatile" means here is subject to change
432 /// over time. That being said, the semantics will almost always end up pretty
433 /// similar to [C11's definition of volatile][c11].
435 /// The compiler shouldn't change the relative order or number of volatile
436 /// memory operations. However, volatile memory operations on zero-sized types
437 /// (e.g. if a zero-sized type is passed to `write_volatile`) are no-ops
438 /// and may be ignored.
440 /// [c11]: http://www.open-std.org/jtc1/sc22/wg14/www/docs/n1570.pdf
444 /// This operation is marked unsafe because it accepts a raw pointer.
446 /// It does not drop the contents of `dst`. This is safe, but it could leak
447 /// allocations or resources, so care must be taken not to overwrite an object
448 /// that should be dropped.
450 /// This is appropriate for initializing uninitialized memory, or overwriting
451 /// memory that has previously been `read` from.
459 /// let y = &mut x as *mut i32;
463 /// std::ptr::write_volatile(y, z);
464 /// assert_eq!(std::ptr::read_volatile(y), 12);
468 #[stable(feature = "volatile", since = "1.9.0")]
469 pub unsafe fn write_volatile<T>(dst: *mut T, src: T) {
470 intrinsics::volatile_store(dst, src);
// NOTE(review): this inherent impl is heavily truncated by the extraction —
// the bodies of `is_null` and `as_ref`, the zero-size guard in `offset_to`,
// and several closing braces are missing (488→492, 520→528, 625→629 jumps).
// Code left byte-identical; only comment typos fixed below.
473 #[lang = "const_ptr"]
474 impl<T: ?Sized> *const T {
475 /// Returns `true` if the pointer is null.
482 /// let s: &str = "Follow the rabbit";
483 /// let ptr: *const u8 = s.as_ptr();
484 /// assert!(!ptr.is_null());
486 #[stable(feature = "rust1", since = "1.0.0")]
488 pub fn is_null(self) -> bool where T: Sized {
492 /// Returns `None` if the pointer is null, or else returns a reference to
493 /// the value wrapped in `Some`.
497 /// While this method and its mutable counterpart are useful for
498 /// null-safety, it is important to note that this is still an unsafe
499 /// operation because the returned value could be pointing to invalid
502 /// Additionally, the lifetime `'a` returned is arbitrarily chosen and does
503 /// not necessarily reflect the actual lifetime of the data.
510 /// let ptr: *const u8 = &10u8 as *const u8;
513 /// if let Some(val_back) = ptr.as_ref() {
514 /// println!("We got back the value: {}!", val_back);
518 #[stable(feature = "ptr_as_ref", since = "1.9.0")]
520 pub unsafe fn as_ref<'a>(self) -> Option<&'a T> where T: Sized {
528 /// Calculates the offset from a pointer. `count` is in units of T; e.g. a
529 /// `count` of 3 represents a pointer offset of `3 * size_of::<T>()` bytes.
533 /// Both the starting and resulting pointer must be either in bounds or one
534 /// byte past the end of an allocated object. If either pointer is out of
535 /// bounds or arithmetic overflow occurs then
536 /// any further use of the returned value will result in undefined behavior.
543 /// let s: &str = "123";
544 /// let ptr: *const u8 = s.as_ptr();
547 /// println!("{}", *ptr.offset(1) as char);
548 /// println!("{}", *ptr.offset(2) as char);
551 #[stable(feature = "rust1", since = "1.0.0")]
553 pub unsafe fn offset(self, count: isize) -> *const T where T: Sized {
554 intrinsics::offset(self, count)
557 /// Calculates the offset from a pointer using wrapping arithmetic.
558 /// `count` is in units of T; e.g. a `count` of 3 represents a pointer
559 /// offset of `3 * size_of::<T>()` bytes.
563 /// The resulting pointer does not need to be in bounds, but it is
564 /// potentially hazardous to dereference (which requires `unsafe`).
566 /// Always use `.offset(count)` instead when possible, because `offset`
567 /// allows the compiler to optimize better.
574 /// // Iterate using a raw pointer in increments of two elements
575 /// let data = [1u8, 2, 3, 4, 5];
576 /// let mut ptr: *const u8 = data.as_ptr();
578 /// let end_rounded_up = ptr.wrapping_offset(6);
580 /// // This loop prints "1, 3, 5, "
581 /// while ptr != end_rounded_up {
583 /// print!("{}, ", *ptr);
585 /// ptr = ptr.wrapping_offset(step);
588 #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
590 pub fn wrapping_offset(self, count: isize) -> *const T where T: Sized {
592 intrinsics::arith_offset(self, count)
596 /// Calculates the distance between two pointers. The returned value is in
597 /// units of T: the distance in bytes is divided by `mem::size_of::<T>()`.
599 /// If the address difference between the two pointers is not a multiple of
600 /// `mem::size_of::<T>()` then the result of the division is rounded towards
603 /// This function returns `None` if `T` is a zero-sized type.
610 /// #![feature(offset_to)]
614 /// let ptr1: *const i32 = &a[1];
615 /// let ptr2: *const i32 = &a[3];
616 /// assert_eq!(ptr1.offset_to(ptr2), Some(2));
617 /// assert_eq!(ptr2.offset_to(ptr1), Some(-2));
618 /// assert_eq!(unsafe { ptr1.offset(2) }, ptr2);
619 /// assert_eq!(unsafe { ptr2.offset(-2) }, ptr1);
622 #[unstable(feature = "offset_to", issue = "41079")]
624 pub fn offset_to(self, other: *const T) -> Option<isize> where T: Sized {
625 let size = mem::size_of::<T>();
629 let diff = (other as isize).wrapping_sub(self as isize);
630 Some(diff / size as isize)
// NOTE(review): this inherent impl is heavily truncated by the extraction —
// the `#[lang = "mut_ptr"]` attribute, the bodies of `is_null`, `as_ref`, and
// `as_mut`, the zero-size guard in `offset_to`, and several closing braces
// are missing (650→654, 682→690, 779→787, 816→820 jumps). Code left
// byte-identical; only comment typos fixed below.
636 impl<T: ?Sized> *mut T {
637 /// Returns `true` if the pointer is null.
644 /// let mut s = [1, 2, 3];
645 /// let ptr: *mut u32 = s.as_mut_ptr();
646 /// assert!(!ptr.is_null());
648 #[stable(feature = "rust1", since = "1.0.0")]
650 pub fn is_null(self) -> bool where T: Sized {
654 /// Returns `None` if the pointer is null, or else returns a reference to
655 /// the value wrapped in `Some`.
659 /// While this method and its mutable counterpart are useful for
660 /// null-safety, it is important to note that this is still an unsafe
661 /// operation because the returned value could be pointing to invalid
664 /// Additionally, the lifetime `'a` returned is arbitrarily chosen and does
665 /// not necessarily reflect the actual lifetime of the data.
672 /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
675 /// if let Some(val_back) = ptr.as_ref() {
676 /// println!("We got back the value: {}!", val_back);
680 #[stable(feature = "ptr_as_ref", since = "1.9.0")]
682 pub unsafe fn as_ref<'a>(self) -> Option<&'a T> where T: Sized {
690 /// Calculates the offset from a pointer. `count` is in units of T; e.g. a
691 /// `count` of 3 represents a pointer offset of `3 * size_of::<T>()` bytes.
695 /// The offset must be in-bounds of the object, or one-byte-past-the-end.
696 /// Otherwise `offset` invokes Undefined Behavior, regardless of whether
697 /// the pointer is used.
704 /// let mut s = [1, 2, 3];
705 /// let ptr: *mut u32 = s.as_mut_ptr();
708 /// println!("{}", *ptr.offset(1));
709 /// println!("{}", *ptr.offset(2));
712 #[stable(feature = "rust1", since = "1.0.0")]
714 pub unsafe fn offset(self, count: isize) -> *mut T where T: Sized {
715 intrinsics::offset(self, count) as *mut T
718 /// Calculates the offset from a pointer using wrapping arithmetic.
719 /// `count` is in units of T; e.g. a `count` of 3 represents a pointer
720 /// offset of `3 * size_of::<T>()` bytes.
724 /// The resulting pointer does not need to be in bounds, but it is
725 /// potentially hazardous to dereference (which requires `unsafe`).
727 /// Always use `.offset(count)` instead when possible, because `offset`
728 /// allows the compiler to optimize better.
735 /// // Iterate using a raw pointer in increments of two elements
736 /// let mut data = [1u8, 2, 3, 4, 5];
737 /// let mut ptr: *mut u8 = data.as_mut_ptr();
739 /// let end_rounded_up = ptr.wrapping_offset(6);
741 /// while ptr != end_rounded_up {
745 /// ptr = ptr.wrapping_offset(step);
747 /// assert_eq!(&data, &[0, 2, 0, 4, 0]);
749 #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
751 pub fn wrapping_offset(self, count: isize) -> *mut T where T: Sized {
753 intrinsics::arith_offset(self, count) as *mut T
757 /// Returns `None` if the pointer is null, or else returns a mutable
758 /// reference to the value wrapped in `Some`.
762 /// As with `as_ref`, this is unsafe because it cannot verify the validity
763 /// of the returned pointer, nor can it ensure that the lifetime `'a`
764 /// returned is indeed a valid lifetime for the contained data.
771 /// let mut s = [1, 2, 3];
772 /// let ptr: *mut u32 = s.as_mut_ptr();
773 /// let first_value = unsafe { ptr.as_mut().unwrap() };
774 /// *first_value = 4;
775 /// println!("{:?}", s); // It'll print: "[4, 2, 3]".
777 #[stable(feature = "ptr_as_ref", since = "1.9.0")]
779 pub unsafe fn as_mut<'a>(self) -> Option<&'a mut T> where T: Sized {
787 /// Calculates the distance between two pointers. The returned value is in
788 /// units of T: the distance in bytes is divided by `mem::size_of::<T>()`.
790 /// If the address difference between the two pointers is not a multiple of
791 /// `mem::size_of::<T>()` then the result of the division is rounded towards
794 /// This function returns `None` if `T` is a zero-sized type.
801 /// #![feature(offset_to)]
804 /// let mut a = [0; 5];
805 /// let ptr1: *mut i32 = &mut a[1];
806 /// let ptr2: *mut i32 = &mut a[3];
807 /// assert_eq!(ptr1.offset_to(ptr2), Some(2));
808 /// assert_eq!(ptr2.offset_to(ptr1), Some(-2));
809 /// assert_eq!(unsafe { ptr1.offset(2) }, ptr2);
810 /// assert_eq!(unsafe { ptr2.offset(-2) }, ptr1);
813 #[unstable(feature = "offset_to", issue = "41079")]
815 pub fn offset_to(self, other: *const T) -> Option<isize> where T: Sized {
816 let size = mem::size_of::<T>();
820 let diff = (other as isize).wrapping_sub(self as isize);
821 Some(diff / size as isize)
// NOTE(review): the `#[inline]` attributes and impl closing braces were
// dropped by the extraction (828→830, 830→833 jumps); left byte-identical.
826 // Equality for pointers
827 #[stable(feature = "rust1", since = "1.0.0")]
828 impl<T: ?Sized> PartialEq for *const T {
830 fn eq(&self, other: &*const T) -> bool { *self == *other }
833 #[stable(feature = "rust1", since = "1.0.0")]
834 impl<T: ?Sized> Eq for *const T {}
836 #[stable(feature = "rust1", since = "1.0.0")]
837 impl<T: ?Sized> PartialEq for *mut T {
839 fn eq(&self, other: &*mut T) -> bool { *self == *other }
842 #[stable(feature = "rust1", since = "1.0.0")]
843 impl<T: ?Sized> Eq for *mut T {}
845 /// Compare raw pointers for equality.
847 /// This is the same as using the `==` operator, but less generic:
848 /// the arguments have to be `*const T` raw pointers,
849 /// not anything that implements `PartialEq`.
851 /// This can be used to compare `&T` references (which coerce to `*const T` implicitly)
852 /// by their address rather than comparing the values they point to
853 /// (which is what the `PartialEq for &T` implementation does).
861 /// let other_five = 5;
862 /// let five_ref = &five;
863 /// let same_five_ref = &five;
864 /// let other_five_ref = &other_five;
866 /// assert!(five_ref == same_five_ref);
867 /// assert!(five_ref == other_five_ref);
869 /// assert!(ptr::eq(five_ref, same_five_ref));
870 /// assert!(!ptr::eq(five_ref, other_five_ref));
872 #[stable(feature = "ptr_eq", since = "1.17.0")]
874 pub fn eq<T: ?Sized>(a: *const T, b: *const T) -> bool {
// NOTE(review): the `*self` bodies and closing braces of these Clone impls
// were dropped by the extraction (882→887, 891→896 jumps); left byte-identical.
878 #[stable(feature = "rust1", since = "1.0.0")]
880 impl<T: ?Sized> Clone for *const T {
882 fn clone(&self) -> *const T {
887 #[stable(feature = "rust1", since = "1.0.0")]
889 impl<T: ?Sized> Clone for *mut T {
891 fn clone(&self) -> *mut T {
// NOTE(review): this macro generates Clone/PartialEq/Eq/PartialOrd/Ord/Hash/
// Pointer/Debug impls for one fn-pointer type per invocation. The extraction
// dropped method bodies (`*self` in clone), closing braces, and the macro's
// own closing `}` (903→908, 945→949 jumps); left byte-identical.
896 // Impls for function pointers
897 macro_rules! fnptr_impls_safety_abi {
898 ($FnTy: ty, $($Arg: ident),*) => {
899 #[stable(feature = "rust1", since = "1.0.0")]
901 impl<Ret, $($Arg),*> Clone for $FnTy {
903 fn clone(&self) -> Self {
908 #[stable(feature = "fnptr_impls", since = "1.4.0")]
909 impl<Ret, $($Arg),*> PartialEq for $FnTy {
911 fn eq(&self, other: &Self) -> bool {
912 *self as usize == *other as usize
916 #[stable(feature = "fnptr_impls", since = "1.4.0")]
917 impl<Ret, $($Arg),*> Eq for $FnTy {}
919 #[stable(feature = "fnptr_impls", since = "1.4.0")]
920 impl<Ret, $($Arg),*> PartialOrd for $FnTy {
922 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
923 (*self as usize).partial_cmp(&(*other as usize))
927 #[stable(feature = "fnptr_impls", since = "1.4.0")]
928 impl<Ret, $($Arg),*> Ord for $FnTy {
930 fn cmp(&self, other: &Self) -> Ordering {
931 (*self as usize).cmp(&(*other as usize))
935 #[stable(feature = "fnptr_impls", since = "1.4.0")]
936 impl<Ret, $($Arg),*> hash::Hash for $FnTy {
937 fn hash<HH: hash::Hasher>(&self, state: &mut HH) {
938 state.write_usize(*self as usize)
942 #[stable(feature = "fnptr_impls", since = "1.4.0")]
943 impl<Ret, $($Arg),*> fmt::Pointer for $FnTy {
944 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
945 fmt::Pointer::fmt(&(*self as *const ()), f)
949 #[stable(feature = "fnptr_impls", since = "1.4.0")]
950 impl<Ret, $($Arg),*> fmt::Debug for $FnTy {
951 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
952 fmt::Pointer::fmt(&(*self as *const ()), f)
// NOTE(review): dispatches one arity to all six safety/ABI combinations
// (variadic C ABIs only for arity >= 1). The macro's closing braces were
// dropped by the extraction (965→968 jump); left byte-identical.
958 macro_rules! fnptr_impls_args {
959 ($($Arg: ident),+) => {
960 fnptr_impls_safety_abi! { extern "Rust" fn($($Arg),*) -> Ret, $($Arg),* }
961 fnptr_impls_safety_abi! { extern "C" fn($($Arg),*) -> Ret, $($Arg),* }
962 fnptr_impls_safety_abi! { extern "C" fn($($Arg),* , ...) -> Ret, $($Arg),* }
963 fnptr_impls_safety_abi! { unsafe extern "Rust" fn($($Arg),*) -> Ret, $($Arg),* }
964 fnptr_impls_safety_abi! { unsafe extern "C" fn($($Arg),*) -> Ret, $($Arg),* }
965 fnptr_impls_safety_abi! { unsafe extern "C" fn($($Arg),* , ...) -> Ret, $($Arg),* }
968 // No variadic functions with 0 parameters
969 fnptr_impls_safety_abi! { extern "Rust" fn() -> Ret, }
970 fnptr_impls_safety_abi! { extern "C" fn() -> Ret, }
971 fnptr_impls_safety_abi! { unsafe extern "Rust" fn() -> Ret, }
972 fnptr_impls_safety_abi! { unsafe extern "C" fn() -> Ret, }
// Instantiate the impls for arities 0 through 12.
976 fnptr_impls_args! { }
977 fnptr_impls_args! { A }
978 fnptr_impls_args! { A, B }
979 fnptr_impls_args! { A, B, C }
980 fnptr_impls_args! { A, B, C, D }
981 fnptr_impls_args! { A, B, C, D, E }
982 fnptr_impls_args! { A, B, C, D, E, F }
983 fnptr_impls_args! { A, B, C, D, E, F, G }
984 fnptr_impls_args! { A, B, C, D, E, F, G, H }
985 fnptr_impls_args! { A, B, C, D, E, F, G, H, I }
986 fnptr_impls_args! { A, B, C, D, E, F, G, H, I, J }
987 fnptr_impls_args! { A, B, C, D, E, F, G, H, I, J, K }
988 fnptr_impls_args! { A, B, C, D, E, F, G, H, I, J, K, L }
// NOTE(review): the `Less`/`Equal`/`Greater` match arms of both `cmp` bodies
// and multiple closing braces were dropped by the extraction (994→997,
// 997→1005 jumps); left byte-identical.
990 // Comparison for pointers
991 #[stable(feature = "rust1", since = "1.0.0")]
992 impl<T: ?Sized> Ord for *const T {
994 fn cmp(&self, other: &*const T) -> Ordering {
997 } else if self == other {
1005 #[stable(feature = "rust1", since = "1.0.0")]
1006 impl<T: ?Sized> PartialOrd for *const T {
1008 fn partial_cmp(&self, other: &*const T) -> Option<Ordering> {
1009 Some(self.cmp(other))
1013 fn lt(&self, other: &*const T) -> bool { *self < *other }
1016 fn le(&self, other: &*const T) -> bool { *self <= *other }
1019 fn gt(&self, other: &*const T) -> bool { *self > *other }
1022 fn ge(&self, other: &*const T) -> bool { *self >= *other }
1025 #[stable(feature = "rust1", since = "1.0.0")]
1026 impl<T: ?Sized> Ord for *mut T {
1028 fn cmp(&self, other: &*mut T) -> Ordering {
1031 } else if self == other {
1039 #[stable(feature = "rust1", since = "1.0.0")]
1040 impl<T: ?Sized> PartialOrd for *mut T {
1042 fn partial_cmp(&self, other: &*mut T) -> Option<Ordering> {
1043 Some(self.cmp(other))
1047 fn lt(&self, other: &*mut T) -> bool { *self < *other }
1050 fn le(&self, other: &*mut T) -> bool { *self <= *other }
1053 fn gt(&self, other: &*mut T) -> bool { *self > *other }
1056 fn ge(&self, other: &*mut T) -> bool { *self >= *other }
// NOTE(review): the `issue = "27730")]` continuation of the unstable
// attribute on line 1080 and the struct's closing brace were dropped by the
// extraction (1080→1082 jump); left byte-identical.
1059 /// A wrapper around a raw non-null `*mut T` that indicates that the possessor
1060 /// of this wrapper owns the referent. Useful for building abstractions like
1061 /// `Box<T>`, `Vec<T>`, `String`, and `HashMap<K, V>`.
1063 /// Unlike `*mut T`, `Unique<T>` behaves "as if" it were an instance of `T`.
1064 /// It implements `Send`/`Sync` if `T` is `Send`/`Sync`. It also implies
1065 /// the kind of strong aliasing guarantees an instance of `T` can expect:
1066 /// the referent of the pointer should not be modified without a unique path to
1067 /// its owning Unique.
1069 /// If you're uncertain of whether it's correct to use `Unique` for your purposes,
1070 /// consider using `Shared`, which has weaker semantics.
1072 /// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
1073 /// is never dereferenced. This is so that enums may use this forbidden value
1074 /// as a discriminant -- `Option<Unique<T>>` has the same size as `Unique<T>`.
1075 /// However the pointer may still dangle if it isn't dereferenced.
1077 /// Unlike `*mut T`, `Unique<T>` is covariant over `T`. This should always be correct
1078 /// for any type which upholds Unique's aliasing requirements.
1079 #[allow(missing_debug_implementations)]
1080 #[unstable(feature = "unique", reason = "needs an RFC to flesh out design",
1082 pub struct Unique<T: ?Sized> {
1083 pointer: NonZero<*const T>,
1084 // NOTE: this marker has no consequences for variance, but is necessary
1085 // for dropck to understand that we logically own a `T`.
1087 // For details, see:
1088 // https://github.com/rust-lang/rfcs/blob/master/text/0769-sound-generic-drop.md#phantom-data
1089 _marker: PhantomData<T>,
// NOTE(review): these two unsafe impls are complete as shown.
1092 /// `Unique` pointers are `Send` if `T` is `Send` because the data they
1093 /// reference is unaliased. Note that this aliasing invariant is
1094 /// unenforced by the type system; the abstraction using the
1095 /// `Unique` must enforce it.
1096 #[unstable(feature = "unique", issue = "27730")]
1097 unsafe impl<T: Send + ?Sized> Send for Unique<T> { }
1099 /// `Unique` pointers are `Sync` if `T` is `Sync` because the data they
1100 /// reference is unaliased. Note that this aliasing invariant is
1101 /// unenforced by the type system; the abstraction using the
1102 /// `Unique` must enforce it.
1103 #[unstable(feature = "unique", issue = "27730")]
1104 unsafe impl<T: Sync + ?Sized> Sync for Unique<T> { }
1106 #[unstable(feature = "unique", issue = "27730")]
1107 impl<T: Sized> Unique<T> {
1108 /// Creates a new `Unique` that is dangling, but well-aligned.
1110 /// This is useful for initializing types which lazily allocate, like
1111 /// `Vec::new` does.
1112 pub fn empty() -> Self {
1114 let ptr = mem::align_of::<T>() as *mut T;
1115 Unique::new_unchecked(ptr)
// NOTE(review): the bodies of `as_ref`/`as_mut` (presumably `&*self.as_ptr()`
// style — confirm against upstream) and several closing braces were dropped
// by the extraction (1146→1150, 1155→1160 jumps); left byte-identical.
1120 #[unstable(feature = "unique", issue = "27730")]
1121 impl<T: ?Sized> Unique<T> {
1122 /// Creates a new `Unique`.
1126 /// `ptr` must be non-null.
1127 pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
1128 Unique { pointer: NonZero::new_unchecked(ptr), _marker: PhantomData }
1131 /// Creates a new `Unique` if `ptr` is non-null.
1132 pub fn new(ptr: *mut T) -> Option<Self> {
1133 NonZero::new(ptr as *const T).map(|nz| Unique { pointer: nz, _marker: PhantomData })
1136 /// Acquires the underlying `*mut` pointer.
1137 pub fn as_ptr(self) -> *mut T {
1138 self.pointer.get() as *mut T
1141 /// Dereferences the content.
1143 /// The resulting lifetime is bound to self so this behaves "as if"
1144 /// it were actually an instance of T that is getting borrowed. If a longer
1145 /// (unbound) lifetime is needed, use `&*my_ptr.ptr()`.
1146 pub unsafe fn as_ref(&self) -> &T {
1150 /// Mutably dereferences the content.
1152 /// The resulting lifetime is bound to self so this behaves "as if"
1153 /// it were actually an instance of T that is getting borrowed. If a longer
1154 /// (unbound) lifetime is needed, use `&mut *my_ptr.ptr()`.
1155 pub unsafe fn as_mut(&mut self) -> &mut T {
// NOTE(review): the body of `clone` and several closing braces were dropped
// by the extraction (1162→1167, 1176→1180, 1183→1187 jumps); left
// byte-identical.
1160 #[unstable(feature = "unique", issue = "27730")]
1161 impl<T: ?Sized> Clone for Unique<T> {
1162 fn clone(&self) -> Self {
1167 #[unstable(feature = "unique", issue = "27730")]
1168 impl<T: ?Sized> Copy for Unique<T> { }
1170 #[unstable(feature = "unique", issue = "27730")]
1171 impl<T: ?Sized, U: ?Sized> CoerceUnsized<Unique<U>> for Unique<T> where T: Unsize<U> { }
1173 #[unstable(feature = "unique", issue = "27730")]
1174 impl<T: ?Sized> fmt::Pointer for Unique<T> {
1175 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1176 fmt::Pointer::fmt(&self.as_ptr(), f)
1180 #[unstable(feature = "unique", issue = "27730")]
1181 impl<'a, T: ?Sized> From<&'a mut T> for Unique<T> {
1182 fn from(reference: &'a mut T) -> Self {
1183 Unique { pointer: NonZero::from(reference), _marker: PhantomData }
1187 #[unstable(feature = "unique", issue = "27730")]
1188 impl<'a, T: ?Sized> From<&'a T> for Unique<T> {
1189 fn from(reference: &'a T) -> Self {
1190 Unique { pointer: NonZero::from(reference), _marker: PhantomData }
1194 /// A wrapper around a raw `*mut T` that indicates that the possessor
1195 /// of this wrapper has shared ownership of the referent. Useful for
1196 /// building abstractions like `Rc<T>`, `Arc<T>`, or doubly-linked lists, which
1197 /// internally use aliased raw pointers to manage the memory that they own.
1199 /// This is similar to `Unique`, except that it doesn't make any aliasing
1200 /// guarantees, and doesn't derive Send and Sync. Note that unlike `&T`,
1201 /// Shared has no special mutability requirements. Shared may mutate data
1202 /// aliased by other Shared pointers. More precise rules require Rust to
1203 /// develop an actual aliasing model.
1205 /// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
1206 /// is never dereferenced. This is so that enums may use this forbidden value
1207 /// as a discriminant -- `Option<Shared<T>>` has the same size as `Shared<T>`.
1208 /// However the pointer may still dangle if it isn't dereferenced.
1210 /// Unlike `*mut T`, `Shared<T>` is covariant over `T`. If this is incorrect
1211 /// for your use case, you should include some PhantomData in your type to
1212 /// provide invariance, such as `PhantomData<Cell<T>>` or `PhantomData<&'a mut T>`.
1213 /// Usually this won't be necessary; covariance is correct for Rc, Arc, and LinkedList
1214 /// because they provide a public API that follows the normal shared XOR mutable
1216 #[allow(missing_debug_implementations)]
1217 #[unstable(feature = "shared", reason = "needs an RFC to flesh out design",
1219 pub struct Shared<T: ?Sized> {
1220 pointer: NonZero<*const T>,
1221 // NOTE: this marker has no consequences for variance, but is necessary
1222 // for dropck to understand that we logically own a `T`.
1224 // For details, see:
1225 // https://github.com/rust-lang/rfcs/blob/master/text/0769-sound-generic-drop.md#phantom-data
1226 _marker: PhantomData<T>,
/// `Shared` pointers are not `Send` because the data they reference may be aliased.
// NB: This impl is unnecessary, but should provide better error messages.
// (The `NonZero<*const T>` field already makes the type `!Send` by default.)
#[unstable(feature = "shared", issue = "27730")]
impl<T: ?Sized> !Send for Shared<T> { }
/// `Shared` pointers are not `Sync` because the data they reference may be aliased.
// NB: This impl is unnecessary, but should provide better error messages.
// (The `NonZero<*const T>` field already makes the type `!Sync` by default.)
#[unstable(feature = "shared", issue = "27730")]
impl<T: ?Sized> !Sync for Shared<T> { }
1239 #[unstable(feature = "shared", issue = "27730")]
1240 impl<T: Sized> Shared<T> {
1241 /// Creates a new `Shared` that is dangling, but well-aligned.
1243 /// This is useful for initializing types which lazily allocate, like
1244 /// `Vec::new` does.
1245 pub fn empty() -> Self {
1247 let ptr = mem::align_of::<T>() as *mut T;
1248 Shared::new_unchecked(ptr)
1253 #[unstable(feature = "shared", issue = "27730")]
1254 impl<T: ?Sized> Shared<T> {
1255 /// Creates a new `Shared`.
1259 /// `ptr` must be non-null.
1260 pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
1261 Shared { pointer: NonZero::new_unchecked(ptr), _marker: PhantomData }
1264 /// Creates a new `Shared` if `ptr` is non-null.
1265 pub fn new(ptr: *mut T) -> Option<Self> {
1266 NonZero::new(ptr as *const T).map(|nz| Shared { pointer: nz, _marker: PhantomData })
1269 /// Acquires the underlying `*mut` pointer.
1270 pub fn as_ptr(self) -> *mut T {
1271 self.pointer.get() as *mut T
1274 /// Dereferences the content.
1276 /// The resulting lifetime is bound to self so this behaves "as if"
1277 /// it were actually an instance of T that is getting borrowed. If a longer
1278 /// (unbound) lifetime is needed, use `&*my_ptr.ptr()`.
1279 pub unsafe fn as_ref(&self) -> &T {
1283 /// Mutably dereferences the content.
1285 /// The resulting lifetime is bound to self so this behaves "as if"
1286 /// it were actually an instance of T that is getting borrowed. If a longer
1287 /// (unbound) lifetime is needed, use `&mut *my_ptr.ptr_mut()`.
1288 pub unsafe fn as_mut(&mut self) -> &mut T {
1292 /// Acquires the underlying pointer as a `*mut` pointer.
1293 #[rustc_deprecated(since = "1.19", reason = "renamed to `as_ptr` for ergonomics/consistency")]
1294 #[unstable(feature = "shared", issue = "27730")]
1295 pub unsafe fn as_mut_ptr(&self) -> *mut T {
1300 #[unstable(feature = "shared", issue = "27730")]
1301 impl<T: ?Sized> Clone for Shared<T> {
1302 fn clone(&self) -> Self {
#[unstable(feature = "shared", issue = "27730")]
// A bitwise copy of the wrapped pointer and the zero-sized marker is a valid
// `Shared`, so `Copy` is sound here.
impl<T: ?Sized> Copy for Shared<T> { }
#[unstable(feature = "shared", issue = "27730")]
// Allows unsizing coercions such as `Shared<[T; N]>` -> `Shared<[T]>` or
// `Shared<Concrete>` -> `Shared<Trait>`, mirroring raw-pointer coercions.
impl<T: ?Sized, U: ?Sized> CoerceUnsized<Shared<U>> for Shared<T> where T: Unsize<U> { }
1313 #[unstable(feature = "shared", issue = "27730")]
1314 impl<T: ?Sized> fmt::Pointer for Shared<T> {
1315 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1316 fmt::Pointer::fmt(&self.as_ptr(), f)
1320 #[unstable(feature = "shared", issue = "27730")]
1321 impl<T: ?Sized> From<Unique<T>> for Shared<T> {
1322 fn from(unique: Unique<T>) -> Self {
1323 Shared { pointer: unique.pointer, _marker: PhantomData }
1327 #[unstable(feature = "shared", issue = "27730")]
1328 impl<'a, T: ?Sized> From<&'a mut T> for Shared<T> {
1329 fn from(reference: &'a mut T) -> Self {
1330 Shared { pointer: NonZero::from(reference), _marker: PhantomData }
1334 #[unstable(feature = "shared", issue = "27730")]
1335 impl<'a, T: ?Sized> From<&'a T> for Shared<T> {
1336 fn from(reference: &'a T) -> Self {
1337 Shared { pointer: NonZero::from(reference), _marker: PhantomData }