1 use core::cell::RefCell;
2 use core::num::NonZeroUsize;
5 use std::fmt::{Debug, Display};
8 fn test_const_from_raw_parts() {
// Const-eval round trip: rebuilding SLICE from its raw parts yields an
// equal slice at compile time.
9 const SLICE: &[u8] = &[1, 2, 3, 4];
10 const FROM_RAW: &[u8] = unsafe { &*slice_from_raw_parts(SLICE.as_ptr(), SLICE.len()) };
11 assert_eq!(SLICE, FROM_RAW);
// Runtime check: a length-2 raw-parts slice over the same base pointer
// equals the source's 2-element prefix.
13 let slice = &[1, 2, 3, 4, 5];
14 let from_raw = unsafe { &*slice_from_raw_parts(slice.as_ptr(), 2) };
15 assert_eq!(&slice[..2], from_raw);
// NOTE(review): the enclosing test fn's header and some statements are
// elided in this excerpt. This fragment reads and writes a `Pair` through
// a raw pointer cast to `*mut isize`; the first assert shows the cast
// pointer observes `fst` (10).
25 let mut p = Pair { fst: 10, snd: 20 };
26 let pptr: *mut Pair = &mut p;
27 let iptr: *mut isize = pptr as *mut isize;
28 assert_eq!(*iptr, 10);
// A write through `iptr` (elided here) is visible through both the raw
// view and the original binding.
30 assert_eq!(*iptr, 30);
31 assert_eq!(p.fst, 30);
// Whole-struct write through the raw `*mut Pair` updates both fields.
33 *pptr = Pair { fst: 50, snd: 60 };
34 assert_eq!(*iptr, 50);
35 assert_eq!(p.fst, 50);
36 assert_eq!(p.snd, 60);
// `ptr::copy` one element at a time between two u16 buffers; each assert
// pins exactly which destination slots have been overwritten so far.
38 let v0 = vec![32000u16, 32001u16, 32002u16];
39 let mut v1 = vec![0u16, 0u16, 0u16];
// v0[1] -> v1[1]
41 copy(v0.as_ptr().offset(1), v1.as_mut_ptr().offset(1), 1);
42 assert!((v1[0] == 0u16 && v1[1] == 32001u16 && v1[2] == 0u16));
// v0[2] -> v1[0]
43 copy(v0.as_ptr().offset(2), v1.as_mut_ptr(), 1);
44 assert!((v1[0] == 32002u16 && v1[1] == 32001u16 && v1[2] == 0u16));
// v0[0] -> v1[2]
45 copy(v0.as_ptr(), v1.as_mut_ptr().offset(2), 1);
46 assert!((v1[0] == 32002u16 && v1[1] == 32001u16 && v1[2] == 32000u16));
// Null-pointer checks (enclosing test fn header elided in this excerpt).
52 let p: *const isize = null();
// `wrapping_offset` from null yields a non-null (though still invalid) address.
55 let q = p.wrapping_offset(1);
56 assert!(!q.is_null());
58 let mp: *mut isize = null_mut();
59 assert!(mp.is_null());
61 let mq = mp.wrapping_offset(1);
62 assert!(!mq.is_null());
64 // Pointers to unsized types -- slices
// Fat slice pointers: null-ness is decided by the data address, not the length.
65 let s: &mut [u8] = &mut [1, 2, 3];
66 let cs: *const [u8] = s;
67 assert!(!cs.is_null());
69 let ms: *mut [u8] = s;
70 assert!(!ms.is_null());
// Even empty slices have a non-null (dangling) data address.
72 let cz: *const [u8] = &[];
73 assert!(!cz.is_null());
75 let mz: *mut [u8] = &mut [];
76 assert!(!mz.is_null());
78 let ncs: *const [u8] = null::<[u8; 3]>();
79 assert!(ncs.is_null());
81 let nms: *mut [u8] = null_mut::<[u8; 3]>();
82 assert!(nms.is_null());
84 // Pointers to unsized types -- trait objects
85 let ci: *const dyn ToString = &3;
86 assert!(!ci.is_null());
88 let mi: *mut dyn ToString = &mut 3;
89 assert!(!mi.is_null());
91 let nci: *const dyn ToString = null::<isize>();
92 assert!(nci.is_null());
94 let nmi: *mut dyn ToString = null_mut::<isize>();
95 assert!(nmi.is_null());
// `as_ref` checks: None for null, Some(&value) otherwise (enclosing test
// fn header elided in this excerpt).
101 let p: *const isize = null();
102 assert_eq!(p.as_ref(), None);
104 let q: *const isize = &2;
105 assert_eq!(q.as_ref().unwrap(), &2);
107 let p: *mut isize = null_mut();
108 assert_eq!(p.as_ref(), None);
110 let q: *mut isize = &mut 2;
111 assert_eq!(q.as_ref().unwrap(), &2);
113 // Lifetime inference
// `u` is declared on an elided line; presumably `let u = 2isize` — the
// assert below implies it holds 2.
116 let p = &u as *const isize;
117 assert_eq!(p.as_ref().unwrap(), &2);
120 // Pointers to unsized types -- slices
121 let s: &mut [u8] = &mut [1, 2, 3];
122 let cs: *const [u8] = s;
123 assert_eq!(cs.as_ref(), Some(&*s));
125 let ms: *mut [u8] = s;
126 assert_eq!(ms.as_ref(), Some(&*s));
// Empty-but-non-null fat pointers still produce Some(empty slice).
128 let cz: *const [u8] = &[];
129 assert_eq!(cz.as_ref(), Some(&[][..]));
131 let mz: *mut [u8] = &mut [];
132 assert_eq!(mz.as_ref(), Some(&[][..]));
134 let ncs: *const [u8] = null::<[u8; 3]>();
135 assert_eq!(ncs.as_ref(), None);
137 let nms: *mut [u8] = null_mut::<[u8; 3]>();
138 assert_eq!(nms.as_ref(), None);
140 // Pointers to unsized types -- trait objects
141 let ci: *const dyn ToString = &3;
142 assert!(ci.as_ref().is_some());
144 let mi: *mut dyn ToString = &mut 3;
145 assert!(mi.as_ref().is_some());
147 let nci: *const dyn ToString = null::<isize>();
148 assert!(nci.as_ref().is_none());
150 let nmi: *mut dyn ToString = null_mut::<isize>();
151 assert!(nmi.as_ref().is_none());
// `as_mut` checks, mirroring the `as_ref` cases above for `*mut` pointers
// (enclosing test fn header elided in this excerpt).
158 let p: *mut isize = null_mut();
159 assert!(p.as_mut() == None);
161 let q: *mut isize = &mut 2;
162 assert!(q.as_mut().unwrap() == &mut 2);
164 // Lifetime inference
// `u` is declared on an elided line; the assert implies it holds 2.
167 let p = &mut u as *mut isize;
168 assert!(p.as_mut().unwrap() == &mut 2);
171 // Pointers to unsized types -- slices
172 let s: &mut [u8] = &mut [1, 2, 3];
173 let ms: *mut [u8] = s;
174 assert_eq!(ms.as_mut(), Some(&mut [1, 2, 3][..]));
176 let mz: *mut [u8] = &mut [];
177 assert_eq!(mz.as_mut(), Some(&mut [][..]));
179 let nms: *mut [u8] = null_mut::<[u8; 3]>();
180 assert_eq!(nms.as_mut(), None);
182 // Pointers to unsized types -- trait objects
183 let mi: *mut dyn ToString = &mut 3;
184 assert!(mi.as_mut().is_some());
186 let nmi: *mut dyn ToString = null_mut::<isize>();
187 assert!(nmi.as_mut().is_none());
192 fn test_ptr_addition() {
// Walk a const pointer from the start of `xs` to one-past-the-end
// (the read loop itself is elided in this excerpt).
194 let xs = vec![5; 16];
195 let mut ptr = xs.as_ptr();
196 let end = ptr.offset(16);
// Mutable variant: advance `m_ptr` over `xs_mut` (declared on an elided
// line) until it reaches `m_end`; the per-element write is elided, but
// the final assert shows each 5 became 10.
204 let mut m_ptr = xs_mut.as_mut_ptr();
205 let m_end = m_ptr.offset(16);
207 while m_ptr < m_end {
209 m_ptr = m_ptr.offset(1);
212 assert!(xs_mut == vec![10; 16]);
217 fn test_ptr_subtraction() {
// Read elements by positive offsets from the base pointer; the loop
// producing `idx` is elided in this excerpt.
219 let xs = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
221 let ptr = xs.as_ptr();
224 assert_eq!(*(ptr.offset(idx as isize)), idx as isize);
// Walk backwards with negative offsets from the last element to the
// start; the loop body mutating elements is elided, but the final assert
// shows every element was doubled.
229 let m_start = xs_mut.as_mut_ptr();
230 let mut m_ptr = m_start.offset(9);
234 if m_ptr == m_start {
237 m_ptr = m_ptr.offset(-1);
240 assert_eq!(xs_mut, [0, 2, 4, 6, 8, 10, 12, 14, 16, 18]);
245 fn test_set_memory() {
// `write_bytes` fills all 20 bytes of the array with the value 5.
246 let mut xs = [0u8; 20];
247 let ptr = xs.as_mut_ptr();
249 write_bytes(ptr, 5u8, xs.len());
251 assert!(xs == [5u8; 20]);
255 fn test_set_memory_const() {
// Same as `test_set_memory`, but performed inside a const initializer to
// exercise `write_bytes` in const evaluation (the `unsafe` block and the
// tail returning `xs` are elided in this excerpt).
256 const XS: [u8; 20] = {
257 let mut xs = [0u8; 20];
258 let ptr = xs.as_mut_ptr();
260 ptr.write_bytes(5u8, xs.len());
265 assert!(XS == [5u8; 20]);
269 fn test_unsized_nonnull() {
// Build a NonNull to an unsized slice by casting a shared slice pointer
// to *mut; the pointer is only ever read through.
270 let xs: &[i32] = &[1, 2, 3];
271 let ptr = unsafe { NonNull::new_unchecked(xs as *const [i32] as *mut [i32]) };
272 let ys = unsafe { ptr.as_ref() };
273 let zs: &[i32] = &[1, 2, 3];
// (the comparison of `ys` against `zs` is elided in this excerpt)
278 fn test_const_nonnull_new() {
// `NonNull::new` rejects the null pointer...
280 assert!(NonNull::new(core::ptr::null_mut::<()>()).is_none());
// ...and accepts a real reference; writes through the NonNull are
// visible through the original place.
282 let value = &mut 0u32;
283 let mut ptr = NonNull::new(value).unwrap();
284 unsafe { *ptr.as_mut() = 42 };
286 let reference = unsafe { &*ptr.as_ref() };
287 assert!(*reference == *value);
288 assert!(*reference == 42);
293 #[cfg(unix)] // printf may not be available on other platforms
294 #[allow(deprecated)] // For SipHasher
295 pub fn test_variadic_fnptr() {
297 use core::hash::{Hash, SipHasher};
299 // This needs to use the correct function signature even though it isn't called as some
300 // codegen backends make it UB to declare a function with multiple conflicting signatures
301 // (like LLVM) while others straight up return an error (like Cranelift).
302 fn printf(_: *const ffi::c_char, ...) -> ffi::c_int;
304 let p: unsafe extern "C" fn(*const ffi::c_char, ...) -> ffi::c_int = printf;
// `Hash::hash` returns `()`, so the assert only checks both hash calls
// run; `q`'s binding is elided in this excerpt — presumably a copy of `p`.
308 let mut s = SipHasher::new();
309 assert_eq!(p.hash(&mut s), q.hash(&mut s));
313 fn write_unaligned_drop() {
// Log of `Dropper` payloads dropped so far (thread-local access via
// `DROPS.with` below).
315 static DROPS: RefCell<Vec<u32>> = RefCell::new(Vec::new());
320 impl Drop for Dropper {
322 DROPS.with(|d| d.borrow_mut().push(self.0));
// `write_unaligned` overwrites `t` WITHOUT running the old value's
// destructor: the final assert sees only payload 0 in the log, never the
// original Dropper(1). (`c`'s binding is elided — presumably Dropper(0).)
328 let mut t = Dropper(1);
330 write_unaligned(&mut t, c);
333 DROPS.with(|d| assert_eq!(*d.borrow(), [0]));
337 fn align_offset_zst() {
338 // For pointers of stride = 0, the pointer is already aligned or it cannot be aligned at
339 // all, because no amount of elements will align the pointer.
// `p` iterates over powers of two (loop header elided): an address equal
// to the alignment is already aligned (offset 0); one past it can never
// be fixed up for a ZST, so `align_offset` returns usize::MAX (!0).
342 assert_eq!((p as *const ()).align_offset(p), 0);
344 assert_eq!(((p + 1) as *const ()).align_offset(p), !0);
346 p = (p + 1).next_power_of_two();
351 fn align_offset_stride1() {
352 // For pointers of stride = 1, the pointer can always be aligned. The offset is equal to
// For each power-of-two `align` (outer loop elided) and every address up
// to 2*align, the expected offset is the distance to the next multiple
// of `align` (0 if already aligned).
356 for ptr in 1..2 * align {
357 let expected = ptr % align;
358 let offset = if expected == 0 { 0 } else { align - expected };
360 (ptr as *const u8).align_offset(align),
362 "ptr = {}, align = {}, size = 1",
367 align = (align + 1).next_power_of_two();
372 fn align_offset_weird_strides() {
// Test types with "weird" (non-power-of-two) sizes; A3..A10 cover sizes
// 3 through 10 — only a few of the struct definitions are visible in
// this excerpt, the rest (and their #[repr] attributes) are elided.
381 struct A7(u32, u16, u8);
385 struct A9(u32, u32, u8);
387 struct A10(u32, u32, u16);
// Compares `align_offset` against a brute-force search for the first
// element index whose address is aligned; returns whether they disagreed.
389 unsafe fn test_weird_stride<T>(ptr: *const T, align: usize) -> bool {
390 let numptr = ptr as usize;
391 let mut expected = usize::MAX;
392 // Naive but definitely correct way to find the *first* aligned element of stride::<T>.
394 if (numptr + el * ::std::mem::size_of::<T>()) % align == 0 {
399 let got = ptr.align_offset(align);
402 "aligning {:p} (with stride of {}) to {}, expected {}, got {}",
404 ::std::mem::size_of::<T>(),
414 // For pointers of stride != 1, we verify the algorithm against the naivest possible
// Smaller bound under Miri to keep the test tractable.
419 let limit = if cfg!(miri) { 32 } else { 1024 };
420 while align < limit {
421 for ptr in 1usize..4 * align {
// `x` accumulates any mismatch reported by `test_weird_stride` (its
// declaration and the final check on it are elided in this excerpt).
423 x |= test_weird_stride::<A3>(ptr as *const A3, align);
424 x |= test_weird_stride::<A4>(ptr as *const A4, align);
425 x |= test_weird_stride::<A5>(ptr as *const A5, align);
426 x |= test_weird_stride::<A6>(ptr as *const A6, align);
427 x |= test_weird_stride::<A7>(ptr as *const A7, align);
428 x |= test_weird_stride::<A8>(ptr as *const A8, align);
429 x |= test_weird_stride::<A9>(ptr as *const A9, align);
430 x |= test_weird_stride::<A10>(ptr as *const A10, align);
433 align = (align + 1).next_power_of_two();
// `offset_from` / `offset` consistency between two elements of one array
// `a` (declared on an elided line; enclosing test fn header also elided):
// the two operations are exact inverses in element units.
441 let ptr1: *mut i32 = &mut a[1];
442 let ptr2: *mut i32 = &mut a[3];
444 assert_eq!(ptr2.offset_from(ptr1), 2);
445 assert_eq!(ptr1.offset_from(ptr2), -2);
446 assert_eq!(ptr1.offset(2), ptr2);
447 assert_eq!(ptr2.offset(-2), ptr1);
// Generic tuple struct whose second field may be unsized.
454 struct Pair<A, B: ?Sized>(A, B);
// `ptr::metadata` checks (enclosing test fn header elided in this
// excerpt). Sized types, extern types, and their projections all have
// `()` metadata:
458 let () = metadata(&());
459 let () = metadata(&Unit);
460 let () = metadata(&4_u32);
461 let () = metadata(&String::new());
462 let () = metadata(&Some(4_u32));
463 let () = metadata(&ptr_metadata);
464 let () = metadata(&|| {});
465 let () = metadata(&[4, 7]);
466 let () = metadata(&(4, String::new()));
467 let () = metadata(&Pair(4, String::new()));
468 let () = metadata(0 as *const Extern);
469 let () = metadata(0 as *const <&u32 as std::ops::Deref>::Target);
// Slices and str carry their length as metadata:
471 assert_eq!(metadata("foo"), 3_usize);
472 assert_eq!(metadata(&[4, 7][..]), 2_usize);
// Structs/tuples with an unsized tail inherit the tail's metadata:
474 let dst_tuple: &(bool, [u8]) = &(true, [0x66, 0x6F, 0x6F]);
475 let dst_struct: &Pair<bool, [u8]> = &Pair(true, [0x66, 0x6F, 0x6F]);
476 assert_eq!(metadata(dst_tuple), 3_usize);
477 assert_eq!(metadata(dst_struct), 3_usize);
// Transmute the [u8] tails to str ("foo" is 0x66 0x6F 0x6F); metadata
// (the length) is preserved.
479 let dst_tuple: &(bool, str) = std::mem::transmute(dst_tuple);
480 let dst_struct: &Pair<bool, str> = std::mem::transmute(dst_struct);
481 assert_eq!(&dst_tuple.1, "foo");
482 assert_eq!(&dst_struct.1, "foo");
483 assert_eq!(metadata(dst_tuple), 3_usize);
484 assert_eq!(metadata(dst_struct), 3_usize);
// Trait objects carry DynMetadata (a vtable pointer):
487 let vtable_1: DynMetadata<dyn Debug> = metadata(&4_u16 as &dyn Debug);
488 let vtable_2: DynMetadata<dyn Display> = metadata(&4_u16 as &dyn Display);
489 let vtable_3: DynMetadata<dyn Display> = metadata(&4_u32 as &dyn Display);
490 let vtable_4: DynMetadata<dyn Display> = metadata(&(true, 7_u32) as &(bool, dyn Display));
491 let vtable_5: DynMetadata<dyn Display> =
492 metadata(&Pair(true, 7_u32) as &Pair<bool, dyn Display>);
493 let address_1: *const () = std::mem::transmute(vtable_1);
494 let address_2: *const () = std::mem::transmute(vtable_2);
495 let address_3: *const () = std::mem::transmute(vtable_3);
496 let address_4: *const () = std::mem::transmute(vtable_4);
497 let address_5: *const () = std::mem::transmute(vtable_5);
499 // Different trait => different vtable pointer
500 assert_ne!(address_1, address_2);
501 // Different erased type => different vtable pointer
502 assert_ne!(address_2, address_3);
503 // Same erased type and same trait => same vtable pointer
504 assert_eq!(address_3, address_4);
505 assert_eq!(address_3, address_5);
// Compile-time check that `Pointee::Metadata` satisfies its declared
// trait bounds, including for compiler-synthesized impls.
510 fn ptr_metadata_bounds() {
511 fn metadata_eq_method_address<T: ?Sized>() -> usize {
512 // The `Metadata` associated type has an `Ord` bound, so this is valid:
513 <<T as Pointee>::Metadata as PartialEq>::eq as usize
515 // "Synthetic" trait impls generated by the compiler like those of `Pointee`
516 // are not checked for bounds of associated type.
517 // So with a buggy libcore we could have both:
518 // * `<dyn Display as Pointee>::Metadata == DynMetadata`
519 // * `DynMetadata: !PartialEq`
520 // … and cause an ICE here:
521 metadata_eq_method_address::<dyn Display>();
523 // For this reason, let’s check here that bounds are satisfied:
525 let _ = static_assert_expected_bounds_for_metadata::<()>;
526 let _ = static_assert_expected_bounds_for_metadata::<usize>;
527 let _ = static_assert_expected_bounds_for_metadata::<DynMetadata<dyn Display>>;
528 fn _static_assert_associated_type<T: ?Sized>() {
529 let _ = static_assert_expected_bounds_for_metadata::<<T as Pointee>::Metadata>;
// Generic fn whose `where` clause IS the assertion — it only compiles if
// `Meta` satisfies every listed bound.
532 fn static_assert_expected_bounds_for_metadata<Meta>()
534 // Keep this in sync with the associated type in `library/core/src/ptr/metadata.rs`
535 Meta: Copy + Send + Sync + Ord + std::hash::Hash + Unpin,
// DynMetadata introspection (enclosing test fn header and the struct's
// attributes are elided in this excerpt; the size-64 / align-32 asserts
// below imply a `#[repr(align(32))]`-style attribute on `Something`,
// padding 47 bytes up to 64 — TODO confirm against the full file).
544 struct Something([u8; 47]);
546 let value = Something([0; 47]);
547 let trait_object: &dyn Debug = &value;
548 let meta = metadata(trait_object);
// Vtable-reported size/align/layout must match the concrete type's.
550 assert_eq!(meta.size_of(), 64);
551 assert_eq!(meta.size_of(), std::mem::size_of::<Something>());
552 assert_eq!(meta.align_of(), 32);
553 assert_eq!(meta.align_of(), std::mem::align_of::<Something>());
554 assert_eq!(meta.layout(), std::alloc::Layout::new::<Something>());
// Debug formatting shows the vtable address.
556 assert!(format!("{meta:?}").starts_with("DynMetadata(0x"));
// Reassembling fat pointers from (address, metadata) must reproduce the
// original pointer, for trait objects, arrays (no metadata), and slices.
560 fn from_raw_parts() {
561 let mut value = 5_u32;
562 let address = &mut value as *mut _ as *mut ();
563 let trait_object: &dyn Display = &mut value;
564 let vtable = metadata(trait_object);
565 let trait_object = NonNull::from(trait_object);
// address + vtable round-trips to the same trait-object pointer.
567 assert_eq!(ptr::from_raw_parts(address, vtable), trait_object.as_ptr());
568 assert_eq!(ptr::from_raw_parts_mut(address, vtable), trait_object.as_ptr());
569 assert_eq!(NonNull::from_raw_parts(NonNull::new(address).unwrap(), vtable), trait_object);
571 let mut array = [5_u32, 5, 5, 5, 5];
572 let address = &mut array as *mut _ as *mut ();
573 let array_ptr = NonNull::from(&mut array);
574 let slice_ptr = NonNull::from(&mut array[..]);
// Arrays are sized: metadata is ().
576 assert_eq!(ptr::from_raw_parts(address, ()), array_ptr.as_ptr());
577 assert_eq!(ptr::from_raw_parts_mut(address, ()), array_ptr.as_ptr());
578 assert_eq!(NonNull::from_raw_parts(NonNull::new(address).unwrap(), ()), array_ptr);
// Slices: metadata is the element count (5).
580 assert_eq!(ptr::from_raw_parts(address, 5), slice_ptr.as_ptr());
581 assert_eq!(ptr::from_raw_parts_mut(address, 5), slice_ptr.as_ptr());
582 assert_eq!(NonNull::from_raw_parts(NonNull::new(address).unwrap(), 5), slice_ptr);
// Demo of a thin (single-word) box for trait objects built on the
// metadata APIs: the vtable metadata is stored inline in the allocation,
// before the value (enclosing test fn header elided in this excerpt).
587 let foo = ThinBox::<dyn Display>::new(4);
588 assert_eq!(foo.to_string(), "4");
590 let bar = ThinBox::<dyn Display>::new(7);
591 assert_eq!(bar.to_string(), "7");
593 // A slightly more interesting library that could be built on top of metadata APIs.
595 // * It could be generalized to any `T: ?Sized` (not just trait object)
596 // if `{size,align}_of_for_meta<T: ?Sized>(T::Metadata)` are added.
597 // * Constructing a `ThinBox` without consuming and deallocating a `Box`
598 // requires either the unstable `Unsize` marker trait,
599 // or the unstable `unsized_locals` language feature,
600 // or taking `&dyn T` and restricting to `T: Copy`.
603 use std::marker::PhantomData;
// ThinBox stores a single pointer to [DynMetadata | value] on the heap.
607 T: ?Sized + Pointee<Metadata = DynMetadata<T>>,
609 ptr: NonNull<DynMetadata<T>>,
610 phantom: PhantomData<T>,
615 T: ?Sized + Pointee<Metadata = DynMetadata<T>>,
// Allocate one block laid out as metadata-then-value and move both in.
617 pub fn new<Value: std::marker::Unsize<T>>(value: Value) -> Self {
618 let unsized_: &T = &value;
619 let meta = metadata(unsized_);
620 let meta_layout = Layout::for_value(&meta);
621 let value_layout = Layout::for_value(&value);
622 let (layout, offset) = meta_layout.extend(value_layout).unwrap();
623 // `DynMetadata` is pointer-sized:
624 assert!(layout.size() > 0);
625 // If `ThinBox<T>` is generalized to any `T: ?Sized`,
626 // handle ZSTs with a dangling pointer without going through `alloc()`,
627 // like `Box<T>` does.
629 let ptr = NonNull::new(alloc(layout))
630 .unwrap_or_else(|| handle_alloc_error(layout))
631 .cast::<DynMetadata<T>>();
632 ptr.as_ptr().write(meta);
633 ptr.cast::<u8>().as_ptr().add(offset).cast::<Value>().write(value);
634 Self { ptr, phantom: PhantomData }
// Read the stored vtable metadata from the head of the allocation.
638 fn meta(&self) -> DynMetadata<T> {
639 unsafe { *self.ptr.as_ref() }
// Recompute the combined layout and the value's offset within it.
642 fn layout(&self) -> (Layout, usize) {
643 let meta = self.meta();
644 Layout::for_value(&meta).extend(meta.layout()).unwrap()
// Rebuild a fat `*const T` from the value's address plus stored metadata.
647 fn value_ptr(&self) -> *const T {
648 let (_, offset) = self.layout();
649 let data_ptr = unsafe { self.ptr.cast::<u8>().as_ptr().add(offset) };
650 ptr::from_raw_parts(data_ptr.cast(), self.meta())
653 fn value_mut_ptr(&mut self) -> *mut T {
654 let (_, offset) = self.layout();
655 // FIXME: can this line be shared with the same in `value_ptr()`
656 // without upsetting Stacked Borrows?
657 let data_ptr = unsafe { self.ptr.cast::<u8>().as_ptr().add(offset) };
658 from_raw_parts_mut(data_ptr.cast(), self.meta())
662 impl<T> std::ops::Deref for ThinBox<T>
664 T: ?Sized + Pointee<Metadata = DynMetadata<T>>,
668 fn deref(&self) -> &T {
669 unsafe { &*self.value_ptr() }
673 impl<T> std::ops::DerefMut for ThinBox<T>
675 T: ?Sized + Pointee<Metadata = DynMetadata<T>>,
677 fn deref_mut(&mut self) -> &mut T {
678 unsafe { &mut *self.value_mut_ptr() }
// Drop the boxed value in place, then free the whole allocation.
682 impl<T> std::ops::Drop for ThinBox<T>
684 T: ?Sized + Pointee<Metadata = DynMetadata<T>>,
687 let (layout, _) = self.layout();
689 drop_in_place::<T>(&mut **self);
690 dealloc(self.ptr.cast().as_ptr(), layout);
// Tagged-pointer demo: stash small integers in a pointer's alignment bits
// using the strict-provenance `map_addr` API, so provenance is preserved.
697 fn nonnull_tagged_pointer_with_provenance() {
698 let raw_pointer = Box::into_raw(Box::new(10));
700 let mut p = TaggedPointer::new(raw_pointer).unwrap();
701 assert_eq!(p.tag(), 0);
// Tag updates (the `set_tag` calls are elided in this excerpt) never
// disturb the pointee value.
704 assert_eq!(p.tag(), 1);
705 assert_eq!(unsafe { *p.pointer().as_ptr() }, 10);
708 assert_eq!(p.tag(), 3);
709 assert_eq!(unsafe { *p.pointer().as_ptr() }, 10);
// Reclaim the allocation through the untagged pointer.
711 unsafe { Box::from_raw(p.pointer().as_ptr()) };
713 /// A non-null pointer type which carries several bits of metadata and maintains provenance.
715 pub struct TaggedPointer<T>(NonNull<T>);
717 impl<T> Clone for TaggedPointer<T> {
718 fn clone(&self) -> Self {
723 impl<T> Copy for TaggedPointer<T> {}
725 impl<T> TaggedPointer<T> {
726 /// The ABI-required minimum alignment of the `P` type.
727 pub const ALIGNMENT: usize = core::mem::align_of::<T>();
728 /// A mask for data-carrying bits of the address.
729 pub const DATA_MASK: usize = !Self::ADDRESS_MASK;
730 /// Number of available bits of storage in the address.
731 pub const NUM_BITS: u32 = Self::ALIGNMENT.trailing_zeros();
732 /// A mask for the non-data-carrying bits of the address.
733 pub const ADDRESS_MASK: usize = usize::MAX << Self::NUM_BITS;
735 /// Create a new tagged pointer from a possibly null pointer.
736 pub fn new(pointer: *mut T) -> Option<TaggedPointer<T>> {
737 Some(TaggedPointer(NonNull::new(pointer)?))
740 /// Consume this tagged pointer and produce a raw mutable pointer to the
742 pub fn pointer(self) -> NonNull<T> {
743 // SAFETY: The `addr` guaranteed to have bits set in the Self::ADDRESS_MASK, so the result will be non-null.
744 self.0.map_addr(|addr| unsafe {
745 NonZeroUsize::new_unchecked(addr.get() & Self::ADDRESS_MASK)
749 /// Consume this tagged pointer and produce the data it carries.
750 pub fn tag(&self) -> usize {
751 self.0.addr().get() & Self::DATA_MASK
754 /// Update the data this tagged pointer carries to a new value.
755 pub fn set_tag(&mut self, data: usize) {
// (assert! wrapper elided) rejects tags that overflow the low bits.
757 data & Self::ADDRESS_MASK,
759 "cannot set more data beyond the lowest NUM_BITS"
761 let data = data & Self::DATA_MASK;
763 // SAFETY: This value will always be non-zero because the upper bits (from
764 // ADDRESS_MASK) will always be non-zero. This a property of the type and its
766 self.0 = self.0.map_addr(|addr| unsafe {
767 NonZeroUsize::new_unchecked((addr.get() & Self::ADDRESS_MASK) | data)