use core::alloc::{AllocError, Allocator, Layout};
use core::cell::Cell;
use core::mem::MaybeUninit;
use core::ptr::NonNull;
/// Boxes of zero-size contents (zero-sized element type, or a zero-length
/// slice) must not allocate: the pointer they hand out is the dangling,
/// well-aligned sentinel, identical to `NonNull::dangling()`.
#[test]
fn uninitialized_zero_size_box() {
    assert_eq!(
        &*Box::<()>::new_uninit() as *const _,
        NonNull::<MaybeUninit<()>>::dangling().as_ptr(),
    );
    assert_eq!(
        // Non-zero length, but the element type is zero-sized.
        Box::<[()]>::new_uninit_slice(4).as_ptr(),
        NonNull::<MaybeUninit<()>>::dangling().as_ptr(),
    );
    assert_eq!(
        // Sized element type, but zero length.
        Box::<[String]>::new_uninit_slice(0).as_ptr(),
        NonNull::<MaybeUninit<String>>::dangling().as_ptr(),
    );
}
/// Payload type for the boxed-slice clone tests below. The `Clone` derive is
/// what `clone`/`clone_from` exercise; `PartialEq`/`Eq`/`Debug` let the tests
/// compare whole boxed slices with `assert_eq!`.
#[derive(Clone, PartialEq, Eq, Debug)]
struct Dummy {
    _data: u8,
}
/// `clone()` and `clone_from()` on a boxed slice must produce equal results,
/// checked for power-of-two lengths from 1 to 128.
#[test]
fn box_clone_and_clone_from_equivalence() {
    for size in (0..8).map(|i| 2usize.pow(i)) {
        let control = vec![Dummy { _data: 42 }; size].into_boxed_slice();
        let clone = control.clone();
        // Start `copy` with different contents so a no-op clone_from would fail.
        let mut copy = vec![Dummy { _data: 84 }; size].into_boxed_slice();
        copy.clone_from(&control);
        assert_eq!(control, clone);
        assert_eq!(control, copy);
    }
}
/// This test might give a false positive in case the box reallocates,
/// but the allocator keeps the original pointer.
///
/// On the other hand, it won't give a false negative: If it fails, then the
/// memory was definitely not reused.
#[test]
fn box_clone_from_ptr_stability() {
    for size in (0..8).map(|i| 2usize.pow(i)) {
        let control = vec![Dummy { _data: 42 }; size].into_boxed_slice();
        let mut copy = vec![Dummy { _data: 84 }; size].into_boxed_slice();
        // Both slices have the same length and layout, so `clone_from`
        // should reuse `copy`'s existing allocation instead of reallocating.
        let copy_raw = copy.as_ptr() as usize;
        copy.clone_from(&control);
        assert_eq!(copy.as_ptr() as usize, copy_raw);
    }
}
/// `*box` must be a place expression: a mutation performed through the box
/// (here via `Cell::set`) must be observable through a later read.
#[test]
fn box_deref_lval() {
    let x = Box::new(Cell::new(5));
    x.set(1000);
    assert_eq!(x.get(), 1000);
}
/// Zero-sized allocator whose `Allocator` impl (below) is `const`, so that
/// `Box::new_in` can be used inside compile-time evaluation (see `const_box`).
pub struct ConstAllocator;
64 unsafe impl const Allocator for ConstAllocator {
65 fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
67 0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
69 let ptr = core::intrinsics::const_allocate(layout.size(), layout.align());
70 Ok(NonNull::new_unchecked(ptr as *mut [u8; 0] as *mut [u8]))
75 unsafe fn deallocate(&self, _ptr: NonNull<u8>, layout: Layout) {
77 0 => { /* do nothing */ }
78 _ => { /* do nothing too */ }
82 fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
83 let ptr = self.allocate(layout)?;
84 if layout.size() > 0 {
86 ptr.as_mut_ptr().write_bytes(0, layout.size());
97 ) -> Result<NonNull<[u8]>, AllocError> {
99 new_layout.size() >= old_layout.size(),
100 "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
103 let new_ptr = self.allocate(new_layout)?;
104 if new_layout.size() > 0 {
105 new_ptr.as_mut_ptr().copy_from_nonoverlapping(ptr.as_ptr(), old_layout.size());
106 self.deallocate(ptr, old_layout);
111 unsafe fn grow_zeroed(
116 ) -> Result<NonNull<[u8]>, AllocError> {
117 let new_ptr = self.grow(ptr, old_layout, new_layout)?;
118 if new_layout.size() > 0 {
119 let old_size = old_layout.size();
120 let new_size = new_layout.size();
121 let raw_ptr = new_ptr.as_mut_ptr();
122 raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
132 ) -> Result<NonNull<[u8]>, AllocError> {
134 new_layout.size() <= old_layout.size(),
135 "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
138 let new_ptr = self.allocate(new_layout)?;
139 if new_layout.size() > 0 {
140 new_ptr.as_mut_ptr().copy_from_nonoverlapping(ptr.as_ptr(), new_layout.size());
141 self.deallocate(ptr, old_layout);
146 fn by_ref(&self) -> &Self
/// A `Box` backed by a `const Allocator` must be fully usable (create,
/// read, write through deref, leak) inside compile-time evaluation.
#[test]
fn const_box() {
    const VALUE: u32 = {
        let mut boxed = Box::new_in(1u32, ConstAllocator);
        assert!(*boxed == 1);
        *boxed = 42;
        assert!(*boxed == 42);

        // Leak rather than drop: deallocation is a no-op for this allocator,
        // and the leaked value becomes the constant's result.
        *Box::leak(boxed)
    };

    assert!(VALUE == 42);
}