1 use core::cell::RefCell;
// Verifies that `slice_from_raw_parts` can rebuild a slice from its own
// ptr/len pair, both in const context and at runtime.
// NOTE(review): this extract elides some original lines (gaps in the inline
// numbering), including the function's surrounding attribute/closing lines.
5 fn test_const_from_raw_parts() {
// Const-eval case: round-tripping a const slice through raw parts compares equal.
6 const SLICE: &[u8] = &[1, 2, 3, 4];
7 const FROM_RAW: &[u8] = unsafe { &*slice_from_raw_parts(SLICE.as_ptr(), SLICE.len()) };
8 assert_eq!(SLICE, FROM_RAW);
// Runtime case: a shorter length (2) carves a prefix out of a longer slice.
10 let slice = &[1, 2, 3, 4, 5];
11 let from_raw = unsafe { &*slice_from_raw_parts(slice.as_ptr(), 2) };
12 assert_eq!(&slice[..2], from_raw);
// Exercises raw-pointer casting and `ptr::copy`:
//  - a `*mut Pair` reinterpreted as `*mut isize` aliases the first field;
//  - `copy` moves single `u16` elements between two Vec buffers at offsets.
// NOTE(review): the enclosing `fn`/`unsafe` header, the `Pair` definition, and
// several lines are elided from this extract (gaps in the inline numbering) —
// confirm against the full file.
22 let mut p = Pair { fst: 10, snd: 20 };
23 let pptr: *mut Pair = &mut p;
// Casting the struct pointer to its first field's type aliases `fst`.
24 let iptr: *mut isize = pptr as *mut isize;
25 assert_eq!(*iptr, 10);
// (an elided line — presumably `*iptr = 30;` — must have written through `iptr`)
27 assert_eq!(*iptr, 30);
28 assert_eq!(p.fst, 30);
// Writing a whole Pair through the struct pointer updates both fields,
// and the write is visible through the aliasing field pointer too.
30 *pptr = Pair { fst: 50, snd: 60 };
31 assert_eq!(*iptr, 50);
32 assert_eq!(p.fst, 50);
33 assert_eq!(p.snd, 60);
// `copy` one element at the matching offset 1: only v1[1] changes.
35 let v0 = vec![32000u16, 32001u16, 32002u16];
36 let mut v1 = vec![0u16, 0u16, 0u16];
38 copy(v0.as_ptr().offset(1), v1.as_mut_ptr().offset(1), 1);
39 assert!((v1[0] == 0u16 && v1[1] == 32001u16 && v1[2] == 0u16));
// Then cross offsets: src[2] -> dst[0], and src[0] -> dst[2].
40 copy(v0.as_ptr().offset(2), v1.as_mut_ptr(), 1);
41 assert!((v1[0] == 32002u16 && v1[1] == 32001u16 && v1[2] == 0u16));
42 copy(v0.as_ptr(), v1.as_mut_ptr().offset(2), 1);
43 assert!((v1[0] == 32002u16 && v1[1] == 32001u16 && v1[2] == 32000u16));
// Checks `is_null` for thin pointers, for `wrapping_offset` results, and for
// fat pointers (slices and trait objects): only a null data address reports
// null; the metadata (slice length / vtable) plays no part.
// NOTE(review): the `fn test_is_null()` header and some assertions are elided
// from this extract (gaps in the inline numbering).
49 let p: *const isize = null();
// Wrapping-offsetting off null yields a non-null (dangling) pointer.
52 let q = p.wrapping_offset(1);
53 assert!(!q.is_null());
// Same pair of checks for the `*mut` flavor.
55 let mp: *mut isize = null_mut();
56 assert!(mp.is_null());
58 let mq = mp.wrapping_offset(1);
59 assert!(!mq.is_null());
61 // Pointers to unsized types -- slices
62 let s: &mut [u8] = &mut [1, 2, 3];
63 let cs: *const [u8] = s;
64 assert!(!cs.is_null());
66 let ms: *mut [u8] = s;
67 assert!(!ms.is_null());
// An empty slice still has a valid (non-null) data pointer.
69 let cz: *const [u8] = &[];
70 assert!(!cz.is_null());
72 let mz: *mut [u8] = &mut [];
73 assert!(!mz.is_null());
// Null thin pointers coerced to slice pointers stay null.
75 let ncs: *const [u8] = null::<[u8; 3]>();
76 assert!(ncs.is_null());
78 let nms: *mut [u8] = null_mut::<[u8; 3]>();
79 assert!(nms.is_null());
81 // Pointers to unsized types -- trait objects
82 let ci: *const dyn ToString = &3;
83 assert!(!ci.is_null());
85 let mi: *mut dyn ToString = &mut 3;
86 assert!(!mi.is_null());
// Null thin pointers coerced to trait-object pointers stay null.
88 let nci: *const dyn ToString = null::<isize>();
89 assert!(nci.is_null());
91 let nmi: *mut dyn ToString = null_mut::<isize>();
92 assert!(nmi.is_null());
// Checks `pointer::as_ref`: None for null, Some(&T) otherwise — for thin
// pointers, both mutability flavors, and fat pointers (slices/trait objects).
// NOTE(review): the `fn test_as_ref()` header and the scope binding `u`
// (presumably `let u: isize = 2;`) are elided from this extract.
98 let p: *const isize = null();
99 assert_eq!(p.as_ref(), None);
101 let q: *const isize = &2;
102 assert_eq!(q.as_ref().unwrap(), &2);
// `*mut` pointers behave the same through the shared `as_ref`.
104 let p: *mut isize = null_mut();
105 assert_eq!(p.as_ref(), None);
107 let q: *mut isize = &mut 2;
108 assert_eq!(q.as_ref().unwrap(), &2);
110 // Lifetime inference
// The reference returned by `as_ref` gets its lifetime inferred from use;
// `u` is bound in an elided enclosing scope.
113 let p = &u as *const isize;
114 assert_eq!(p.as_ref().unwrap(), &2);
117 // Pointers to unsized types -- slices
118 let s: &mut [u8] = &mut [1, 2, 3];
119 let cs: *const [u8] = s;
120 assert_eq!(cs.as_ref(), Some(&*s));
122 let ms: *mut [u8] = s;
123 assert_eq!(ms.as_ref(), Some(&*s));
// Empty slices are non-null, so they still produce Some.
125 let cz: *const [u8] = &[];
126 assert_eq!(cz.as_ref(), Some(&[][..]));
128 let mz: *mut [u8] = &mut [];
129 assert_eq!(mz.as_ref(), Some(&[][..]));
// Null slice pointers produce None regardless of the (zero) metadata.
131 let ncs: *const [u8] = null::<[u8; 3]>();
132 assert_eq!(ncs.as_ref(), None);
134 let nms: *mut [u8] = null_mut::<[u8; 3]>();
135 assert_eq!(nms.as_ref(), None);
137 // Pointers to unsized types -- trait objects
138 let ci: *const dyn ToString = &3;
139 assert!(ci.as_ref().is_some());
141 let mi: *mut dyn ToString = &mut 3;
142 assert!(mi.as_ref().is_some());
144 let nci: *const dyn ToString = null::<isize>();
145 assert!(nci.as_ref().is_none());
147 let nmi: *mut dyn ToString = null_mut::<isize>();
148 assert!(nmi.as_ref().is_none());
// Mirror of the `as_ref` checks for `pointer::as_mut`: None for null,
// Some(&mut T) otherwise, including fat pointers.
// NOTE(review): the `fn test_as_mut()` header and the scope binding `u` are
// elided from this extract (gaps in the inline numbering).
155 let p: *mut isize = null_mut();
156 assert!(p.as_mut() == None);
158 let q: *mut isize = &mut 2;
159 assert!(q.as_mut().unwrap() == &mut 2);
161 // Lifetime inference
// `u` is bound in an elided enclosing scope, presumably `let mut u: isize = 2;`.
164 let p = &mut u as *mut isize;
165 assert!(p.as_mut().unwrap() == &mut 2);
168 // Pointers to unsized types -- slices
169 let s: &mut [u8] = &mut [1, 2, 3];
170 let ms: *mut [u8] = s;
171 assert_eq!(ms.as_mut(), Some(&mut [1, 2, 3][..]));
// Empty-but-non-null slice still yields Some.
173 let mz: *mut [u8] = &mut [];
174 assert_eq!(mz.as_mut(), Some(&mut [][..]));
176 let nms: *mut [u8] = null_mut::<[u8; 3]>();
177 assert_eq!(nms.as_mut(), None);
179 // Pointers to unsized types -- trait objects
180 let mi: *mut dyn ToString = &mut 3;
181 assert!(mi.as_mut().is_some());
183 let nmi: *mut dyn ToString = null_mut::<isize>();
184 assert!(nmi.as_mut().is_none());
// Walks buffers forward with `offset(1)` until an `end` pointer is reached:
// a read-only pass over `xs`, then a mutating pass over `xs_mut`.
// NOTE(review): the read loop, the `xs_mut` binding, and the mutation inside
// the while-loop are elided from this extract (gaps in the inline numbering).
189 fn test_ptr_addition() {
191 let xs = vec![5; 16];
192 let mut ptr = xs.as_ptr();
// One-past-the-end pointer used as the loop sentinel.
193 let end = ptr.offset(16);
201 let mut m_ptr = xs_mut.as_mut_ptr();
202 let m_end = m_ptr.offset(16);
204 while m_ptr < m_end {
// (loop body elided — presumably `*m_ptr += 5;`, since 5 + 5 == 10 below)
206 m_ptr = m_ptr.offset(1);
209 assert!(xs_mut == vec![10; 16]);
// Walks a buffer backward with negative offsets: first verifies indexing via
// `ptr.offset(idx)`, then mutates `xs_mut` from the back toward `m_start`.
// NOTE(review): the loop headers, the `xs_mut` binding, and the loop bodies
// are elided from this extract (gaps in the inline numbering).
214 fn test_ptr_subtraction() {
216 let xs = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
218 let ptr = xs.as_ptr();
// Inside an elided loop over `idx`: element i holds the value i.
221 assert_eq!(*(ptr.offset(idx as isize)), idx as isize);
226 let m_start = xs_mut.as_mut_ptr();
// Start at the last element and step back with `offset(-1)`.
227 let mut m_ptr = m_start.offset(9);
// Terminate when the cursor reaches the first element.
231 if m_ptr == m_start {
234 m_ptr = m_ptr.offset(-1);
// Elided loop body presumably doubled each element: [0,1,..,9] -> [0,2,..,18].
237 assert_eq!(xs_mut, [0, 2, 4, 6, 8, 10, 12, 14, 16, 18]);
// `write_bytes` fills all 20 bytes of the buffer with the value 5.
// NOTE(review): the `unsafe` block wrapping the `write_bytes` call is elided
// from this extract (gap in the inline numbering).
242 fn test_set_memory() {
243 let mut xs = [0u8; 20];
244 let ptr = xs.as_mut_ptr();
246 write_bytes(ptr, 5u8, xs.len());
248 assert!(xs == [5u8; 20]);
// `NonNull` can carry a fat (slice) pointer; `as_ref` round-trips it back to
// a usable `&[i32]`.
// NOTE(review): the final comparison against `zs` (presumably
// `assert!(ys == zs);`) is elided from this extract.
252 fn test_unsized_nonnull() {
253 let xs: &[i32] = &[1, 2, 3];
// Pointer is derived from a live reference, hence non-null for `new_unchecked`.
254 let ptr = unsafe { NonNull::new_unchecked(xs as *const [i32] as *mut [i32]) };
255 let ys = unsafe { ptr.as_ref() };
256 let zs: &[i32] = &[1, 2, 3];
262 // Provide a symbol for the test below. It doesn’t need to be an actual variadic function, match the
263 // ABI, or even point to actual executable code, because the function itself is never invoked.
// Checks that equal variadic extern-"C" fn pointers hash identically.
// NOTE(review): the `extern` block header around the declaration and the
// binding of `q` (presumably a second copy of `p`) are elided from this
// extract (gaps in the inline numbering).
265 pub fn test_variadic_fnptr() {
266 use core::hash::{Hash, SipHasher};
// Declaration only — the symbol is never called, so the body/ABI don't matter.
268 fn test_variadic_fnptr(_: u64, ...) -> f64;
270 let p: unsafe extern "C" fn(u64, ...) -> f64 = test_variadic_fnptr;
// Hashing both pointers into the same hasher type must agree.
274 let mut s = SipHasher::new();
275 assert_eq!(p.hash(&mut s), q.hash(&mut s));
// `write_unaligned` must overwrite the destination WITHOUT dropping the old
// value: only the drop of the newly written value should be recorded.
// NOTE(review): the `thread_local!` wrapper around DROPS, the `Dropper` struct
// definition, the binding of `c` (presumably `Dropper(0)`), and the scope that
// triggers the drop are all elided from this extract.
279 fn write_unaligned_drop() {
// Per-thread log of which Dropper ids have been dropped.
281 static DROPS: RefCell<Vec<u32>> = RefCell::new(Vec::new());
286 impl Drop for Dropper {
288 DROPS.with(|d| d.borrow_mut().push(self.0));
294 let mut t = Dropper(1);
// Clobbers `t` with `c`; the old Dropper(1) must NOT have Drop run.
296 write_unaligned(&mut t, c);
// Only id 0 (the value written in, then later dropped) appears in the log.
299 DROPS.with(|d| assert_eq!(*d.borrow(), [0]));
// `align_offset` for a ZST (stride 0): the pointer is either already aligned
// (offset 0) or can never become aligned (offset !0 == usize::MAX).
// NOTE(review): the loop header driving `p` over power-of-two alignments is
// elided from this extract (gaps in the inline numbering).
303 #[cfg_attr(miri, ignore)] // Miri does not compute a maximal `mid` for `align_offset`
304 fn align_offset_zst() {
305 // For pointers of stride = 0, the pointer is already aligned or it cannot be aligned at
306 // all, because no amount of elements will align the pointer.
// Address == alignment `p` is trivially aligned.
309 assert_eq!((p as *const ()).align_offset(p), 0);
// Address `p + 1` can never reach alignment `p` with stride 0.
311 assert_eq!(((p + 1) as *const ()).align_offset(p), !0);
// Advance to the next power-of-two alignment for the next iteration.
313 p = (p + 1).next_power_of_two();
// `align_offset` for stride 1 (u8): alignment is always reachable, and the
// offset equals the distance to the next multiple of `align`.
// NOTE(review): the outer loop over `align`, the `assert_eq!` wrapper around
// the comparison, and its remaining format arguments are elided from this
// extract (gaps in the inline numbering).
318 #[cfg_attr(miri, ignore)] // Miri does not compute a maximal `mid` for `align_offset`
319 fn align_offset_stride1() {
320 // For pointers of stride = 1, the pointer can always be aligned. The offset is equal to
// Check every address in (0, 2*align).
324 for ptr in 1..2 * align {
325 let expected = ptr % align;
// Already aligned -> 0; otherwise the distance up to the next boundary.
326 let offset = if expected == 0 { 0 } else { align - expected };
328 (ptr as *const u8).align_offset(align),
330 "ptr = {}, align = {}, size = 1",
// Advance to the next power-of-two alignment for the next iteration.
335 align = (align + 1).next_power_of_two();
// Cross-checks `align_offset` for element types with odd sizes/alignments
// (A3..A10) against a brute-force search (`test_weird_stride`).
// NOTE(review): most struct definitions (A3..A6, A8), the search loop over
// `el`, the failure-reporting branch, and the final `assert!(!x)` are elided
// from this extract (gaps in the inline numbering).
340 #[cfg_attr(miri, ignore)] // Miri is too slow
341 fn align_offset_weird_strides() {
// Struct sizes chosen so stride is a "weird" (non-power-of-two) value.
350 struct A7(u32, u16, u8);
354 struct A9(u32, u32, u8);
356 struct A10(u32, u32, u16);
// Returns true on MISMATCH between `align_offset` and the brute-force answer.
358 unsafe fn test_weird_stride<T>(ptr: *const T, align: usize) -> bool {
359 let numptr = ptr as usize;
// Sentinel: max_value() means "no aligned element found" (i.e. unreachable).
360 let mut expected = usize::max_value();
361 // Naive but definitely correct way to find the *first* aligned element of stride::<T>.
// Inside an elided loop over candidate element counts `el`:
363 if (numptr + el * ::std::mem::size_of::<T>()) % align == 0 {
368 let got = ptr.align_offset(align);
// Mismatch diagnostics (printed in an elided branch before returning true).
371 "aligning {:p} (with stride of {}) to {}, expected {}, got {}",
373 ::std::mem::size_of::<T>(),
383 // For pointers of stride != 1, we verify the algorithm against the naivest possible
// Try every raw address in (0, 4*align) for every element type;
// `x` accumulates whether ANY case mismatched.
388 for ptr in 1usize..4 * align {
390 x |= test_weird_stride::<A3>(ptr as *const A3, align);
391 x |= test_weird_stride::<A4>(ptr as *const A4, align);
392 x |= test_weird_stride::<A5>(ptr as *const A5, align);
393 x |= test_weird_stride::<A6>(ptr as *const A6, align);
394 x |= test_weird_stride::<A7>(ptr as *const A7, align);
395 x |= test_weird_stride::<A8>(ptr as *const A8, align);
396 x |= test_weird_stride::<A9>(ptr as *const A9, align);
397 x |= test_weird_stride::<A10>(ptr as *const A10, align);
// Advance to the next power-of-two alignment for the next iteration.
400 align = (align + 1).next_power_of_two();