1 //@compile-flags: -Zmiri-strict-provenance
2 #![feature(new_uninit)]
3 #![feature(slice_as_chunks)]
4 #![feature(slice_partition_dedup)]
5 #![feature(layout_for_ptr)]
6 #![feature(strict_provenance)]
// Advances a shared-slice iterator and returns a slice view of it.
// The ZST test below asserts `foo(slice).is_some()` to check that
// converting an `Iter` back into a slice never yields a null data pointer.
// NOTE(review): only the first body line is visible in this chunk; the
// stepping logic and the `Some(...)` return are elided — confirm upstream.
12 fn foo<T>(v: &[T]) -> Option<&[T]> {
13 let mut it = v.iter();
// Mutable counterpart of `foo`: advances an `IterMut` and returns a mutable
// slice view of it. Used below via `foo_mut(slice).is_some()` to check the
// iterator-to-slice conversion for `&mut [()]` as well.
// NOTE(review): only the first body line is visible in this chunk; the rest
// of the body is elided — confirm upstream.
20 fn foo_mut<T>(v: &mut [T]) -> Option<&mut [T]> {
21 let mut it = v.iter_mut();
28 // In a slice of zero-size elements the pointer is meaningless.
29 // Ensure iteration still works even if the pointer is at the end of the address space.
// `-5isize as usize` wraps to an address 5 bytes below the top of the address
// space; since `()` is zero-sized, no memory is ever dereferenced, but the
// iterator still has to count off 10 elements correctly.
30 let slice: &[()] = unsafe { slice::from_raw_parts(ptr::invalid(-5isize as usize), 10) };
31 assert_eq!(slice.len(), 10);
32 assert_eq!(slice.iter().count(), 10);
34 // .nth() on the iterator should also behave correctly
35 let mut it = slice.iter();
// nth(5) consumes elements 0..=5, so exactly 4 must remain.
36 assert!(it.nth(5).is_some());
37 assert_eq!(it.count(), 4);
39 // Converting Iter to a slice should never have a null pointer
40 assert!(foo(slice).is_some());
42 // Test mutable iterators as well
43 let slice: &mut [()] =
44 unsafe { slice::from_raw_parts_mut(ptr::invalid_mut(-5isize as usize), 10) };
45 assert_eq!(slice.len(), 10);
46 assert_eq!(slice.iter_mut().count(), 10);
// Same nth(5)-then-count check as above, now for IterMut.
49 let mut it = slice.iter_mut();
50 assert!(it.nth(5).is_some());
51 assert_eq!(it.count(), 4);
// And the IterMut-to-slice conversion must not produce a null pointer either.
54 assert!(foo_mut(slice).is_some())
// Checks that slice iterators hand out references with exactly the same
// addresses as direct indexing and slice patterns, for both shared and
// mutable iteration (see the inner `test` / `test_mut` helpers).
57 fn test_iter_ref_consistency() {
// Shared-iteration half: every reference produced by `iter()` (via `nth`,
// `next`, and `next_back`) must have the same address as the element it
// refers to, as captured up front in `v_ptrs`.
// NOTE(review): the `for i in 0..len` loop headers (and `let len`) are
// elided in this view; the assertions below run once per index.
60 fn test<T: Copy + Debug + PartialEq>(x: T) {
61 let v: &[T] = &[x, x, x];
// Capture each element's address via a slice pattern, the reference source
// the iterator results are compared against.
62 let v_ptrs: [*const T; 3] = match v {
63 [ref v1, ref v2, ref v3] => [v1 as *const _, v2 as *const _, v3 as *const _],
70 assert_eq!(&v[i] as *const _, v_ptrs[i]); // check the v_ptrs array, just to be sure
// A fresh iterator's nth(i) must land on element i's address.
71 let nth = v.iter().nth(i).unwrap();
72 assert_eq!(nth as *const _, v_ptrs[i]);
74 assert_eq!(v.iter().nth(len), None, "nth(len) should return None");
76 // stepping through with nth(0)
78 let mut it = v.iter();
80 let next = it.nth(0).unwrap();
81 assert_eq!(next as *const _, v_ptrs[i]);
// After consuming every element one nth(0) at a time, the iterator is empty.
83 assert_eq!(it.nth(0), None);
// Walk forward with next(), checking size_hint stays exact at each step.
88 let mut it = v.iter();
90 let remaining = len - i;
91 assert_eq!(it.size_hint(), (remaining, Some(remaining)));
93 let next = it.next().unwrap();
94 assert_eq!(next as *const _, v_ptrs[i]);
96 assert_eq!(it.size_hint(), (0, Some(0)));
97 assert_eq!(it.next(), None, "The final call to next() should return None");
// Same walk, but backwards with next_back().
102 let mut it = v.iter();
104 let remaining = len - i;
105 assert_eq!(it.size_hint(), (remaining, Some(remaining)));
107 let prev = it.next_back().unwrap();
108 assert_eq!(prev as *const _, v_ptrs[remaining - 1]);
110 assert_eq!(it.size_hint(), (0, Some(0)));
111 assert_eq!(it.next_back(), None, "The final call to next_back() should return None");
// Mutable-iteration half: mirrors `test` above, but compares the addresses
// handed out by `iter_mut()` against the `*mut` pointers in `v_ptrs`.
// NOTE(review): the `for i in 0..len` loop headers (and `let len`) are
// elided in this view; the assertions below run once per index.
115 fn test_mut<T: Copy + Debug + PartialEq>(x: T) {
116 let v: &mut [T] = &mut [x, x, x];
// Capture each element's address via a slice pattern (cast through *const
// because the pattern binds shared references).
117 let v_ptrs: [*mut T; 3] = match v {
118 [ref v1, ref v2, ref v3] =>
119 [v1 as *const _ as *mut _, v2 as *const _ as *mut _, v3 as *const _ as *mut _],
126 assert_eq!(&mut v[i] as *mut _, v_ptrs[i]); // check the v_ptrs array, just to be sure
// A fresh iter_mut()'s nth(i) must land on element i's address.
127 let nth = v.iter_mut().nth(i).unwrap();
128 assert_eq!(nth as *mut _, v_ptrs[i]);
130 assert_eq!(v.iter().nth(len), None, "nth(len) should return None");
132 // stepping through with nth(0)
134 let mut it = v.iter();
136 let next = it.nth(0).unwrap();
137 assert_eq!(next as *const _, v_ptrs[i]);
// After consuming every element one nth(0) at a time, the iterator is empty.
139 assert_eq!(it.nth(0), None);
// Walk forward with next(), checking size_hint stays exact at each step.
144 let mut it = v.iter_mut();
146 let remaining = len - i;
147 assert_eq!(it.size_hint(), (remaining, Some(remaining)));
149 let next = it.next().unwrap();
150 assert_eq!(next as *mut _, v_ptrs[i]);
152 assert_eq!(it.size_hint(), (0, Some(0)));
153 assert_eq!(it.next(), None, "The final call to next() should return None");
// Same walk, but backwards with next_back().
158 let mut it = v.iter_mut();
160 let remaining = len - i;
161 assert_eq!(it.size_hint(), (remaining, Some(remaining)));
163 let prev = it.next_back().unwrap();
164 assert_eq!(prev as *mut _, v_ptrs[remaining - 1]);
166 assert_eq!(it.size_hint(), (0, Some(0)));
167 assert_eq!(it.next_back(), None, "The final call to next_back() should return None");
171 // Make sure iterators and slice patterns yield consistent addresses for various types,
// including zero-sized ones (the visible calls use `[0u32; 0]`, a ZST with
// alignment 4). NOTE(review): calls for other element types are elided here.
175 test([0u32; 0]); // ZST with alignment > 0
178 test_mut([0u32; 0]); // ZST with alignment > 0
// Exercise `Box::new_uninit_slice` with element-wise deferred initialization:
// each `MaybeUninit<Box<u32>>` slot is written through its raw pointer before
// the slice is treated as initialized.
// NOTE(review): the `values.assume_init()` that should close this unsafe
// block is elided in this view — confirm upstream.
182 let mut values = Box::<[Box<u32>]>::new_uninit_slice(3);
184 let values = unsafe {
185 // Deferred initialization:
186 values[0].as_mut_ptr().write(Box::new(1));
187 values[1].as_mut_ptr().write(Box::new(2));
188 values[2].as_mut_ptr().write(Box::new(3));
// Double-deref (`**x`): once through the element reference, once through the Box.
193 assert_eq!(values.iter().map(|x| **x).collect::<Vec<_>>(), vec![1, 2, 3])
196 /// Regression tests for slice methods in the Rust core library where raw pointers are obtained
197 /// from mutable references.
// NOTE(review): the bodies of the two `for` loops below are elided in this
// view of the file.
198 fn test_for_invalidated_pointers() {
199 let mut buffer = [0usize; 64];
200 let len = buffer.len();
202 // These regression tests (indirectly) call every slice method which contains a `buffer.as_mut_ptr()`.
203 // `<[T]>::as_mut_ptr(&mut self)` takes a mutable reference (tagged Unique), which will invalidate all
204 // the other pointers that were previously derived from it according to the Stacked Borrows model.
205 // An example of where this could go wrong is a prior bug inside `<[T]>::copy_within`:
208 // core::ptr::copy(self.as_ptr().add(src_start), self.as_mut_ptr().add(dest), count);
211 // The arguments to `core::ptr::copy` are evaluated from left to right. `self.as_ptr()` creates
212 // an immutable reference (which is tagged as `SharedReadOnly` by Stacked Borrows) to the array
213 // and derives a valid `*const` pointer from it. When jumping to the next argument,
214 // `self.as_mut_ptr()` creates a mutable reference (tagged as `Unique`) to the array, which
215 // invalidates the existing `SharedReadOnly` reference and any pointers derived from it.
216 // The invalidated `*const` pointer (the first argument to `core::ptr::copy`) is then used
217 // after the fact when `core::ptr::copy` is called, which triggers undefined behavior.
220 assert_eq!(0, *buffer.as_mut_ptr_range().start);
222 // Check that the pointer range is in-bounds, while we're at it
223 let range = buffer.as_mut_ptr_range();
// end - len must land exactly back on start for a 64-element array.
225 assert_eq!(*range.start, *range.end.sub(len));
230 // Calls `fn as_chunks_unchecked_mut` internally (requires unstable `#![feature(slice_as_chunks)]`):
231 assert_eq!(2, buffer.as_chunks_mut::<32>().0.len());
232 for chunk in buffer.as_chunks_mut::<32>().0 {
238 // Calls `fn split_at_mut_unchecked` internally:
239 let split_mut = buffer.split_at_mut(32);
240 assert_eq!(split_mut.0, split_mut.1);
242 // Calls `fn partition_dedup_by` internally (requires unstable `#![feature(slice_partition_dedup)]`):
243 let partition_dedup = buffer.partition_dedup();
244 assert_eq!(1, partition_dedup.0.len());
245 partition_dedup.0[0] += 1;
246 for elem in partition_dedup.1 {
// More methods that internally derive raw pointers from `&mut self`:
250 buffer.rotate_left(8);
251 buffer.rotate_right(16);
253 buffer.copy_from_slice(&[1usize; 64]);
254 buffer.swap_with_slice(&mut [2usize; 64]);
// `align_to_mut` yields (prefix, aligned middle, suffix); index the u8 view.
256 assert_eq!(0, unsafe { buffer.align_to_mut::<u8>().1[1] });
// The exact method whose prior bug is described in the comment above.
258 buffer.copy_within(1.., 0);
// A raw (never-dereferenced) slice may have up to isize::MAX bytes; with u8
// elements that is isize::MAX elements, so querying its size must succeed.
261 fn large_raw_slice() {
262 let size = isize::MAX as usize;
263 // Creating a raw slice of size isize::MAX and asking for its size is okay.
// The pointer is an invalid (provenance-free) address 1 — fine, since
// `size_of_val_raw` only inspects the metadata, never the memory.
264 let s = std::ptr::slice_from_raw_parts(ptr::invalid::<u8>(1), size);
265 assert_eq!(size, unsafe { std::mem::size_of_val_raw(s) });
// Test-driver calls — presumably inside `fn main`, whose header (and the
// calls to the other tests in this file) are elided in this view.
270 test_iter_ref_consistency();
272 test_for_invalidated_pointers();