1 //@compile-flags: -Zmiri-retag-fields
2 #![feature(allocator_api)]
5 // Test various stacked-borrows-related things.
// NOTE(review): these statements appear to be the body of `fn main()`; the
// `fn main() {` header and several call lines (original lines 11, 13, 15-16,
// 18-20, 23) are not visible in this excerpt of the file.
7 read_does_not_invalidate1();
8 read_does_not_invalidate2();
9 mut_raw_then_mut_shr();
10 mut_shr_then_mut_raw();
12 partially_invalidate_mut();
14 direct_mut_to_const_raw();
17 disjoint_mutable_subborrows();
21 wide_raw_ptr_in_tuple();
22 not_unpin_not_protected();
25 // Make sure that reading from an `&mut` does, like reborrowing to `&`,
26 // NOT invalidate other reborrows.
27 fn read_does_not_invalidate1() {
28 fn foo(x: &mut (i32, i32)) -> &i32 {
// Escape `x` to a raw pointer and reborrow a shared reference from it,
// then read through `x`: the read must leave `ret` usable.
29 let xraw = x as *mut (i32, i32);
30 let ret = unsafe { &(*xraw).1 };
31 let _val = x.1; // we just read, this does NOT invalidate the reborrows.
// NOTE(review): original lines 32-33 (presumably returning `ret` and closing
// the inner fn) are missing from this excerpt.
34 assert_eq!(*foo(&mut (1, 2)), 2);
36 // Same as above, but this time we first create a raw, then read from `&mut`
37 // and then freeze from the raw.
38 fn read_does_not_invalidate2() {
39 fn foo(x: &mut (i32, i32)) -> &i32 {
40 let xraw = x as *mut (i32, i32);
// Reading through `x` after the raw escape must not invalidate `xraw`,
// so the shared reborrow from `xraw` below is still legal.
41 let _val = x.1; // we just read, this does NOT invalidate the raw reborrow.
42 let ret = unsafe { &(*xraw).1 };
// NOTE(review): original lines 43-44 (presumably returning `ret` and closing
// the inner fn) are missing from this excerpt.
45 assert_eq!(*foo(&mut (1, 2)), 2);
48 // Escape a mut to raw, then share the same mut and use the share, then the raw.
50 fn mut_raw_then_mut_shr() {
// NOTE(review): original lines 49, 51-52 (declaration of `xref`) and 54+ (the
// shared reborrow and the subsequent uses) are missing from this excerpt.
53 let xraw = &mut *xref as *mut _;
62 // Create first a shared reference and then a raw pointer from a `&mut`
63 // should permit mutation through that raw pointer.
64 fn mut_shr_then_mut_raw() {
// NOTE(review): original lines 65-66 (declaration of `xref` and the shared
// reference) and 68+ (the mutation through `xraw` and the check) are missing
// from this excerpt.
67 let xraw = xref as *mut _;
74 // Ensure that if we derive from a mut a raw, and then from that a mut,
75 // and then read through the original mut, that does not invalidate the raw.
76 // This shows that the read-exception for `&mut` applies even if the `Shr` item
77 // on the stack is not at the top.
// NOTE(review): the `fn` header and the declaration of `xref1` (original lines
// 78-81) are missing from this excerpt.
82 let xraw = xref1 as *mut _;
83 let _xref2 = unsafe { &mut *xraw };
// NOTE(review): original lines 84-87 (presumably the read through `xref1`
// mentioned in the comment above) are missing from this excerpt.
88 // we can now use both xraw and xref1, for reading
89 assert_eq!(*xref1, 4);
90 assert_eq!(unsafe { *xraw }, 4);
91 assert_eq!(*xref1, 4);
92 assert_eq!(unsafe { *xraw }, 4);
93 // we cannot use xref2; see `compile-fail/stacked-borrows/illegal_read4.rs`
98 fn partially_invalidate_mut() {
// A write through `data.1` and a write through `shard` (derived from a raw
// reborrow of `*data`) touch disjoint fields, so neither invalidates the other.
99 let data = &mut (0u8, 0u8);
100 let reborrow = &mut *data as *mut (u8, u8);
101 let shard = unsafe { &mut (*reborrow).0 };
102 data.1 += 1; // the deref overlaps with `shard`, but that is ok; the access does not overlap.
103 *shard += 1; // so we can still use `shard`.
104 assert_eq!(*data, (1, 1));
// NOTE(review): the closing brace (original line 105) is missing from this excerpt.
107 // Make sure that we can handle the situation where a location is frozen when being dropped.
108 fn drop_after_sharing() {
109 let x = String::from("hello!");
// NOTE(review): the rest of the body (original lines 110+, presumably the
// sharing of `x` and the implicit drop) is missing from this excerpt.
113 // Make sure that coercing &mut T to *const T produces a writeable pointer.
114 fn direct_mut_to_const_raw() {
115 // TODO: This is currently disabled, waiting on a decision on <https://github.com/rust-lang/rust/issues/56604>
// NOTE(review): original line 116 (presumably the declaration of `x` as an
// `&mut i32`) is missing from this excerpt.
117 let y: *const i32 = x;
118 unsafe { *(y as *mut i32) = 1; }
123 // Make sure that we can create two raw pointers from a mutable reference and use them both.
// NOTE(review): the `fn` header and the declaration of `x` (original lines
// 124-126) are missing from this excerpt, as are the uses of `y1`/`y2` (129+).
127 let y1 = x as *mut _;
128 let y2 = x as *mut _;
134 // Make sure that creating a *mut does not invalidate existing shared references.
// NOTE(review): the `fn` header and the declaration of `x` (original lines
// 135-138) are missing from this excerpt; line 139 uses `mem::transmute`
// without a visible `unsafe`, so the enclosing context is presumably an
// `unsafe` fn or block — confirm against the full file.
139 let y1: &i32 = mem::transmute(&*x); // launder lifetimes
140 let y2 = x as *mut _;
146 fn disjoint_mutable_subborrows() {
// NOTE(review): the `struct Foo` definition (original lines 147-151) is
// missing from this excerpt; from the usage below it has at least a `String`
// field `a` and a `Vec<u32>` field `b`.
152 unsafe fn borrow_field_a<'a>(this: *mut Foo) -> &'a mut String {
// NOTE(review): helper body (original lines 153-155) missing from this excerpt.
156 unsafe fn borrow_field_b<'a>(this: *mut Foo) -> &'a mut Vec<u32> {
// NOTE(review): helper body (original lines 157-159) missing from this excerpt.
160 let mut foo = Foo { a: "hello".into(), b: vec![0, 1, 2] };
162 let ptr = &mut foo as *mut Foo;
// Deriving two mutable borrows of *disjoint* fields from the same raw pointer
// must be accepted; both `a` and `b` remain usable below.
164 let a = unsafe { borrow_field_a(ptr) };
165 let b = unsafe { borrow_field_b(ptr) };
167 a.push_str(" world");
168 eprintln!("{:?} {:?}", a, b);
171 fn raw_ref_to_part() {
// NOTE(review): the `Whole`/`Part` struct definitions (original lines 172-181)
// are missing from this excerpt.
182 let it = Box::new(Whole { part: Part { _lame: 0 }, extra: 42 });
// Leak the box so raw pointers derived from it stay valid until the explicit
// `Box::from_raw` below reclaims the allocation.
183 let whole = ptr::addr_of_mut!(*Box::leak(it));
184 let part = unsafe { ptr::addr_of_mut!((*whole).part) };
// Cast the field pointer back to the containing type and read a *sibling*
// field through it — this is the behavior the test pins down.
185 let typed = unsafe { &mut *(part as *mut Whole) };
186 assert!(typed.extra == 42);
187 drop(unsafe { Box::from_raw(whole) });
190 /// When casting an array reference to a raw element ptr, that should cover the whole array.
// NOTE(review): the `fn` header (original line 191) and the uses of `p`
// (lines 194+) are missing from this excerpt.
192 let mut x: [usize; 2] = [0, 0];
193 let p = &mut x as *mut usize;
// NOTE(review): the enclosing `fn` header (original lines up to 197) is
// missing from this excerpt.
198 let x: [usize; 2] = [0, 1];
199 let p = &x as *const usize;
// The raw pointer derived from the array reference must cover both elements,
// so offsetting to the second element and reading it is in-bounds.
200 assert_eq!(unsafe { *p.add(1) }, 1);
203 /// Transmuting &&i32 to &&mut i32 is fine.
// NOTE(review): the `fn` header and the declaration of `y` (original lines
// 204-206) are missing from this excerpt, as are the uses of `p` (208+).
207 let p = unsafe { core::mem::transmute::<&&i32, &&mut i32>(&y) };
212 fn wide_raw_ptr_in_tuple() {
213 let mut x: Box<dyn std::any::Any> = Box::new("ouch");
214 let r = &mut *x as *mut dyn std::any::Any;
215 // This triggers the visitor-based recursive retagging. It is *not* supposed to retag raw
216 // pointers, but then the visitor might recurse into the "fields" of a wide raw pointer and
217 // finds a reference (to a vtable) there that it wants to retag... and that would be Wrong.
// NOTE(review): original line 218 (presumably constructing `pair` containing
// `r`, which is what puts the wide raw pointer inside a tuple) is missing from
// this excerpt.
219 let r = unsafe { &mut *pair.0 };
220 // Make sure the fn ptr part of the vtable is still fine.
// NOTE(review): the remainder of the body (original lines 221+) is missing
// from this excerpt.
224 fn not_unpin_not_protected() {
225 // `&mut !Unpin`, at least for now, does not get `noalias` nor `dereferenceable`, so we also
226 // don't add protectors. (We could, but until we have a better idea for where we want to go with
227 // the self-referential-generator situation, it does not seem worth the potential trouble.)
228 use std::marker::PhantomPinned;
// The `PhantomPinned` field makes `NotUnpin` a `!Unpin` type.
230 pub struct NotUnpin(i32, PhantomPinned);
232 fn inner(x: &mut NotUnpin, f: fn(&mut NotUnpin)) {
233 // `f` may mutate, but it may not deallocate!
// NOTE(review): the body of `inner` (original lines 234-236) is missing from
// this excerpt.
237 inner(Box::leak(Box::new(NotUnpin(0, PhantomPinned))), |x| {
// Deallocating through a raw pointer derived from the `&mut NotUnpin` is the
// point of the test: with no protector on `x`, this must be accepted.
238 let raw = x as *mut _;
239 drop(unsafe { Box::from_raw(raw) });
// NOTE(review): the closing lines (original 240+) are missing from this excerpt.