1 //@compile-flags: -Zmiri-permissive-provenance
5 // This strips provenance
// Converting via `transmute` (rather than an `as` cast) turns the pointer into a
// plain integer without exposing its provenance — that is exactly what this test
// wants to exercise under -Zmiri-permissive-provenance.
6 fn transmute_ptr_to_int<T>(x: *const T) -> usize {
// SAFETY: `*const T` and `usize` have the same size and ABI on the targets this
// test runs on, so the transmute itself is well-defined.
7 unsafe { std::mem::transmute(x) }
11 // Some casting-to-int with arithmetic.
12 let x = &42 as *const i32 as usize;
// NOTE(review): `y` is declared on a line elided from this excerpt — presumably
// derived arithmetically from `x`; confirm against the full file.
// Truncating to u8 and widening back must yield the low byte of `y`.
15 let z = y as u8 as usize;
16 assert_eq!(z, y % 256);
19 /// Test usize->ptr cast for dangling and OOB address.
20 /// That is safe, and thus has to work.
// Capture the address while the allocation behind `b` (created on an elided line)
// is still alive.
23 let x = &*b as *const i32 as usize;
// Casting the (by now possibly dangling) integer address back to a pointer is
// safe: UB could only arise if the pointer were dereferenced.
25 let _val = x as *const i32;
// Same exercise for an out-of-bounds address; the lines that free `b` and adjust
// `x` out of bounds are elided from this excerpt.
28 let mut x = &*b as *const i32 as usize;
30 let _val = x as *const i32;
34 // Pointer string formatting! We can't check the output as it changes when libstd changes,
35 // but we can make sure Miri does not error.
// Formatting with `{:?}` forces the pointer to be rendered as an address, which
// requires Miri to assign one — the result is deliberately discarded.
36 format!("{:?}", &mut 13 as *mut _);
40 // Check that intptrcast is triggered for explicit casts and that it is consistent with
// (the remainder of the comment above is on a line elided from this excerpt)
42 let a: *const i32 = &42;
// The transmute-based conversion must observe the same address an `as` cast
// would; only the low byte is kept here.
43 let b = transmute_ptr_to_int(a) as u8;
49 let bytes = [0i8, 1, 2, 3, 4, 5, 6, 7, 8, 9];
// In-bounds offsets into the same allocation; `wrapping_offset` sidesteps any
// UB question about the pointer arithmetic itself.
50 let one = bytes.as_ptr().wrapping_offset(1);
51 let three = bytes.as_ptr().wrapping_offset(3);
// Bitwise OR of two addresses within one allocation. NOTE(review): what `res`
// is checked against depends on the base address's alignment; the assertion
// lines are elided from this excerpt.
52 let res = (one as usize) | (three as usize);
// Capture the address of `val` (declared on an elided line) as an integer; the
// code that uses `addr` is not visible in this excerpt.
58 let addr = &val as *const _ as usize;
// Compare a dangling pointer against a pointer into a fresh allocation.
62 fn ptr_eq_dangling() {
// `b` is created on an elided line and freed before `x` is compared, so `x`
// becomes dangling — but comparing addresses is still safe.
64 let x = &*b as *const i32; // soon-to-be dangling
67 let y = &*b as *const i32; // different allocation
68 // They *could* be equal if memory was reused, but probably are not.
// Compare a far out-of-bounds pointer against a pointer into a different
// allocation; the comparison itself must be allowed.
72 fn ptr_eq_out_of_bounds() {
// `wrapping_sub` keeps the pointer arithmetic itself free of UB even though the
// result is 0x800 elements before the allocation behind `b` (created on an
// elided line).
74 let x = (&*b as *const i32).wrapping_sub(0x800); // out-of-bounds
76 let y = &*b as *const i32; // different allocation
77 // They *could* be equal (with the right base addresses), but probably are not.
// Compare a far out-of-bounds pointer against NULL; the comparison must be
// allowed even though the address is garbage.
81 fn ptr_eq_out_of_bounds_null() {
// `b` is allocated on an elided line; `wrapping_sub` makes the arithmetic safe.
83 let x = (&*b as *const i32).wrapping_sub(0x800); // out-of-bounds
84 // This *could* be NULL (with the right base address), but probably is not.
85 assert!(x != std::ptr::null());
// Compare a real pointer against a pointer forged from an integer literal;
// `b` is allocated on a line elided from this excerpt.
90 let x = &*b as *const i32;
91 // These *could* be equal (with the right base address), but probably are not.
92 assert!(x != 64 as *const i32);
// Dereferencing a zero-sized type through a dangling address must not be UB.
95 fn zst_deref_of_dangling() {
// The allocation behind `b` (created and freed on elided lines) no longer
// exists when `addr` is used below.
97 let addr = &*b as *const _ as usize;
99 // Now if we cast `addr` to a ptr it might pick up the dangling provenance.
100 // But if we only do a ZST deref there is no UB here!
101 let zst = addr as *const ();
// SAFETY: reading a zero-sized value touches no memory, so even a dangling
// pointer is fine here.
102 let _val = unsafe { *zst };
106 // Roundtrip a few functions through integers. Do this multiple times to make sure this does not
107 // work by chance. If we did not give unique addresses to ZST allocations -- which fn
108 // allocations are -- then we might be unable to cast back, or we might call the wrong function!
109 // Every function gets at most one address so doing a loop would not help...
// Each fnN is defined on elided lines; the assertion below implies fnN returns N.
135 fn0 as fn() -> i32 as *const () as usize,
136 fn1 as fn() -> i32 as *const () as usize,
137 fn2 as fn() -> i32 as *const () as usize,
138 fn3 as fn() -> i32 as *const () as usize,
139 fn4 as fn() -> i32 as *const () as usize,
140 fn5 as fn() -> i32 as *const () as usize,
141 fn6 as fn() -> i32 as *const () as usize,
142 fn7 as fn() -> i32 as *const () as usize,
144 for (idx, &addr) in fns.iter().enumerate() {
// SAFETY: each stored integer was obtained from a real fn pointer above, so
// casting back and transmuting reconstitutes a valid `fn() -> i32`.
145 let fun: fn() -> i32 = unsafe { mem::transmute(addr as *const ()) };
// If addresses were not unique per function, the wrong function could be
// called here and this check would fail.
146 assert_eq!(fun(), idx as i32);
// Excerpt of the test driver's call list; other test calls are on elided lines.
158 ptr_eq_out_of_bounds();
159 ptr_eq_out_of_bounds_null();
161 zst_deref_of_dangling();