1 use rustc::ty::{Ty, layout::TyLayout};
// NOTE(review): the embedded original line numbers are non-contiguous, so this
// chunk is missing lines throughout (several method names and parameters are
// elided). Treat this as a partial view — do not assume completeness.
//
// Extension trait adding pointer-arithmetic evaluation hooks to Miri's
// evaluation context. All methods return `EvalResult<'tcx, _>`, Miri's
// error-propagating result type.
6 pub trait EvalContextExt<'tcx> {
// Tail of a binary-op signature (presumably `ptr_op` — its `fn` line is not
// visible here): takes left/right scalars with their layouts and yields the
// result scalar plus an overflow flag.
11 left_layout: TyLayout<'tcx>,
12 right: Scalar<Borrow>,
13 right_layout: TyLayout<'tcx>,
14 ) -> EvalResult<'tcx, (Scalar<Borrow>, bool)>;
// Arithmetic between a pointer operand and an integer operand; also yields
// `(result, overflowed)`.
16 fn ptr_int_arithmetic(
19 left: Pointer<Borrow>,
22 ) -> EvalResult<'tcx, (Scalar<Borrow>, bool)>;
// Tail of an equality-test signature (presumably `ptr_eq`, used by the impl
// below); returns whether the two scalars compare equal.
27 right: Scalar<Borrow>,
29 ) -> EvalResult<'tcx, bool>;
// Offsets a pointer by some element count, erroring if the result would
// leave its allocation (see the doc comment on the impl further down).
31 fn pointer_offset_inbounds(
36 ) -> EvalResult<'tcx, Scalar<Borrow>>;
// NOTE(review): lines are missing inside this impl (original numbering jumps);
// the `fn ptr_op` line itself, the `match bin_op` scrutinee, and several
// closing braces are not visible in this chunk.
39 impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, 'tcx> {
// Evaluates a MIR binary operation where at least one operand is (or may be)
// a pointer. Returns the result scalar and an overflow flag.
44 left_layout: TyLayout<'tcx>,
45 right: Scalar<Borrow>,
46 right_layout: TyLayout<'tcx>,
47 ) -> EvalResult<'tcx, (Scalar<Borrow>, bool)> {
// Bring `Add`, `Sub`, `Eq`, `Offset`, … variants into scope for the match.
48 use rustc::mir::BinOp::*;
50 trace!("ptr_op: {:?} {:?} {:?}", left, bin_op, right);
// Invariant: this function is only reached when pointers are involved,
// except for `Offset`, which is routed here unconditionally.
51 debug_assert!(left.is_ptr() || right.is_ptr() || bin_op == Offset);
// `Offset` arm (match head not visible): derive the pointee type from the
// left operand's layout and delegate the inbounds-checked offset.
55 let pointee_ty = left_layout.ty
57 .expect("Offset called on non-ptr type")
59 let ptr = self.pointer_offset_inbounds(
62 right.to_isize(self)?,
66 // These work on anything
// Eq / Ne arms (heads not visible): equality never overflows, hence `false`.
68 Ok((Scalar::from_bool(self.ptr_eq(left, right, left_layout.size)?), false)),
70 Ok((Scalar::from_bool(!self.ptr_eq(left, right, left_layout.size)?), false)),
71 // These need both to be pointer, and fail if they are not in the same location
72 Lt | Le | Gt | Ge | Sub if left.is_ptr() && right.is_ptr() => {
73 let left = left.to_ptr().expect("we checked is_ptr");
74 let right = right.to_ptr().expect("we checked is_ptr");
// Same allocation: ordering/subtraction are well-defined on the offsets.
75 if left.alloc_id == right.alloc_id {
76 let res = match bin_op {
77 Lt => left.offset < right.offset,
78 Le => left.offset <= right.offset,
79 Gt => left.offset > right.offset,
80 Ge => left.offset >= right.offset,
// `Sub` arm (head not visible): reduce pointer subtraction to an
// integer `binary_op` on the two usize-sized offsets.
82 // subtract the offsets
83 let left_offset = Scalar::from_uint(left.offset.bytes(), self.memory().pointer_size());
84 let right_offset = Scalar::from_uint(right.offset.bytes(), self.memory().pointer_size());
85 let layout = self.layout_of(self.tcx.types.usize)?;
86 return self.binary_op(
92 _ => bug!("We already established it has to be one of these operators."),
// Comparisons never overflow.
94 Ok((Scalar::from_bool(res), false))
96 // Both are pointers, but from different allocations.
97 err!(InvalidPointerMath)
100 // These work if the left operand is a pointer, and the right an integer
101 Add | BitAnd | Sub | Rem if left.is_ptr() && right.is_bits() => {
102 // Cast to i128 is fine as we checked the kind to be ptr-sized
103 self.ptr_int_arithmetic(
105 left.to_ptr().expect("we checked is_ptr"),
106 right.to_bits(self.memory().pointer_size()).expect("we checked is_bits"),
107 right_layout.abi.is_signed(),
110 // Commutative operators also work if the integer is on the left
111 Add | BitAnd if left.is_bits() && right.is_ptr() => {
112 // This is a commutative operation, just swap the operands
113 self.ptr_int_arithmetic(
115 right.to_ptr().expect("we checked is_ptr"),
116 left.to_bits(self.memory().pointer_size()).expect("we checked is_bits"),
117 left_layout.abi.is_signed(),
120 // Nothing else works
121 _ => err!(InvalidPointerMath),
// NOTE(review): the `fn ptr_eq(...)` line and a `size` parameter declaration
// are not visible in this chunk (original line numbering jumps) — the body
// below clearly reads a `size` binding. Verify against the full file.
//
// Decides whether two scalars compare equal under Miri's memory model,
// erroring out when the comparison's result would be non-deterministic.
127 left: Scalar<Borrow>,
128 right: Scalar<Borrow>,
130 ) -> EvalResult<'tcx, bool> {
131 Ok(match (left, right) {
// Two plain integers: bit-level comparison at the operand size.
132 (Scalar::Bits { .. }, Scalar::Bits { .. }) =>
133 left.to_bits(size)? == right.to_bits(size)?,
134 (Scalar::Ptr(left), Scalar::Ptr(right)) => {
135 // Comparison illegal if one of them is out-of-bounds, *unless* they
136 // are in the same allocation.
137 if left.alloc_id == right.alloc_id {
// Same allocation: equal iff the offsets match; no bounds check needed.
138 left.offset == right.offset
140 // This accepts one-past-the end. So technically there is still
141 // some non-determinism that we do not fully rule out when two
142 // allocations sit right next to each other. The C/C++ standards are
143 // somewhat fuzzy about this case, so I think for now this check is
145 // Dead allocations in miri cannot overlap with live allocations, but
146 // on read hardware this can easily happen. Thus for comparisons we require
147 // both pointers to be live.
148 self.memory().get(left.alloc_id)?.check_bounds_ptr(left)?;
149 self.memory().get(right.alloc_id)?.check_bounds_ptr(right)?;
150 // Two in-bounds pointers, we can compare across allocations
// NOTE(review): the actual result expression for the cross-allocation
// case (presumably `false`) is on a line not visible in this chunk.
154 // Comparing ptr and integer
155 (Scalar::Ptr(ptr), Scalar::Bits { bits, size }) |
156 (Scalar::Bits { bits, size }, Scalar::Ptr(ptr)) => {
// The integer operand must be pointer-sized for this comparison.
157 assert_eq!(size as u64, self.pointer_size().bytes());
158 let bits = bits as u64;
160 // Case I: Comparing with NULL
162 // Test if the ptr is in-bounds. Then it cannot be NULL.
163 // Even dangling pointers cannot be NULL.
164 if self.memory().check_bounds_ptr_maybe_dead(ptr).is_ok() {
// NOTE(review): the `false` result and the NULL test for `bits` live on
// lines not visible here.
169 let (alloc_size, alloc_align) = self.memory().get_size_and_align(ptr.alloc_id);
171 // Case II: Alignment gives it away
172 if ptr.offset.bytes() % alloc_align.bytes() == 0 {
173 // The offset maintains the allocation alignment, so we know `base+offset`
174 // is aligned by `alloc_align`.
175 // FIXME: We could be even more general, e.g. offset 2 into a 4-aligned
176 // allocation cannot equal 3.
177 if bits % alloc_align.bytes() != 0 {
178 // The integer is *not* aligned. So they cannot be equal.
182 // Case III: The integer is too big, and the allocation goes on a bit
183 // without wrapping around the address space.
185 // Compute the highest address at which this allocation could live.
186 // Substract one more, because it must be possible to add the size
187 // to the base address without overflowing -- IOW, the very last address
188 // of the address space is never dereferencable (but it can be in-bounds, i.e.,
189 // one-past-the-end).
// Max base address = 2^ptr_bits - alloc_size - 1 (binding line not visible).
191 ((1u128 << self.pointer_size().bits())
192 - u128::from(alloc_size.bytes())
195 if let Some(max_addr) = max_base_addr.checked_add(ptr.offset.bytes()) {
197 // The integer is too big, this cannot possibly be equal
203 // None of the supported cases.
// Cannot decide ptr-vs-int equality deterministically: refuse.
204 return err!(InvalidPointerMath);
// NOTE(review): lines are missing within this function (original numbering
// jumps); the `match bin_op` head, the `right` parameter declaration, and
// several arm heads/closing braces are not visible in this chunk.
//
// Performs arithmetic between a pointer (`left`) and a pointer-sized integer,
// returning the result scalar and whether the operation overflowed.
209 fn ptr_int_arithmetic(
212 left: Pointer<Borrow>,
215 ) -> EvalResult<'tcx, (Scalar<Borrow>, bool)> {
216 use rustc::mir::BinOp::*;
// Small adapter: lift a (Pointer, overflow) pair into the Scalar domain.
218 fn map_to_primval((res, over): (Pointer<Borrow>, bool)) -> (Scalar<Borrow>, bool) {
219 (Scalar::Ptr(res), over)
// `Sub` arm (head not visible): subtract by adding the negated integer.
224 // The only way this can overflow is by underflowing, so signdeness of the right operands does not matter
225 map_to_primval(left.overflowing_signed_offset(-(right as i128), self)),
// `Add` (signed) arm, presumably — head not visible.
227 map_to_primval(left.overflowing_signed_offset(right as i128, self)),
// Unsigned offset arm — head not visible.
229 map_to_primval(left.overflowing_offset(Size::from_bytes(right as u64), self)),
231 BitAnd if !signed => {
// Masking a pointer is only meaningful relative to the allocation's
// base alignment, which is all we know about the base address.
232 let ptr_base_align = self.memory().get(left.alloc_id)?.align.bytes();
234 // FIXME: Use interpret::truncate, once that takes a Size instead of a Layout
235 let shift = 128 - self.memory().pointer_size().bits();
236 let value = !(ptr_base_align as u128 - 1);
237 // truncate (shift left to drop out leftover values, shift right to fill with zeroes)
238 (value << shift) >> shift
240 let ptr_size = self.memory().pointer_size().bytes() as u8;
241 trace!("Ptr BitAnd, align {}, operand {:#010x}, base_mask {:#010x}",
242 ptr_base_align, right, base_mask);
243 if right & base_mask == base_mask {
244 // Case 1: The base address bits are all preserved, i.e., right is all-1 there
// Result stays a pointer into the same allocation; only the offset
// is masked.
245 let offset = (left.offset.bytes() as u128 & right) as u64;
247 Scalar::Ptr(Pointer::new_with_tag(
249 Size::from_bytes(offset),
254 } else if right & base_mask == 0 {
255 // Case 2: The base address bits are all taken away, i.e., right is all-0 there
// Base contributes nothing; the result is a plain integer.
256 (Scalar::Bits { bits: (left.offset.bytes() as u128) & right, size: ptr_size }, false)
// Mixed mask: would reveal base-address bits we do not know — refuse.
258 return err!(ReadPointerAsBytes);
// `Rem` arm (head not visible):
263 // Doing modulo a divisor of the alignment is allowed.
264 // (Intuition: Modulo a divisor leaks less information.)
265 let ptr_base_align = self.memory().get(left.alloc_id)?.align.bytes();
266 let right = right as u64;
267 let ptr_size = self.memory().pointer_size().bytes() as u8;
269 // modulo 1 is always 0
270 (Scalar::Bits { bits: 0, size: ptr_size }, false)
271 } else if ptr_base_align % right == 0 {
272 // the base address would be cancelled out by the modulo operation, so we can
273 // just take the modulo of the offset
274 (Scalar::Bits { bits: (left.offset.bytes() % right) as u128, size: ptr_size }, false)
// Divisor does not divide the alignment: result depends on the
// unknown base address — refuse.
276 return err!(ReadPointerAsBytes);
// Fallback arm: anything else on pointers is unimplemented.
281 let msg = format!("unimplemented binary op on pointer {:?}: {:?}, {:?} ({})", bin_op, left, right, if signed { "signed" } else { "unsigned" });
282 return err!(Unimplemented(msg));
287 /// This function raises an error if the offset moves the pointer outside of its allocation. We consider
288 /// ZSTs their own huge allocation that doesn't overlap with anything (and nothing moves in there because the size is 0).
289 /// We also consider the NULL pointer its own separate allocation, and all the remaining integers pointers their own
// NOTE(review): the tail of the doc comment and parts of the parameter list
// (`ptr`, `offset`) are on lines not visible in this chunk.
291 fn pointer_offset_inbounds(
294 pointee_ty: Ty<'tcx>,
296 ) -> EvalResult<'tcx, Scalar<Borrow>> {
297 // FIXME: assuming here that type size is < i64::max_value()
298 let pointee_size = self.layout_of(pointee_ty)?.size.bytes() as i64;
// Scale the element count to a byte offset; the multiplication itself
// must not overflow, reported as an arithmetic-overflow error.
299 let offset = offset.checked_mul(pointee_size).ok_or_else(|| EvalErrorKind::Overflow(mir::BinOp::Mul))?;
300 // Now let's see what kind of pointer this is
301 if let Scalar::Ptr(ptr) = ptr {
302 // Both old and new pointer must be in-bounds of a *live* allocation.
303 // (Of the same allocation, but that part is trivial with our representation.)
304 let alloc = self.memory().get(ptr.alloc_id)?;
// Check bounds before AND after the offset so neither endpoint escapes.
305 alloc.check_bounds_ptr(ptr)?;
306 let ptr = ptr.signed_offset(offset, self)?;
307 alloc.check_bounds_ptr(ptr)?;
310 // An integer pointer. They can only be offset by 0, and we pretend there
311 // is a little zero-sized allocation here.
// Any nonzero offset on an integer "pointer" is invalid arithmetic.
315 err!(InvalidPointerMath)