// NOTE(review): this view of the file is fragmentary — each line carries its original
// file line number as a leading token, and interior lines are missing. Code is kept
// byte-identical; only comments are added.
//
// Extension trait adding pointer-arithmetic and pointer-comparison operations to the
// Miri evaluation context; implemented below for `super::MiriEvalContext`.
6 pub trait EvalContextExt<'tcx> {
// Evaluate a MIR binary operation where at least one operand is (or may be) a
// pointer. Returns the result scalar plus an overflow flag. (The `fn ptr_op(` header
// line itself is not visible in this extraction.)
10 left: ImmTy<'tcx, Borrow>,
11 right: ImmTy<'tcx, Borrow>,
12 ) -> EvalResult<'tcx, (Scalar<Borrow>, bool)>;
// Pointer-with-integer arithmetic (presumably Add/Sub/BitAnd/Rem, per the impl below);
// parameter lines between the shown ones are missing from this view.
14 fn ptr_int_arithmetic(
17 left: Pointer<Borrow>,
20 ) -> EvalResult<'tcx, (Scalar<Borrow>, bool)>;
// Equality comparison of two thin pointer scalars (either side may be an integer).
25 right: Scalar<Borrow>,
26 ) -> EvalResult<'tcx, bool>;
// `ptr.offset` semantics: offset a pointer, erroring if it would leave its allocation.
28 fn pointer_offset_inbounds(
33 ) -> EvalResult<'tcx, Scalar<Borrow>>;
36 impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, 'tcx> {
// `ptr_op`: dispatch a binary operation involving pointers. The `fn ptr_op(` header
// and the `bin_op` parameter line are missing from this extraction (original lines
// 37–39); the visible signature fragment follows.
40 left: ImmTy<'tcx, Borrow>,
41 right: ImmTy<'tcx, Borrow>,
42 ) -> EvalResult<'tcx, (Scalar<Borrow>, bool)> {
43 use rustc::mir::BinOp::*;
45 trace!("ptr_op: {:?} {:?} {:?}", *left, bin_op, *right);
// Eq/Ne are handled first because they are the only operations that also work on
// fat (two-word) pointers. (The `if`/`match` opener guarding this arm — original
// lines 48–49 — is not visible here.)
47 // Operations that support fat pointers
50 let eq = match (*left, *right) {
// Thin pointers: compare the single scalar component.
51 (Immediate::Scalar(left), Immediate::Scalar(right)) =>
52 self.ptr_eq(left.not_undef()?, right.not_undef()?)?,
// Fat pointers: equal only if both the data pointer and the metadata agree.
53 (Immediate::ScalarPair(left1, left2), Immediate::ScalarPair(right1, right2)) =>
54 self.ptr_eq(left1.not_undef()?, right1.not_undef()?)? &&
55 self.ptr_eq(left2.not_undef()?, right2.not_undef()?)?,
56 _ => bug!("Type system should not allow comparing Scalar with ScalarPair"),
// Ne is the negation of Eq; comparisons never report overflow.
58 return Ok((Scalar::from_bool(if bin_op == Eq { eq } else { !eq }), false));
63 // Now we expect no more fat pointers.
64 let left_layout = left.layout;
65 let left = left.to_scalar()?;
66 let right_layout = right.layout;
67 let right = right.to_scalar()?;
// If neither side is a pointer, the generic integer path should have handled this;
// `Offset` is the one op where the right-hand side is a plain integer by design.
68 debug_assert!(left.is_ptr() || right.is_ptr() || bin_op == Offset);
// `Offset` arm (opener missing from view): offset by `right` elements of the pointee
// type, with an in-bounds check.
72 let pointee_ty = left_layout.ty
74 .expect("Offset called on non-ptr type")
76 let ptr = self.pointer_offset_inbounds(
79 right.to_isize(self)?,
// Ordering comparisons and subtraction require both operands to point into the
// same allocation; otherwise the result would be allocation-layout-dependent.
83 // These need both to be pointer, and fail if they are not in the same location
84 Lt | Le | Gt | Ge | Sub if left.is_ptr() && right.is_ptr() => {
85 let left = left.to_ptr().expect("we checked is_ptr");
86 let right = right.to_ptr().expect("we checked is_ptr");
87 if left.alloc_id == right.alloc_id {
88 let res = match bin_op {
// Same allocation: comparing the byte offsets is sound.
89 Lt => left.offset < right.offset,
90 Le => left.offset <= right.offset,
91 Gt => left.offset > right.offset,
92 Ge => left.offset >= right.offset,
// Sub arm (opener missing from view): delegate to the ordinary integer
// subtraction of the two offsets.
94 // subtract the offsets
95 let left_offset = Scalar::from_uint(left.offset.bytes(), self.memory().pointer_size());
96 let right_offset = Scalar::from_uint(right.offset.bytes(), self.memory().pointer_size());
97 let layout = self.layout_of(self.tcx.types.usize)?;
98 return self.binary_op(
100 ImmTy::from_scalar(left_offset, layout),
101 ImmTy::from_scalar(right_offset, layout),
104 _ => bug!("We already established it has to be one of these operators."),
// Comparison results never overflow.
106 Ok((Scalar::from_bool(res), false))
108 // Both are pointers, but from different allocations.
109 err!(InvalidPointerMath)
112 // These work if the left operand is a pointer, and the right an integer
113 Add | BitAnd | Sub | Rem if left.is_ptr() && right.is_bits() => {
114 // Cast to i128 is fine as we checked the kind to be ptr-sized
115 self.ptr_int_arithmetic(
117 left.to_ptr().expect("we checked is_ptr"),
118 right.to_bits(self.memory().pointer_size()).expect("we checked is_bits"),
119 right_layout.abi.is_signed(),
122 // Commutative operators also work if the integer is on the left
123 Add | BitAnd if left.is_bits() && right.is_ptr() => {
124 // This is a commutative operation, just swap the operands
125 self.ptr_int_arithmetic(
127 right.to_ptr().expect("we checked is_ptr"),
128 left.to_bits(self.memory().pointer_size()).expect("we checked is_bits"),
129 left_layout.abi.is_signed(),
132 // Nothing else works
133 _ => err!(InvalidPointerMath),
// `ptr_eq`: decide whether two thin pointer scalars are equal. The `fn ptr_eq(`
// header line is not visible in this extraction; the signature fragment follows.
// Returns Ok(bool) when equality is decidable, and an error when the comparison is
// not meaningful under Miri's memory model.
139 left: Scalar<Borrow>,
140 right: Scalar<Borrow>,
141 ) -> EvalResult<'tcx, bool> {
142 let size = self.pointer_size();
143 Ok(match (left, right) {
// Two plain integers: compare their bit patterns at pointer width.
144 (Scalar::Bits { .. }, Scalar::Bits { .. }) =>
145 left.to_bits(size)? == right.to_bits(size)?,
146 (Scalar::Ptr(left), Scalar::Ptr(right)) => {
147 // Comparison illegal if one of them is out-of-bounds, *unless* they
148 // are in the same allocation.
149 if left.alloc_id == right.alloc_id {
// Same allocation: equal iff the offsets match, no bounds check needed.
150 left.offset == right.offset
// else-branch (opener missing from view): pointers into different allocations.
152 // This accepts one-past-the end. Thus, there is still technically
153 // some non-determinism that we do not fully rule out when two
154 // allocations sit right next to each other. The C/C++ standards are
155 // somewhat fuzzy about this case, so pragmatically speaking I think
156 // for now this check is "good enough".
157 // FIXME: Once we support intptrcast, we could try to fix these holes.
158 // Dead allocations in miri cannot overlap with live allocations, but
159 // on real hardware this can easily happen. Thus for comparisons we require
160 // both pointers to be live.
161 self.memory().check_bounds_ptr(left, InboundsCheck::Live)?;
162 self.memory().check_bounds_ptr(right, InboundsCheck::Live)?;
163 // Two in-bounds pointers, we can compare across allocations.
// (The `false` result line — original ~164 — is not visible in this view.)
167 // Comparing ptr and integer.
168 (Scalar::Ptr(ptr), Scalar::Bits { bits, size }) |
169 (Scalar::Bits { bits, size }, Scalar::Ptr(ptr)) => {
// Mixed comparison is only allowed when we can prove inequality by one of the
// three cases below; otherwise it is rejected as non-deterministic.
170 assert_eq!(size as u64, self.pointer_size().bytes());
171 let bits = bits as u64;
173 // Case I: Comparing real pointers with "small" integers.
174 // Really we should only do this for NULL, but pragmatically speaking on non-bare-metal systems,
175 // an allocation will never be at the very bottom of the address space.
176 // Such comparisons can arise when comparing empty slices, which sometimes are "fake"
177 // integer pointers (okay because the slice is empty) and sometimes point into a
// (continuation of this comment — original line 178 — is missing from this view)
179 // The most common source of such integer pointers is `NonNull::dangling()`, which
180 // equals the type's alignment. i128 might have an alignment of 16 bytes, but few types have
181 // alignment 32 or higher, hence the limit of 32.
182 // FIXME: Once we support intptrcast, we could try to fix these holes.
184 // Test if the ptr is in-bounds. Then it cannot be NULL.
185 // Even dangling pointers cannot be NULL.
186 if self.memory().check_bounds_ptr(ptr, InboundsCheck::MaybeDead).is_ok() {
// (the `return Ok(false)` and small-integer guard lines — ~187-190 — are missing)
191 let (alloc_size, alloc_align) = self.memory()
192 .get_size_and_align(ptr.alloc_id, InboundsCheck::MaybeDead)
193 .expect("determining size+align of dead ptr cannot fail")
195 // Case II: Alignment gives it away
196 if ptr.offset.bytes() % alloc_align.bytes() == 0 {
197 // The offset maintains the allocation alignment, so we know `base+offset`
198 // is aligned by `alloc_align`.
199 // FIXME: We could be even more general, e.g., offset 2 into a 4-aligned
200 // allocation cannot equal 3.
201 if bits % alloc_align.bytes() != 0 {
202 // The integer is *not* aligned. So they cannot be equal.
206 // Case III: The integer is too big, and the allocation goes on a bit
207 // without wrapping around the address space.
209 // Compute the highest address at which this allocation could live.
210 // Subtract one more, because it must be possible to add the size
211 // to the base address without overflowing; that is, the very last address
212 // of the address space is never dereferencable (but it can be in-bounds, i.e.,
213 // one-past-the-end).
215 ((1u128 << self.pointer_size().bits())
216 - u128::from(alloc_size.bytes())
219 if let Some(max_addr) = max_base_addr.checked_add(ptr.offset.bytes()) {
221 // The integer is too big, this cannot possibly be equal.
227 // None of the supported cases.
228 return err!(InvalidPointerMath);
// `ptr_int_arithmetic`: arithmetic between a pointer (left) and a ptr-sized integer
// (right). Parameter lines for `bin_op`, `right`, and `signed` (original ~234-238)
// are missing from this extraction. Returns the result scalar plus an overflow flag,
// matching the `(Scalar, bool)` contract of `ptr_op`.
233 fn ptr_int_arithmetic(
236 left: Pointer<Borrow>,
239 ) -> EvalResult<'tcx, (Scalar<Borrow>, bool)> {
240 use rustc::mir::BinOp::*;
// Helper: repackage an overflowing-offset result `(ptr, overflowed)` as the
// `(Scalar, bool)` pair this function returns.
242 fn map_to_primval((res, over): (Pointer<Borrow>, bool)) -> (Scalar<Borrow>, bool) {
243 (Scalar::Ptr(res), over)
// Sub arm (opener missing from view): implemented as a signed offset by `-right`.
248 // The only way this can overflow is by underflowing, so signedness of the right
249 // operand does not matter.
250 map_to_primval(left.overflowing_signed_offset(-(right as i128), self)),
// Signed Add arm (opener missing): signed offset by `right`.
252 map_to_primval(left.overflowing_signed_offset(right as i128, self)),
// Unsigned Add arm (opener missing): plain byte offset.
254 map_to_primval(left.overflowing_offset(Size::from_bytes(right as u64), self)),
// BitAnd on a pointer is only meaningful for alignment masks, which we can answer
// using the allocation's base alignment without knowing the base address.
256 BitAnd if !signed => {
257 let ptr_base_align = self.memory().get(left.alloc_id)?.align.bytes();
// base_mask = !(align - 1) truncated to pointer width: the bits of an address
// that the allocation's alignment fully determines.
259 // FIXME: use `interpret::truncate`, once that takes a `Size` instead of a `Layout`.
260 let shift = 128 - self.memory().pointer_size().bits();
261 let value = !(ptr_base_align as u128 - 1);
262 // Truncate (shift left to drop out leftover values, shift right to fill with zeroes).
263 (value << shift) >> shift
265 let ptr_size = self.memory().pointer_size().bytes() as u8;
266 trace!("ptr BitAnd, align {}, operand {:#010x}, base_mask {:#010x}",
267 ptr_base_align, right, base_mask);
268 if right & base_mask == base_mask {
269 // Case 1: the base address bits are all preserved, i.e., right is all-1 there.
// The result stays a pointer; only the offset bits are masked.
270 let offset = (left.offset.bytes() as u128 & right) as u64;
272 Scalar::Ptr(Pointer::new_with_tag(
274 Size::from_bytes(offset),
279 } else if right & base_mask == 0 {
280 // Case 2: the base address bits are all taken away, i.e., right is all-0 there.
// The result is a plain integer: only offset bits survive.
281 (Scalar::Bits { bits: (left.offset.bytes() as u128) & right, size: ptr_size }, false)
// Any other mask would need the concrete base address, which Miri does not have.
283 return err!(ReadPointerAsBytes);
// Rem arm (opener missing from view):
288 // Doing modulo a divisor of the alignment is allowed.
289 // (Intuition: modulo a divisor leaks less information.)
290 let ptr_base_align = self.memory().get(left.alloc_id)?.align.bytes();
291 let right = right as u64;
292 let ptr_size = self.memory().pointer_size().bytes() as u8;
294 // Modulo 1 is always 0.
295 (Scalar::Bits { bits: 0, size: ptr_size }, false)
296 } else if ptr_base_align % right == 0 {
297 // The base address would be cancelled out by the modulo operation, so we can
298 // just take the modulo of the offset.
301 bits: (left.offset.bytes() % right) as u128,
// Otherwise the answer depends on the unknown base address.
307 return err!(ReadPointerAsBytes);
// Fallthrough: any other op on a pointer is not supported; report it verbosely.
313 "unimplemented binary op on pointer {:?}: {:?}, {:?} ({})",
317 if signed { "signed" } else { "unsigned" }
319 return err!(Unimplemented(msg));
324 /// Raises an error if the offset moves the pointer outside of its allocation.
325 /// We consider ZSTs their own huge allocation that doesn't overlap with anything (and nothing
326 /// moves in there because the size is 0). We also consider the NULL pointer its own separate
327 /// allocation, and all the remaining integers pointers their own allocation.
// Parameter lines for the pointer and the element-count offset (original ~329-332)
// are missing from this extraction; `pointee_ty` and the return type are visible.
328 fn pointer_offset_inbounds(
331 pointee_ty: Ty<'tcx>,
333 ) -> EvalResult<'tcx, Scalar<Borrow>> {
334 // FIXME: assuming here that type size is less than `i64::max_value()`.
335 let pointee_size = self.layout_of(pointee_ty)?.size.bytes() as i64;
// Byte offset = element count * element size; a multiplication overflow is an
// arithmetic-overflow error, not UB.
337 .checked_mul(pointee_size)
338 .ok_or_else(|| InterpError::Overflow(mir::BinOp::Mul))?;
339 // Now let's see what kind of pointer this is.
340 if let Scalar::Ptr(ptr) = ptr {
341 // Both old and new pointer must be in-bounds of a *live* allocation.
342 // (Of the same allocation, but that part is trivial with our representation.)
343 self.memory().check_bounds_ptr(ptr, InboundsCheck::Live)?;
344 let ptr = ptr.signed_offset(offset, self)?;
345 self.memory().check_bounds_ptr(ptr, InboundsCheck::Live)?;
// (the Ok(...) result and the else-opener — original ~346-347 — are missing)
348 // An integer pointer. They can only be offset by 0, and we pretend there
349 // is a little zero-sized allocation here.
// (the `if offset == 0` success path — original ~350-352 — is missing from view)
353 err!(InvalidPointerMath)