1 use rustc::ty::{Ty, layout::{Size, LayoutOf}};
// Extension trait providing pointer-arithmetic support for the Miri
// evaluation context (implemented below for `MiriEvalContext`).
// NOTE(review): this chunk is missing many lines — most method names and
// parameter lists are truncated; comments describe only what is visible.
6 pub trait EvalContextExt<'tcx> {
// Fragment of a method returning plain `InterpResult<'tcx>`
// (`pointer_inbounds`, judging by the impl below — TODO confirm).
10 ) -> InterpResult<'tcx>;
// Fragment of a binary-operation method: takes two immediates and returns
// a (result scalar, overflow flag) pair.
15 left: ImmTy<'tcx, Tag>,
16 right: ImmTy<'tcx, Tag>,
17 ) -> InterpResult<'tcx, (Scalar<Tag>, bool)>;
// Pointer/integer arithmetic (Add/Sub/BitAnd/Rem on ptr and int — see impl).
19 fn ptr_int_arithmetic(
25 ) -> InterpResult<'tcx, (Scalar<Tag>, bool)>;
// Fragment of a bool-returning method (`ptr_eq`, per the impl — TODO confirm).
31 ) -> InterpResult<'tcx, bool>;
// In-bounds pointer offset, used by the `Offset` operator below.
33 fn pointer_offset_inbounds(
38 ) -> InterpResult<'tcx, Scalar<Tag>>;
41 impl<'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'mir, 'tcx> {
42 /// Test if the pointer is in-bounds of a live allocation.
44 fn pointer_inbounds(&self, ptr: Pointer<Tag>) -> InterpResult<'tcx> {
// AllocCheck::Live: the lookup itself errors for deallocated (dead) allocations.
45 let (size, _align) = self.memory().get_size_and_align(ptr.alloc_id, AllocCheck::Live)?;
// Errors if `ptr`'s offset lies outside the allocation's size
// (reported with the `InboundsTest` message).
46 ptr.check_in_alloc(size, CheckInAllocMsg::InboundsTest)
// NOTE(review): only a fragment of this method is visible — its `fn` header
// (named `ptr_op` per the trace! below — TODO confirm) and several interior
// lines (including most match-arm headers) are missing from this chunk.
// Comments below cover only what is shown.
52 left: ImmTy<'tcx, Tag>,
53 right: ImmTy<'tcx, Tag>,
54 ) -> InterpResult<'tcx, (Scalar<Tag>, bool)> {
55 use rustc::mir::BinOp::*;
57 trace!("ptr_op: {:?} {:?} {:?}", *left, bin_op, *right);
59 // Operations that support fat pointers
// Equality arm (header missing): thin pointers compare as one scalar;
// fat pointers compare both components (data ptr and metadata).
62 let eq = match (*left, *right) {
63 (Immediate::Scalar(left), Immediate::Scalar(right)) =>
64 self.ptr_eq(left.not_undef()?, right.not_undef()?)?,
65 (Immediate::ScalarPair(left1, left2), Immediate::ScalarPair(right1, right2)) =>
66 self.ptr_eq(left1.not_undef()?, right1.not_undef()?)? &&
67 self.ptr_eq(left2.not_undef()?, right2.not_undef()?)?,
68 _ => bug!("Type system should not allow comparing Scalar with ScalarPair"),
// Ne is the negation of Eq; the second tuple field is the overflow flag,
// which is never set for comparisons.
70 return Ok((Scalar::from_bool(if bin_op == Eq { eq } else { !eq }), false));
75 // Now we expect no more fat pointers.
76 let left_layout = left.layout;
77 let left = left.to_scalar()?;
78 let right_layout = right.layout;
79 let right = right.to_scalar()?;
// Offset arm (header missing — TODO confirm): scale the count by the
// pointee size and enforce in-bounds via `pointer_offset_inbounds`.
83 let pointee_ty = left_layout.ty
85 .expect("Offset called on non-ptr type")
87 let ptr = self.pointer_offset_inbounds(
// The element count is read as a signed, pointer-sized integer.
90 right.to_isize(self)?,
94 // These need both to be pointer, and fail if they are not in the same location
95 Lt | Le | Gt | Ge | Sub if left.is_ptr() && right.is_ptr() => {
96 let left = left.assert_ptr();
97 let right = right.assert_ptr();
// Pointers into the same allocation can be compared/subtracted by offset
// alone — the (unknown) base address cancels out.
98 if left.alloc_id == right.alloc_id {
99 let res = match bin_op {
100 Lt => left.offset < right.offset,
101 Le => left.offset <= right.offset,
102 Gt => left.offset > right.offset,
103 Ge => left.offset >= right.offset,
105 // subtract the offsets
106 let left_offset = Scalar::from_uint(left.offset.bytes(), self.memory().pointer_size());
107 let right_offset = Scalar::from_uint(right.offset.bytes(), self.memory().pointer_size());
108 let layout = self.layout_of(self.tcx.types.usize)?;
// Delegate Sub to the ordinary integer binary_op so the overflow
// flag is computed consistently with integer subtraction.
109 return self.binary_op(
111 ImmTy::from_scalar(left_offset, layout),
112 ImmTy::from_scalar(right_offset, layout),
115 _ => bug!("We already established it has to be one of these operators."),
117 (Scalar::from_bool(res), false)
119 // Both are pointers, but from different allocations.
120 throw_unsup!(InvalidPointerMath)
// Ordered comparison of two plain (non-pointer) pointer-sized bit values.
123 Lt | Le | Gt | Ge if left.is_bits() && right.is_bits() => {
124 let left = left.assert_bits(self.memory().pointer_size());
125 let right = right.assert_bits(self.memory().pointer_size());
// NOTE(review): the comparison arms of this inner match are missing
// from this chunk; presumably they mirror the ptr/ptr arms above.
126 let res = match bin_op {
131 _ => bug!("We already established it has to be one of these operators."),
133 Ok((Scalar::from_bool(res), false))
135 Gt | Ge if left.is_ptr() && right.is_bits() => {
136 // "ptr >[=] integer" can be tested if the integer is small enough.
137 let left = left.assert_ptr();
138 let right = right.assert_bits(self.memory().pointer_size());
// MaybeDead: only the allocation's alignment is needed here, which is
// still known even after deallocation — hence the infallible expect.
139 let (_alloc_size, alloc_align) = self.memory()
140 .get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
141 .expect("alloc info with MaybeDead cannot fail")
// An allocation's base address is at least its alignment, so the
// pointer's runtime value is at least `align + offset`.
142 let min_ptr_val = u128::from(alloc_align.bytes()) + u128::from(left.offset.bytes());
143 let result = match bin_op {
144 Gt => min_ptr_val > right,
145 Ge => min_ptr_val >= right,
// NOTE(review): lines between `result` and the `true` return are missing;
// presumably only a definitely-true comparison is answered here and the
// undecidable case falls through to the error below — TODO confirm.
150 (Scalar::from_bool(true), false)
152 // Sorry, can't tell.
153 throw_unsup!(InvalidPointerMath)
156 // These work if the left operand is a pointer, and the right an integer
157 Add | BitAnd | Sub | Rem if left.is_ptr() && right.is_bits() => {
158 // Cast to i128 is fine as we checked the kind to be ptr-sized
159 self.ptr_int_arithmetic(
162 right.assert_bits(self.memory().pointer_size()),
163 right_layout.abi.is_signed(),
166 // Commutative operators also work if the integer is on the left
167 Add | BitAnd if left.is_bits() && right.is_ptr() => {
168 // This is a commutative operation, just swap the operands
169 self.ptr_int_arithmetic(
172 left.assert_bits(self.memory().pointer_size()),
173 left_layout.abi.is_signed(),
176 // Nothing else works
177 _ => throw_unsup!(InvalidPointerMath),
// NOTE(review): the `fn` header of this method is missing from this chunk;
// from the trait above and the call sites it is `ptr_eq` — TODO confirm.
// Compares two pointer-sized values for equality by forcing both to raw bits.
185 ) -> InterpResult<'tcx, bool> {
186 let size = self.pointer_size();
187 // Just compare the integers.
188 // TODO: Do we really want to *always* do that, even when comparing two live in-bounds pointers?
// `force_bits` converts a pointer value to its integer representation
// (may fail if the pointer cannot be turned into bits).
189 let left = self.force_bits(left, size)?;
190 let right = self.force_bits(right, size)?;
// NOTE(review): the trailing line (presumably `Ok(left == right)`) is
// missing from this chunk.
// NOTE(review): several interior lines — including the outer `match` header
// and some arm patterns — are missing from this chunk; comments below cover
// only what is shown.
194 fn ptr_int_arithmetic(
200 ) -> InterpResult<'tcx, (Scalar<Tag>, bool)> {
201 use rustc::mir::BinOp::*;
// Repackage a (pointer, overflowed) pair into the (scalar, overflowed)
// shape this function returns.
203 fn map_to_primval((res, over): (Pointer<Tag>, bool)) -> (Scalar<Tag>, bool) {
204 (Scalar::Ptr(res), over)
209 // The only way this can overflow is by underflowing, so signdeness of the right
210 // operands does not matter.
// Subtraction arm (header missing — TODO confirm Sub): offset by the
// negated right operand.
211 map_to_primval(left.overflowing_signed_offset(-(right as i128), self)),
// Signed addition arm (header missing — TODO confirm).
213 map_to_primval(left.overflowing_signed_offset(right as i128, self)),
// Unsigned addition arm (header missing — TODO confirm).
215 map_to_primval(left.overflowing_offset(Size::from_bytes(right as u64), self)),
217 BitAnd if !signed => {
// MaybeDead: only the allocation's alignment is needed, which is known
// even for dead allocations — hence the infallible expect.
218 let ptr_base_align = self.memory().get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
219 .expect("alloc info with MaybeDead cannot fail")
222 // FIXME: use `interpret::truncate`, once that takes a `Size` instead of a `Layout`.
223 let shift = 128 - self.memory().pointer_size().bits();
// Mask covering the "base address" bits: everything except the low
// log2(align) bits, which alignment guarantees are zero in the base.
224 let value = !(ptr_base_align as u128 - 1);
225 // Truncate (shift left to drop out leftover values, shift right to fill with zeroes).
226 (value << shift) >> shift
228 let ptr_size = self.memory().pointer_size();
229 trace!("ptr BitAnd, align {}, operand {:#010x}, base_mask {:#010x}",
230 ptr_base_align, right, base_mask);
231 if right & base_mask == base_mask {
232 // Case 1: the base address bits are all preserved, i.e., right is all-1 there.
// The result stays a pointer into the same allocation; only the
// offset is masked.
233 let offset = (left.offset.bytes() as u128 & right) as u64;
235 Scalar::Ptr(Pointer::new_with_tag(
237 Size::from_bytes(offset),
242 } else if right & base_mask == 0 {
243 // Case 2: the base address bits are all taken away, i.e., right is all-0 there.
// The result is a plain integer; the (unknown) base address
// contributes nothing because its bits are all masked off.
244 let v = Scalar::from_uint((left.offset.bytes() as u128) & right, ptr_size);
// Any other mask would require knowing the concrete base address,
// which Miri does not have.
247 throw_unsup!(ReadPointerAsBytes);
// Rem arm (header missing — TODO confirm).
252 // Doing modulo a divisor of the alignment is allowed.
253 // (Intuition: modulo a divisor leaks less information.)
254 let ptr_base_align = self.memory().get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
255 .expect("alloc info with MaybeDead cannot fail")
257 let right = right as u64;
258 let ptr_size = self.memory().pointer_size();
260 // Modulo 1 is always 0.
261 (Scalar::from_uint(0u32, ptr_size), false)
262 } else if ptr_base_align % right == 0 {
263 // The base address would be cancelled out by the modulo operation, so we can
264 // just take the modulo of the offset.
266 Scalar::from_uint((left.offset.bytes() % right) as u128, ptr_size),
// Other divisors would need the concrete base address.
270 throw_unsup!(ReadPointerAsBytes);
// Fallback (arm header missing): report an unimplemented pointer op with
// the operator, both operands, and the signedness in the message.
276 "unimplemented binary op on pointer {:?}: {:?}, {:?} ({})",
280 if signed { "signed" } else { "unsigned" }
282 throw_unsup!(Unimplemented(msg));
287 /// Raises an error if the offset moves the pointer outside of its allocation.
288 /// We consider ZSTs their own huge allocation that doesn't overlap with anything (and nothing
289 /// moves in there because the size is 0). We also consider the NULL pointer its own separate
290 /// allocation, and all the remaining integers pointers their own allocation.
// NOTE(review): interior lines are missing from this chunk (e.g. the `offset`
// parameter declaration and parts of the `match`); comments cover what is shown.
291 fn pointer_offset_inbounds(
294 pointee_ty: Ty<'tcx>,
296 ) -> InterpResult<'tcx, Scalar<Tag>> {
297 // FIXME: assuming here that type size is less than `i64::max_value()`.
298 let pointee_size = self.layout_of(pointee_ty)?.size.bytes() as i64;
// Scale the element count by the element size; on overflow, report a
// multiplication-overflow panic (matches what `<*const T>::offset` forbids).
300 .checked_mul(pointee_size)
301 .ok_or_else(|| err_panic!(Overflow(mir::BinOp::Mul)))?;
302 // Now let's see what kind of pointer this is.
303 let ptr = if offset == 0 {
305 Scalar::Ptr(ptr) => ptr,
306 Scalar::Raw { .. } => {
307 // Offset 0 on an integer. We accept that, pretending there is
308 // a little zero-sized allocation here.
// NOTE(review): this branch handles all nonzero offsets (the check above is
// `offset == 0`), so negative offsets land here too despite the "> 0" in the
// comment below — TODO confirm intent.
313 // Offset > 0. We *require* a pointer.
316 // Both old and new pointer must be in-bounds of a *live* allocation.
317 // (Of the same allocation, but that part is trivial with our representation.)
318 self.pointer_inbounds(ptr)?;
// `signed_offset` itself errors on arithmetic overflow of the address.
319 let ptr = ptr.signed_offset(offset, self)?;
320 self.pointer_inbounds(ptr)?;