use rustc::ty::{Ty, layout::{Size, LayoutOf}};
use rustc::mir;

use crate::*;
pub trait EvalContextExt<'tcx> {
    fn pointer_inbounds(
        &self,
        ptr: Pointer<Tag>,
    ) -> InterpResult<'tcx>;

    fn ptr_op(
        &self,
        bin_op: mir::BinOp,
        left: ImmTy<'tcx, Tag>,
        right: ImmTy<'tcx, Tag>,
    ) -> InterpResult<'tcx, (Scalar<Tag>, bool)>;

    fn ptr_int_arithmetic(
        &self,
        bin_op: mir::BinOp,
        left: Pointer<Tag>,
        right: u128,
        signed: bool,
    ) -> InterpResult<'tcx, (Scalar<Tag>, bool)>;

    fn ptr_eq(
        &self,
        left: Scalar<Tag>,
        right: Scalar<Tag>,
    ) -> InterpResult<'tcx, bool>;

    fn pointer_offset_inbounds(
        &self,
        ptr: Scalar<Tag>,
        pointee_ty: Ty<'tcx>,
        offset: i64,
    ) -> InterpResult<'tcx, Scalar<Tag>>;
}

impl<'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'mir, 'tcx> {
    /// Test if the pointer is in-bounds of a live allocation.
    #[inline]
    fn pointer_inbounds(&self, ptr: Pointer<Tag>) -> InterpResult<'tcx> {
        let (size, _align) = self.memory().get_size_and_align(ptr.alloc_id, AllocCheck::Live)?;
        ptr.check_in_alloc(size, CheckInAllocMsg::InboundsTest)
    }
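
    /// Handle a binary operation where at least one operand is a pointer.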
    fn ptr_op(
        &self,
        bin_op: mir::BinOp,
        left: ImmTy<'tcx, Tag>,
        right: ImmTy<'tcx, Tag>,
    ) -> InterpResult<'tcx, (Scalar<Tag>, bool)> {
        use rustc::mir::BinOp::*;

        trace!("ptr_op: {:?} {:?} {:?}", *left, bin_op, *right);

        // Treat everything of integer *type* as an integer *value*.
        if left.layout.ty.is_integral() {
            // This is actually an integer operation, so dispatch back to the core engine.
            // TODO: Once intptrcast is the default, librustc_mir should never even call us
            // for integer types.
            assert!(right.layout.ty.is_integral());
            let l_bits = self.force_bits(left.imm.to_scalar()?, left.layout.size)?;
            let r_bits = self.force_bits(right.imm.to_scalar()?, right.layout.size)?;

            let left = ImmTy::from_scalar(Scalar::from_uint(l_bits, left.layout.size), left.layout);
            let right = ImmTy::from_scalar(Scalar::from_uint(r_bits, right.layout.size), right.layout);

            return self.binary_op(bin_op, left, right);
        }

        // Operations that support fat pointers.
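        // (A fat pointer, e.g. `&[T]` or `&dyn Trait`, is a `ScalarPair` of data
        // pointer and metadata; equality requires both components to match.)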
        match bin_op {
            Eq | Ne => {
                let eq = match (*left, *right) {
                    (Immediate::Scalar(left), Immediate::Scalar(right)) =>
                        self.ptr_eq(left.not_undef()?, right.not_undef()?)?,
                    (Immediate::ScalarPair(left1, left2), Immediate::ScalarPair(right1, right2)) =>
                        self.ptr_eq(left1.not_undef()?, right1.not_undef()?)? &&
                        self.ptr_eq(left2.not_undef()?, right2.not_undef()?)?,
                    _ => bug!("Type system should not allow comparing Scalar with ScalarPair"),
                };
                return Ok((Scalar::from_bool(if bin_op == Eq { eq } else { !eq }), false));
            }
            _ => {}
        }

        // Now we expect no more fat pointers.
        let left_layout = left.layout;
        let left = left.to_scalar()?;
        let right_layout = right.layout;
        let right = right.to_scalar()?;
        debug_assert!(left.is_ptr() || right.is_ptr() || bin_op == Offset);
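
        // Dispatch on the operator; at this point at least one operand is a thin
        // pointer, or the operation is `Offset`.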
        match bin_op {
            Offset => {
                let pointee_ty = left_layout.ty
                    .builtin_deref(true)
                    .expect("Offset called on non-ptr type")
                    .ty;
                let ptr = self.pointer_offset_inbounds(
                    left,
                    pointee_ty,
                    right.to_isize(self)?,
                )?;
                Ok((ptr, false))
            }

            // These require both operands to be pointers, and fail if they are not
            // in the same allocation.
            Lt | Le | Gt | Ge | Sub if left.is_ptr() && right.is_ptr() => {
                let left = left.to_ptr().expect("we checked is_ptr");
                let right = right.to_ptr().expect("we checked is_ptr");
                if left.alloc_id == right.alloc_id {
                    let res = match bin_op {
                        Lt => left.offset < right.offset,
                        Le => left.offset <= right.offset,
                        Gt => left.offset > right.offset,
                        Ge => left.offset >= right.offset,
                        Sub => {
                            // Subtract the offsets.
                            let left_offset = Scalar::from_uint(left.offset.bytes(), self.memory().pointer_size());
                            let right_offset = Scalar::from_uint(right.offset.bytes(), self.memory().pointer_size());
                            let layout = self.layout_of(self.tcx.types.usize)?;
                            return self.binary_op(
                                Sub,
                                ImmTy::from_scalar(left_offset, layout),
                                ImmTy::from_scalar(right_offset, layout),
                            );
                        }
                        _ => bug!("We already established it has to be one of these operators."),
                    };
                    Ok((Scalar::from_bool(res), false))
                } else {
                    // Both are pointers, but from different allocations.
                    err!(InvalidPointerMath)
                }
            }

            Gt | Ge if left.is_ptr() && right.is_bits() => {
                // "ptr >[=] integer" can be tested if the integer is small enough.
                let left = left.to_ptr().expect("we checked is_ptr");
                let right = right.to_bits(self.memory().pointer_size()).expect("we checked is_bits");
                let (_alloc_size, alloc_align) = self.memory()
                    .get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
                    .expect("alloc info with MaybeDead cannot fail");
                let min_ptr_val = u128::from(alloc_align.bytes()) + u128::from(left.offset.bytes());
                let result = match bin_op {
                    Gt => min_ptr_val > right,
                    Ge => min_ptr_val >= right,
                    _ => bug!(),
                };
                if result {
                    // Definitely true!
                    Ok((Scalar::from_bool(true), false))
                } else {
                    // Sorry, can't tell.
                    err!(InvalidPointerMath)
                }
            }

            // These work if the left operand is a pointer, and the right an integer.
            Add | BitAnd | Sub | Rem if left.is_ptr() && right.is_bits() => {
                // Cast to i128 is fine as we checked the kind to be ptr-sized.
                self.ptr_int_arithmetic(
                    bin_op,
                    left.to_ptr().expect("we checked is_ptr"),
                    right.to_bits(self.memory().pointer_size()).expect("we checked is_bits"),
                    right_layout.abi.is_signed(),
                )
            }

            // Commutative operators also work if the integer is on the left.
            Add | BitAnd if left.is_bits() && right.is_ptr() => {
                // This is a commutative operation, so just swap the operands.
                self.ptr_int_arithmetic(
                    bin_op,
                    right.to_ptr().expect("we checked is_ptr"),
                    left.to_bits(self.memory().pointer_size()).expect("we checked is_bits"),
                    left_layout.abi.is_signed(),
                )
            }

            // Nothing else works.
            _ => err!(InvalidPointerMath),
        }
    }
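
    /// Test pointer equality by comparing integer representations.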
    fn ptr_eq(
        &self,
        left: Scalar<Tag>,
        right: Scalar<Tag>,
    ) -> InterpResult<'tcx, bool> {
        let size = self.pointer_size();
        // Just compare the integers.
        // TODO: Do we really want to *always* do that, even when comparing two live in-bounds pointers?
        let left = self.force_bits(left, size)?;
        let right = self.force_bits(right, size)?;
        Ok(left == right)
    }

    fn ptr_int_arithmetic(
        &self,
        bin_op: mir::BinOp,
        left: Pointer<Tag>,
        right: u128,
        signed: bool,
    ) -> InterpResult<'tcx, (Scalar<Tag>, bool)> {
        use rustc::mir::BinOp::*;

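        // Helper: re-wrap an offset pointer (plus its overflow flag) as a `Scalar`.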
        fn map_to_primval((res, over): (Pointer<Tag>, bool)) -> (Scalar<Tag>, bool) {
            (Scalar::Ptr(res), over)
        }

        Ok(match bin_op {
            Sub =>
                // The only way this can overflow is by underflowing, so the signedness of
                // the right operand does not matter.
                map_to_primval(left.overflowing_signed_offset(-(right as i128), self)),
            Add if signed =>
                map_to_primval(left.overflowing_signed_offset(right as i128, self)),
            Add if !signed =>
                map_to_primval(left.overflowing_offset(Size::from_bytes(right as u64), self)),

            BitAnd if !signed => {
                let ptr_base_align = self.memory().get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
                    .expect("alloc info with MaybeDead cannot fail")
                    .1.bytes();
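                // `base_mask` selects the bits that may carry base-address information:
                // everything except the low bits that the alignment guarantees are zero.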
                let base_mask = {
                    // FIXME: use `interpret::truncate`, once that takes a `Size` instead of a `Layout`.
                    let shift = 128 - self.memory().pointer_size().bits();
                    let value = !(ptr_base_align as u128 - 1);
                    // Truncate (shift left to drop the leftover high bits, shift right to fill with zeroes).
                    (value << shift) >> shift
                };
                let ptr_size = self.memory().pointer_size();
                trace!("ptr BitAnd, align {}, operand {:#010x}, base_mask {:#010x}",
                    ptr_base_align, right, base_mask);
                if right & base_mask == base_mask {
                    // Case 1: the base address bits are all preserved, i.e., right is all-1 there.
                    let offset = (left.offset.bytes() as u128 & right) as u64;
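                    // The result is then still a pointer into the same allocation,
                    // carrying the masked offset.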
                    (
                        Scalar::Ptr(Pointer::new_with_tag(
                            left.alloc_id,
                            Size::from_bytes(offset),
                            left.tag,
                        )),
                        false,
                    )
                } else if right & base_mask == 0 {
                    // Case 2: the base address bits are all taken away, i.e., right is all-0 there.
                    let v = Scalar::from_uint((left.offset.bytes() as u128) & right, ptr_size);
                    (v, false)
                } else {
                    return err!(ReadPointerAsBytes);
                }
            }

            Rem if !signed => {
                // Doing modulo a divisor of the alignment is allowed.
                // (Intuition: modulo a divisor leaks less information.)
                let ptr_base_align = self.memory().get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
                    .expect("alloc info with MaybeDead cannot fail")
                    .1.bytes();
                let right = right as u64;
                let ptr_size = self.memory().pointer_size();
                if right == 1 {
                    // Modulo 1 is always 0.
                    (Scalar::from_uint(0u32, ptr_size), false)
                } else if ptr_base_align % right == 0 {
                    // The base address would be cancelled out by the modulo operation, so we
                    // can just take the modulo of the offset.
                    (
                        Scalar::from_uint((left.offset.bytes() % right) as u128, ptr_size),
                        false,
                    )
                } else {
                    return err!(ReadPointerAsBytes);
                }
            }
280 "unimplemented binary op on pointer {:?}: {:?}, {:?} ({})",
284 if signed { "signed" } else { "unsigned" }
286 return err!(Unimplemented(msg));

    /// Raises an error if the offset moves the pointer outside of its allocation.
    /// We consider ZSTs their own huge allocation that doesn't overlap with anything (and
    /// nothing moves in there because the size is 0). We also consider the NULL pointer its
    /// own separate allocation, and all remaining integer pointers their own allocation.
    fn pointer_offset_inbounds(
        &self,
        ptr: Scalar<Tag>,
        pointee_ty: Ty<'tcx>,
        offset: i64,
    ) -> InterpResult<'tcx, Scalar<Tag>> {
        // FIXME: assuming here that type size is less than `i64::max_value()`.
        let pointee_size = self.layout_of(pointee_ty)?.size.bytes() as i64;
        let offset = offset
            .checked_mul(pointee_size)
            .ok_or_else(|| InterpError::Overflow(mir::BinOp::Mul))?;
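        // `offset` is now a byte offset; the `checked_mul` above rejected element
        // counts whose byte size would overflow an `i64`.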
        // Now let's see what kind of pointer this is.
        let ptr = if offset == 0 {
            match ptr {
                Scalar::Ptr(ptr) => ptr,
                Scalar::Raw { .. } => {
                    // Offset 0 on an integer. We accept that, pretending there is
                    // a little zero-sized allocation here.
                    return Ok(ptr);
                }
            }
        } else {
            // Nonzero offset. We *require* a pointer.
            self.force_ptr(ptr)?
        };
        // Both old and new pointer must be in-bounds of a *live* allocation.
        // (Of the same allocation, but that part is trivial with our representation.)
        self.pointer_inbounds(ptr)?;
        let ptr = ptr.signed_offset(offset, self)?;
        self.pointer_inbounds(ptr)?;
        Ok(Scalar::Ptr(ptr))
    }
}