use rustc::ty::{Ty, layout::{Size, LayoutOf}};
use rustc::mir;

use crate::*;

pub trait EvalContextExt<'tcx> {
    fn pointer_inbounds(
        &self,
        ptr: Pointer<Tag>
    ) -> InterpResult<'tcx>;

    fn binary_ptr_op(
        &self,
        bin_op: mir::BinOp,
        left: ImmTy<'tcx, Tag>,
        right: ImmTy<'tcx, Tag>,
    ) -> InterpResult<'tcx, (Scalar<Tag>, bool)>;

    fn ptr_int_arithmetic(
        &self,
        bin_op: mir::BinOp,
        left: Pointer<Tag>,
        right: u128,
        signed: bool,
    ) -> InterpResult<'tcx, (Scalar<Tag>, bool)>;

    fn ptr_eq(
        &self,
        left: Scalar<Tag>,
        right: Scalar<Tag>,
    ) -> InterpResult<'tcx, bool>;

    fn pointer_offset_inbounds(
        &self,
        ptr: Scalar<Tag>,
        pointee_ty: Ty<'tcx>,
        offset: i64,
    ) -> InterpResult<'tcx, Scalar<Tag>>;
}

impl<'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'mir, 'tcx> {
    /// Test if the pointer is in-bounds of a live allocation.
    #[inline]
    fn pointer_inbounds(&self, ptr: Pointer<Tag>) -> InterpResult<'tcx> {
        let (size, _align) = self.memory().get_size_and_align(ptr.alloc_id, AllocCheck::Live)?;
        ptr.check_in_alloc(size, CheckInAllocMsg::InboundsTest)
    }

    fn binary_ptr_op(
        &self,
        bin_op: mir::BinOp,
        left: ImmTy<'tcx, Tag>,
        right: ImmTy<'tcx, Tag>,
    ) -> InterpResult<'tcx, (Scalar<Tag>, bool)> {
        use rustc::mir::BinOp::*;

        trace!("ptr_op: {:?} {:?} {:?}", *left, bin_op, *right);

        // Operations that support fat pointers
        match bin_op {
            Eq | Ne => {
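                // Thin pointers are a single `Scalar`; fat pointers (e.g. `&[T]` or
                // `&dyn Trait`) are a `ScalarPair` of data pointer plus length/vtable,
                // so they compare equal only if *both* components are equal.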
                let eq = match (*left, *right) {
                    (Immediate::Scalar(left), Immediate::Scalar(right)) =>
                        self.ptr_eq(left.not_undef()?, right.not_undef()?)?,
                    (Immediate::ScalarPair(left1, left2), Immediate::ScalarPair(right1, right2)) =>
                        self.ptr_eq(left1.not_undef()?, right1.not_undef()?)? &&
                        self.ptr_eq(left2.not_undef()?, right2.not_undef()?)?,
                    _ => bug!("Type system should not allow comparing Scalar with ScalarPair"),
                };
                return Ok((Scalar::from_bool(if bin_op == Eq { eq } else { !eq }), false));
            }
            _ => {},
        }

        // Now we expect no more fat pointers.
        let left_layout = left.layout;
        let left = left.to_scalar()?;
        let right_layout = right.layout;
        let right = right.to_scalar()?;

        Ok(match bin_op {
            Offset => {
                let pointee_ty = left_layout.ty
                    .builtin_deref(true)
                    .expect("Offset called on non-ptr type")
                    .ty;
                let ptr = self.pointer_offset_inbounds(
                    left,
                    pointee_ty,
                    right.to_isize(self)?,
                )?;
                (ptr, false)
            }
            // These need both operands to be pointers; they fail if the pointers are not in the same allocation.
            Lt | Le | Gt | Ge | Sub if left.is_ptr() && right.is_ptr() => {
                let left = left.assert_ptr();
                let right = right.assert_ptr();
                if left.alloc_id == right.alloc_id {
                    let res = match bin_op {
                        Lt => left.offset < right.offset,
                        Le => left.offset <= right.offset,
                        Gt => left.offset > right.offset,
                        Ge => left.offset >= right.offset,
                        Sub => {
                            // subtract the offsets
                            let left_offset = Scalar::from_uint(left.offset.bytes(), self.memory().pointer_size());
                            let right_offset = Scalar::from_uint(right.offset.bytes(), self.memory().pointer_size());
                            let layout = self.layout_of(self.tcx.types.usize)?;
                            return self.binary_op(
                                Sub,
                                ImmTy::from_scalar(left_offset, layout),
                                ImmTy::from_scalar(right_offset, layout),
                            )
                        }
                        _ => bug!("We already established it has to be one of these operators."),
                    };
                    (Scalar::from_bool(res), false)
                } else {
                    // Both are pointers, but from different allocations.
                    throw_unsup!(InvalidPointerMath)
                }
            }
            Lt | Le | Gt | Ge if left.is_bits() && right.is_bits() => {
                let left = left.assert_bits(self.memory().pointer_size());
                let right = right.assert_bits(self.memory().pointer_size());
                let res = match bin_op {
                    Lt => left < right,
                    Le => left <= right,
                    Gt => left > right,
                    Ge => left >= right,
                    _ => bug!("We already established it has to be one of these operators."),
                };
                (Scalar::from_bool(res), false)
            }
            Gt | Ge if left.is_ptr() && right.is_bits() => {
                // "ptr >[=] integer" can be tested if the integer is small enough.
                let left = left.assert_ptr();
                let right = right.assert_bits(self.memory().pointer_size());
                let (_alloc_size, alloc_align) = self.memory()
                    .get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
                    .expect("alloc info with MaybeDead cannot fail");
                let min_ptr_val = u128::from(alloc_align.bytes()) + u128::from(left.offset.bytes());
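                // Why this is a valid lower bound: the allocation's base address is not
                // known here, but it is non-NULL and a multiple of `alloc_align`, so it
                // is at least `alloc_align`. Illustration with hypothetical numbers: for
                // `alloc_align == 8` and `left.offset == 4`, the runtime value of the
                // pointer is at least 12, so `ptr > 10` is definitely true, while
                // `ptr > 12` cannot be decided from this bound alone.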
                let result = match bin_op {
                    Gt => min_ptr_val > right,
                    Ge => min_ptr_val >= right,
                    _ => bug!(),
                };
                if result {
                    // Definitely true!
                    (Scalar::from_bool(true), false)
                } else {
                    // Sorry, can't tell.
                    throw_unsup!(InvalidPointerMath)
                }
            }
            // These work if the left operand is a pointer, and the right an integer
            Add | BitAnd | Sub | Rem if left.is_ptr() && right.is_bits() => {
                // Cast to i128 is fine as we checked the kind to be ptr-sized
                self.ptr_int_arithmetic(
                    bin_op,
                    left.assert_ptr(),
                    right.assert_bits(self.memory().pointer_size()),
                    right_layout.abi.is_signed(),
                )?
            }
            // Commutative operators also work if the integer is on the left
            Add | BitAnd if left.is_bits() && right.is_ptr() => {
                // This is a commutative operation, just swap the operands
                self.ptr_int_arithmetic(
                    bin_op,
                    right.assert_ptr(),
                    left.assert_bits(self.memory().pointer_size()),
                    left_layout.abi.is_signed(),
                )?
            }
            // Nothing else works
            _ => throw_unsup!(InvalidPointerMath),
        })
    }

    fn ptr_eq(
        &self,
        left: Scalar<Tag>,
        right: Scalar<Tag>,
    ) -> InterpResult<'tcx, bool> {
        let size = self.pointer_size();
        // Just compare the integers.
        // TODO: Do we really want to *always* do that, even when comparing two live in-bounds pointers?
        let left = self.force_bits(left, size)?;
        let right = self.force_bits(right, size)?;
        Ok(left == right)
    }

    fn ptr_int_arithmetic(
        &self,
        bin_op: mir::BinOp,
        left: Pointer<Tag>,
        right: u128,
        signed: bool,
    ) -> InterpResult<'tcx, (Scalar<Tag>, bool)> {
        use rustc::mir::BinOp::*;

        fn map_to_primval((res, over): (Pointer<Tag>, bool)) -> (Scalar<Tag>, bool) {
            (Scalar::Ptr(res), over)
        }

        Ok(match bin_op {
            Sub =>
                // The only way this can overflow is by underflowing, so the signedness of
                // the right operand does not matter.
                map_to_primval(left.overflowing_signed_offset(-(right as i128), self)),
            Add if signed =>
                map_to_primval(left.overflowing_signed_offset(right as i128, self)),
            Add if !signed =>
                map_to_primval(left.overflowing_offset(Size::from_bytes(right as u64), self)),

            BitAnd if !signed => {
                let ptr_base_align = self.memory().get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
                    .expect("alloc info with MaybeDead cannot fail")
                    .1.bytes();
                let base_mask = {
                    // FIXME: use `interpret::truncate`, once that takes a `Size` instead of a `Layout`.
                    let shift = 128 - self.memory().pointer_size().bits();
                    let value = !(ptr_base_align as u128 - 1);
                    // Truncate (shift left to drop the high bits, shift right to fill with zeroes).
                    (value << shift) >> shift
                };
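                // Worked example (hypothetical numbers): with 64-bit pointers and an
                // 8-byte-aligned allocation, `base_mask == 0xffff_ffff_ffff_fff8`.
                // An operand of `!0x7` preserves all base address bits (case 1 below),
                // an operand of `0x7` clears them all (case 2), and anything in between
                // is rejected because the result would depend on the base address.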
                let ptr_size = self.memory().pointer_size();
                trace!("ptr BitAnd, align {}, operand {:#010x}, base_mask {:#010x}",
                    ptr_base_align, right, base_mask);
                if right & base_mask == base_mask {
                    // Case 1: the base address bits are all preserved, i.e., right is all-1 there.
                    let offset = (left.offset.bytes() as u128 & right) as u64;
                    (
                        Scalar::Ptr(Pointer::new_with_tag(
                            left.alloc_id,
                            Size::from_bytes(offset),
                            left.tag,
                        )),
                        false,
                    )
                } else if right & base_mask == 0 {
                    // Case 2: the base address bits are all taken away, i.e., right is all-0 there.
                    let v = Scalar::from_uint((left.offset.bytes() as u128) & right, ptr_size);
                    (v, false)
                } else {
                    throw_unsup!(ReadPointerAsBytes);
                }
            }

            Rem if !signed => {
                // Doing modulo a divisor of the alignment is allowed.
                // (Intuition: modulo a divisor leaks less information.)
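                // Illustration with hypothetical numbers: with `ptr_base_align == 8`,
                // the base address is `8 * k` for some unknown `k`. For `right == 4`
                // (a divisor of 8), `(8 * k + offset) % 4 == offset % 4`, so the result
                // depends only on the known offset; for `right == 3`, it would depend
                // on the unknown `k`, so we have to bail out.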
                let ptr_base_align = self.memory().get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
                    .expect("alloc info with MaybeDead cannot fail")
                    .1.bytes();
                let right = right as u64;
                let ptr_size = self.memory().pointer_size();
                if right == 1 {
                    // Modulo 1 is always 0.
                    (Scalar::from_uint(0u32, ptr_size), false)
                } else if ptr_base_align % right == 0 {
                    // The base address would be cancelled out by the modulo operation, so we can
                    // just take the modulo of the offset.
                    (
                        Scalar::from_uint((left.offset.bytes() % right) as u128, ptr_size),
                        false,
                    )
                } else {
                    throw_unsup!(ReadPointerAsBytes);
                }
            }

            _ => {
                let msg = format!(
                    "unimplemented binary op on pointer {:?}: {:?}, {:?} ({})",
                    bin_op,
                    left,
                    right,
                    if signed { "signed" } else { "unsigned" }
                );
                throw_unsup!(Unimplemented(msg));
            }
        })
    }
    /// Raises an error if the offset moves the pointer outside of its allocation.
    /// We consider ZSTs their own huge allocation that doesn't overlap with anything (and nothing
    /// moves in there because the size is 0). We also consider the NULL pointer its own separate
    /// allocation, and all remaining integer pointers their own allocation.
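    /// For example, offsetting an integer "pointer" like `4 as *const u8` by 0 is
    /// accepted here (it stays within its imaginary zero-sized allocation), while
    /// offsetting it by a nonzero amount is rejected because there is no allocation
    /// for it to be in-bounds of.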
    fn pointer_offset_inbounds(
        &self,
        ptr: Scalar<Tag>,
        pointee_ty: Ty<'tcx>,
        offset: i64,
    ) -> InterpResult<'tcx, Scalar<Tag>> {
        // FIXME: assuming here that type size is less than `i64::max_value()`.
        let pointee_size = self.layout_of(pointee_ty)?.size.bytes() as i64;
        let offset = offset
            .checked_mul(pointee_size)
            .ok_or_else(|| err_panic!(Overflow(mir::BinOp::Mul)))?;
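        // E.g., offsetting a `*const u32` (pointee size 4) by 3 elements yields a byte
        // offset of 12. If the multiplication overflows `i64`, this raises an
        // `Overflow(Mul)` error.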
        // Now let's see what kind of pointer this is.
        let ptr = if offset == 0 {
            match ptr {
                Scalar::Ptr(ptr) => ptr,
                Scalar::Raw { .. } => {
                    // Offset 0 on an integer. We accept that, pretending there is
                    // a little zero-sized allocation here.
                    return Ok(ptr);
                }
            }
        } else {
            // Offset != 0. We *require* a pointer.
            self.force_ptr(ptr)?
        };
        // Both old and new pointer must be in-bounds of a *live* allocation.
        // (Of the same allocation, but that part is trivial with our representation.)
        self.pointer_inbounds(ptr)?;
        let ptr = ptr.signed_offset(offset, self)?;
        self.pointer_inbounds(ptr)?;
        Ok(Scalar::Ptr(ptr))
    }
}