]> git.lizzy.rs Git - rust.git/blob - src/operator.rs
Auto merge of #851 - RalfJung:intrptrcast-by-default, r=oli-obk
[rust.git] / src / operator.rs
1 use rustc::ty::{Ty, layout::{Size, LayoutOf}};
2 use rustc::mir;
3
4 use crate::*;
5
pub trait EvalContextExt<'tcx> {
    /// Tests that `ptr` is in-bounds of a live allocation.
    fn pointer_inbounds(
        &self,
        ptr: Pointer<Tag>
    ) -> InterpResult<'tcx>;

    /// Evaluates a binary operation where at least one operand is of pointer
    /// type (integer-typed operands are dispatched back to the core engine).
    /// Returns the resulting scalar together with an "overflowed" flag.
    fn ptr_op(
        &self,
        bin_op: mir::BinOp,
        left: ImmTy<'tcx, Tag>,
        right: ImmTy<'tcx, Tag>,
    ) -> InterpResult<'tcx, (Scalar<Tag>, bool)>;

    /// Performs `bin_op` between the pointer `left` and the integer `right`.
    /// `signed` indicates whether the integer operand is to be interpreted
    /// as a signed value.
    fn ptr_int_arithmetic(
        &self,
        bin_op: mir::BinOp,
        left: Pointer<Tag>,
        right: u128,
        signed: bool,
    ) -> InterpResult<'tcx, (Scalar<Tag>, bool)>;

    /// Tests two (possibly pointer-valued) scalars for equality by comparing
    /// their integer representations.
    fn ptr_eq(
        &self,
        left: Scalar<Tag>,
        right: Scalar<Tag>,
    ) -> InterpResult<'tcx, bool>;

    /// Offsets `ptr` by `offset` elements of type `pointee_ty`, raising an
    /// error if either the old or the new pointer is out-of-bounds of its
    /// allocation.
    fn pointer_offset_inbounds(
        &self,
        ptr: Scalar<Tag>,
        pointee_ty: Ty<'tcx>,
        offset: i64,
    ) -> InterpResult<'tcx, Scalar<Tag>>;
}
40
41 impl<'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'mir, 'tcx> {
42     /// Test if the pointer is in-bounds of a live allocation.
43     #[inline]
44     fn pointer_inbounds(&self, ptr: Pointer<Tag>) -> InterpResult<'tcx> {
45         let (size, _align) = self.memory().get_size_and_align(ptr.alloc_id, AllocCheck::Live)?;
46         ptr.check_in_alloc(size, CheckInAllocMsg::InboundsTest)
47     }
48
49     fn ptr_op(
50         &self,
51         bin_op: mir::BinOp,
52         left: ImmTy<'tcx, Tag>,
53         right: ImmTy<'tcx, Tag>,
54     ) -> InterpResult<'tcx, (Scalar<Tag>, bool)> {
55         use rustc::mir::BinOp::*;
56
57         trace!("ptr_op: {:?} {:?} {:?}", *left, bin_op, *right);
58
59         // Treat everything of integer *type* at integer *value*.
60         if left.layout.ty.is_integral() {
61             // This is actually an integer operation, so dispatch back to the core engine.
62             // TODO: Once intptrcast is the default, librustc_mir should never even call us
63             // for integer types.
64             assert!(right.layout.ty.is_integral());
65             let l_bits = self.force_bits(left.imm.to_scalar()?, left.layout.size)?;
66             let r_bits = self.force_bits(right.imm.to_scalar()?, right.layout.size)?;
67             
68             let left = ImmTy::from_scalar(Scalar::from_uint(l_bits, left.layout.size), left.layout);
69             let right = ImmTy::from_scalar(Scalar::from_uint(r_bits, left.layout.size), right.layout);
70
71             return self.binary_op(bin_op, left, right);
72         } 
73
74         // Operations that support fat pointers
75         match bin_op {
76             Eq | Ne => {
77                 let eq = match (*left, *right) {
78                     (Immediate::Scalar(left), Immediate::Scalar(right)) =>
79                         self.ptr_eq(left.not_undef()?, right.not_undef()?)?,
80                     (Immediate::ScalarPair(left1, left2), Immediate::ScalarPair(right1, right2)) =>
81                         self.ptr_eq(left1.not_undef()?, right1.not_undef()?)? &&
82                         self.ptr_eq(left2.not_undef()?, right2.not_undef()?)?,
83                     _ => bug!("Type system should not allow comparing Scalar with ScalarPair"),
84                 };
85                 return Ok((Scalar::from_bool(if bin_op == Eq { eq } else { !eq }), false));
86             }
87             _ => {},
88         }
89
90         // Now we expect no more fat pointers.
91         let left_layout = left.layout;
92         let left = left.to_scalar()?;
93         let right_layout = right.layout;
94         let right = right.to_scalar()?;
95         debug_assert!(left.is_ptr() || right.is_ptr() || bin_op == Offset);
96
97         match bin_op {
98             Offset => {
99                 let pointee_ty = left_layout.ty
100                     .builtin_deref(true)
101                     .expect("Offset called on non-ptr type")
102                     .ty;
103                 let ptr = self.pointer_offset_inbounds(
104                     left,
105                     pointee_ty,
106                     right.to_isize(self)?,
107                 )?;
108                 Ok((ptr, false))
109             }
110             // These need both to be pointer, and fail if they are not in the same location
111             Lt | Le | Gt | Ge | Sub if left.is_ptr() && right.is_ptr() => {
112                 let left = left.to_ptr().expect("we checked is_ptr");
113                 let right = right.to_ptr().expect("we checked is_ptr");
114                 if left.alloc_id == right.alloc_id {
115                     let res = match bin_op {
116                         Lt => left.offset < right.offset,
117                         Le => left.offset <= right.offset,
118                         Gt => left.offset > right.offset,
119                         Ge => left.offset >= right.offset,
120                         Sub => {
121                             // subtract the offsets
122                             let left_offset = Scalar::from_uint(left.offset.bytes(), self.memory().pointer_size());
123                             let right_offset = Scalar::from_uint(right.offset.bytes(), self.memory().pointer_size());
124                             let layout = self.layout_of(self.tcx.types.usize)?;
125                             return self.binary_op(
126                                 Sub,
127                                 ImmTy::from_scalar(left_offset, layout),
128                                 ImmTy::from_scalar(right_offset, layout),
129                             )
130                         }
131                         _ => bug!("We already established it has to be one of these operators."),
132                     };
133                     Ok((Scalar::from_bool(res), false))
134                 } else {
135                     // Both are pointers, but from different allocations.
136                     err!(InvalidPointerMath)
137                 }
138             }
139             Gt | Ge if left.is_ptr() && right.is_bits() => {
140                 // "ptr >[=] integer" can be tested if the integer is small enough.
141                 let left = left.to_ptr().expect("we checked is_ptr");
142                 let right = right.to_bits(self.memory().pointer_size()).expect("we checked is_bits");
143                 let (_alloc_size, alloc_align) = self.memory()
144                     .get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
145                     .expect("alloc info with MaybeDead cannot fail");
146                 let min_ptr_val = u128::from(alloc_align.bytes()) + u128::from(left.offset.bytes());
147                 let result = match bin_op {
148                     Gt => min_ptr_val > right,
149                     Ge => min_ptr_val >= right,
150                     _ => bug!(),
151                 };
152                 if result {
153                     // Definitely true!
154                     Ok((Scalar::from_bool(true), false))
155                 } else {
156                     // Sorry, can't tell.
157                     err!(InvalidPointerMath)
158                 }
159             }
160             // These work if the left operand is a pointer, and the right an integer
161             Add | BitAnd | Sub | Rem if left.is_ptr() && right.is_bits() => {
162                 // Cast to i128 is fine as we checked the kind to be ptr-sized
163                 self.ptr_int_arithmetic(
164                     bin_op,
165                     left.to_ptr().expect("we checked is_ptr"),
166                     right.to_bits(self.memory().pointer_size()).expect("we checked is_bits"),
167                     right_layout.abi.is_signed(),
168                 )
169             }
170             // Commutative operators also work if the integer is on the left
171             Add | BitAnd if left.is_bits() && right.is_ptr() => {
172                 // This is a commutative operation, just swap the operands
173                 self.ptr_int_arithmetic(
174                     bin_op,
175                     right.to_ptr().expect("we checked is_ptr"),
176                     left.to_bits(self.memory().pointer_size()).expect("we checked is_bits"),
177                     left_layout.abi.is_signed(),
178                 )
179             }
180             // Nothing else works
181             _ => err!(InvalidPointerMath),
182         }
183     }
184
185     fn ptr_eq(
186         &self,
187         left: Scalar<Tag>,
188         right: Scalar<Tag>,
189     ) -> InterpResult<'tcx, bool> {
190         let size = self.pointer_size();
191         // Just compare the integers.
192         // TODO: Do we really want to *always* do that, even when comparing two live in-bounds pointers?
193         let left = self.force_bits(left, size)?;
194         let right = self.force_bits(right, size)?;
195         Ok(left == right)
196     }
197
    /// Performs `bin_op` between the pointer `left` and the integer `right`,
    /// succeeding only in the cases where the result does not depend on the
    /// (unknown) absolute base address of the allocation.
    fn ptr_int_arithmetic(
        &self,
        bin_op: mir::BinOp,
        left: Pointer<Tag>,
        right: u128,
        signed: bool,
    ) -> InterpResult<'tcx, (Scalar<Tag>, bool)> {
        use rustc::mir::BinOp::*;

        // Re-wrap a `(Pointer, overflowed)` pair as a `(Scalar, overflowed)` pair.
        fn map_to_primval((res, over): (Pointer<Tag>, bool)) -> (Scalar<Tag>, bool) {
            (Scalar::Ptr(res), over)
        }

        Ok(match bin_op {
            Sub =>
                // The only way this can overflow is by underflowing, so signedness of the right
                // operand does not matter.
                map_to_primval(left.overflowing_signed_offset(-(right as i128), self)),
            Add if signed =>
                map_to_primval(left.overflowing_signed_offset(right as i128, self)),
            Add if !signed =>
                map_to_primval(left.overflowing_offset(Size::from_bytes(right as u64), self)),

            BitAnd if !signed => {
                // NOTE(review): this looks like support for alignment-masking tricks
                // (`ptr & !(align - 1)` and `ptr & (align - 1)`); only the two cases
                // below, whose result is independent of the base address, are handled.
                let ptr_base_align = self.memory().get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
                    .expect("alloc info with MaybeDead cannot fail")
                    .1.bytes();
                // `base_mask` has 1s exactly where the base address contributes bits
                // the alignment does not determine, truncated to pointer width.
                let base_mask = {
                    // FIXME: use `interpret::truncate`, once that takes a `Size` instead of a `Layout`.
                    let shift = 128 - self.memory().pointer_size().bits();
                    let value = !(ptr_base_align as u128 - 1);
                    // Truncate (shift left to drop out leftover values, shift right to fill with zeroes).
                    (value << shift) >> shift
                };
                let ptr_size = self.memory().pointer_size();
                trace!("ptr BitAnd, align {}, operand {:#010x}, base_mask {:#010x}",
                    ptr_base_align, right, base_mask);
                if right & base_mask == base_mask {
                    // Case 1: the base address bits are all preserved, i.e., right is all-1 there.
                    // The result is a pointer into the same allocation, with offset `offset & right`.
                    let offset = (left.offset.bytes() as u128 & right) as u64;
                    (
                        Scalar::Ptr(Pointer::new_with_tag(
                            left.alloc_id,
                            Size::from_bytes(offset),
                            left.tag,
                        )),
                        false,
                    )
                } else if right & base_mask == 0 {
                    // Case 2: the base address bits are all taken away, i.e., right is all-0 there.
                    // The result is a plain integer, `offset & right`.
                    let v = Scalar::from_uint((left.offset.bytes() as u128) & right, ptr_size);
                    (v, false)
                } else {
                    // Any other mask would require the concrete base address.
                    return err!(ReadPointerAsBytes);
                }
            }

            Rem if !signed => {
                // Doing modulo a divisor of the alignment is allowed.
                // (Intuition: modulo a divisor leaks less information.)
                let ptr_base_align = self.memory().get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
                    .expect("alloc info with MaybeDead cannot fail")
                    .1.bytes();
                let right = right as u64;
                let ptr_size = self.memory().pointer_size();
                if right == 1 {
                    // Modulo 1 is always 0.
                    (Scalar::from_uint(0u32, ptr_size), false)
                } else if ptr_base_align % right == 0 {
                    // The base address would be cancelled out by the modulo operation, so we can
                    // just take the modulo of the offset.
                    (
                        Scalar::from_uint((left.offset.bytes() % right) as u128, ptr_size),
                        false,
                    )
                } else {
                    return err!(ReadPointerAsBytes);
                }
            }

            _ => {
                // Everything else would require knowing the absolute address.
                let msg = format!(
                    "unimplemented binary op on pointer {:?}: {:?}, {:?} ({})",
                    bin_op,
                    left,
                    right,
                    if signed { "signed" } else { "unsigned" }
                );
                return err!(Unimplemented(msg));
            }
        })
    }
290
291     /// Raises an error if the offset moves the pointer outside of its allocation.
292     /// We consider ZSTs their own huge allocation that doesn't overlap with anything (and nothing
293     /// moves in there because the size is 0). We also consider the NULL pointer its own separate
294     /// allocation, and all the remaining integers pointers their own allocation.
295     fn pointer_offset_inbounds(
296         &self,
297         ptr: Scalar<Tag>,
298         pointee_ty: Ty<'tcx>,
299         offset: i64,
300     ) -> InterpResult<'tcx, Scalar<Tag>> {
301         // FIXME: assuming here that type size is less than `i64::max_value()`.
302         let pointee_size = self.layout_of(pointee_ty)?.size.bytes() as i64;
303         let offset = offset
304             .checked_mul(pointee_size)
305             .ok_or_else(|| InterpError::Overflow(mir::BinOp::Mul))?;
306         // Now let's see what kind of pointer this is.
307         let ptr = if offset == 0 {
308             match ptr {
309                 Scalar::Ptr(ptr) => ptr,
310                 Scalar::Raw { .. } => {
311                     // Offset 0 on an integer. We accept that, pretending there is
312                     // a little zero-sized allocation here.
313                     return Ok(ptr);
314                 }
315             }
316         } else {
317             // Offset > 0. We *require* a pointer.
318             self.force_ptr(ptr)?
319         };
320         // Both old and new pointer must be in-bounds of a *live* allocation.
321         // (Of the same allocation, but that part is trivial with our representation.)
322         self.pointer_inbounds(ptr)?;
323         let ptr = ptr.signed_offset(offset, self)?;
324         self.pointer_inbounds(ptr)?;
325         Ok(Scalar::Ptr(ptr))
326     }
327 }