// src/operator.rs
use rustc::ty::{self, Ty};
use rustc::ty::layout::{TyLayout, Primitive};
use rustc::mir;

use super::*;

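// A rough sketch of how this module fits together (my reading of the code
// below, not something the file states explicitly): `ptr_op` handles binary
// operators once pointer values are involved, returning `Ok(None)` when the
// operation is not one it knows how to handle so the caller can fall back to
// its ordinary path; `ptr_int_arithmetic` covers pointer-with-integer
// arithmetic; and `pointer_offset_inbounds` implements the in-bounds `Offset`
// operator.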
pub trait EvalContextExt<'tcx> {
    fn ptr_op(
        &self,
        bin_op: mir::BinOp,
        left: Scalar,
        left_layout: TyLayout<'tcx>,
        right: Scalar,
        right_layout: TyLayout<'tcx>,
    ) -> EvalResult<'tcx, Option<(Scalar, bool)>>;

    fn ptr_int_arithmetic(
        &self,
        bin_op: mir::BinOp,
        left: Pointer,
        right: i128,
        signed: bool,
    ) -> EvalResult<'tcx, (Scalar, bool)>;

    fn pointer_offset_inbounds(
        &self,
        ptr: Scalar,
        pointee_ty: Ty<'tcx>,
        offset: i64,
    ) -> EvalResult<'tcx, Scalar>;
}

impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super::Evaluator<'tcx>> {
    fn ptr_op(
        &self,
        bin_op: mir::BinOp,
        left: Scalar,
        left_layout: TyLayout<'tcx>,
        right: Scalar,
        right_layout: TyLayout<'tcx>,
    ) -> EvalResult<'tcx, Option<(Scalar, bool)>> {
        trace!("ptr_op: {:?} {:?} {:?}", left, bin_op, right);

        use rustc::mir::BinOp::*;
        use rustc::ty::layout::Integer::*;
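        // (Illustrative) On a 64-bit target, `pointer_size()` is 8 bytes, so the two
        // primitives built below are `Int(I64, false)` for usize and `Int(I64, true)`
        // for isize.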
        let usize = Primitive::Int(match self.memory.pointer_size().bytes() {
            1 => I8,
            2 => I16,
            4 => I32,
            8 => I64,
            16 => I128,
            _ => unreachable!(),
        }, /*signed*/ false);
        let isize = Primitive::Int(match self.memory.pointer_size().bytes() {
            1 => I8,
            2 => I16,
            4 => I32,
            8 => I64,
            16 => I128,
            _ => unreachable!(),
        }, /*signed*/ true);
        let left_kind = match left_layout.abi {
            ty::layout::Abi::Scalar(ref scalar) => scalar.value,
            _ => Err(EvalErrorKind::TypeNotPrimitive(left_layout.ty))?,
        };
        let right_kind = match right_layout.abi {
            ty::layout::Abi::Scalar(ref scalar) => scalar.value,
            _ => Err(EvalErrorKind::TypeNotPrimitive(right_layout.ty))?,
        };
        match bin_op {
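            // `Offset` is the raw-pointer `offset` operation: the right operand counts
            // elements of the pointee type, not bytes, and the result must stay inside
            // the original allocation. The conversion to a byte offset and the bounds
            // check happen in `pointer_offset_inbounds` below.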
            Offset => {
                assert!(left_kind == Primitive::Pointer && right_kind == usize);
                let pointee_ty = left_layout.ty
                    .builtin_deref(true)
                    .expect("Offset called on non-ptr type")
                    .ty;
                let ptr = self.pointer_offset_inbounds(
                    left,
                    pointee_ty,
                    right.to_bits(self.memory.pointer_size())? as i64,
                )?;
                Ok(Some((ptr, false)))
            }
            // These work on anything
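            // For a pointer compared against a plain integer, the catch-all arms below
            // currently answer `false` for Eq and `true` for Ne; the FIXMEs note that
            // anything other than a comparison against NULL should probably be an error
            // instead.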
            Eq if left_kind == right_kind => {
                let result = match (left, right) {
                    (Scalar::Bits { .. }, Scalar::Bits { .. }) => {
                        left.to_bits(left_layout.size)? == right.to_bits(right_layout.size)?
                    },
                    // FIXME: Test if both allocations are still live *or* if they are in the same allocation? (same for Ne below)
                    (Scalar::Ptr(left), Scalar::Ptr(right)) => left == right,
                    // FIXME: We should probably error out when comparing anything but NULL with a pointer (same for Ne below)
                    _ => false,
                };
                Ok(Some((Scalar::from_bool(result), false)))
            }
            Ne if left_kind == right_kind => {
                let result = match (left, right) {
                    (Scalar::Bits { .. }, Scalar::Bits { .. }) => {
                        left.to_bits(left_layout.size)? != right.to_bits(right_layout.size)?
                    },
                    (Scalar::Ptr(left), Scalar::Ptr(right)) => left != right,
                    _ => true,
                };
                Ok(Some((Scalar::from_bool(result), false)))
            }
            // These need both pointers to be in the same allocation
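            // For two pointers into the same allocation, the comparison is decided purely
            // by their offsets, and `Sub` yields the byte distance as a usize via the
            // regular `binary_op` path; e.g. (illustrative) offsets 20 and 8 compare as
            // 20 > 8 and subtract to 12. Pointers into different allocations are rejected
            // with `InvalidPointerMath`.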
            Lt | Le | Gt | Ge | Sub
                if left_kind == right_kind &&
                       (left_kind == Primitive::Pointer || left_kind == usize || left_kind == isize) &&
                       left.is_ptr() && right.is_ptr() => {
                let left = left.to_ptr()?;
                let right = right.to_ptr()?;
                if left.alloc_id == right.alloc_id {
                    let res = match bin_op {
                        Lt => left.offset < right.offset,
                        Le => left.offset <= right.offset,
                        Gt => left.offset > right.offset,
                        Ge => left.offset >= right.offset,
                        Sub => {
                            let left_offset = Scalar::from_uint(left.offset.bytes(), self.memory.pointer_size());
                            let right_offset = Scalar::from_uint(right.offset.bytes(), self.memory.pointer_size());
                            let layout = self.layout_of(self.tcx.types.usize)?;
                            return self.binary_op(
                                Sub,
                                ValTy { value: Value::Scalar(left_offset.into()), layout },
                                ValTy { value: Value::Scalar(right_offset.into()), layout },
                            ).map(Some)
                        }
                        _ => bug!("We already established it has to be one of these operators."),
                    };
                    Ok(Some((Scalar::from_bool(res), false)))
                } else {
                    // Both are pointers, but from different allocations.
                    err!(InvalidPointerMath)
                }
            }
            // These work if one operand is a pointer, the other an integer
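            // The next two arms are mirror images: `ptr OP int` is handled directly,
            // while `int OP ptr` is only accepted for the commutative operators
            // (Add, BitAnd), where the operands can simply be swapped; e.g.
            // (illustrative) `5 + p` is evaluated as `p + 5`.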
            Add | BitAnd | Sub
                if left_kind == right_kind && (left_kind == usize || left_kind == isize) &&
                       left.is_ptr() && right.is_bits() => {
                // Cast to i128 is fine as we checked the kind to be ptr-sized
                self.ptr_int_arithmetic(
                    bin_op,
                    left.to_ptr()?,
                    right.to_bits(self.memory.pointer_size())? as i128,
                    left_kind == isize,
                ).map(Some)
            }
            Add | BitAnd
                if left_kind == right_kind && (left_kind == usize || left_kind == isize) &&
                       left.is_bits() && right.is_ptr() => {
                // This is a commutative operation, just swap the operands
                self.ptr_int_arithmetic(
                    bin_op,
                    right.to_ptr()?,
                    left.to_bits(self.memory.pointer_size())? as i128,
                    left_kind == isize,
                ).map(Some)
            }
            _ => Ok(None),
        }
    }

    fn ptr_int_arithmetic(
        &self,
        bin_op: mir::BinOp,
        left: Pointer,
        right: i128,
        signed: bool,
    ) -> EvalResult<'tcx, (Scalar, bool)> {
        use rustc::mir::BinOp::*;

        fn map_to_primval((res, over): (Pointer, bool)) -> (Scalar, bool) {
            (Scalar::Ptr(res), over)
        }

        Ok(match bin_op {
            Sub =>
                // The only way this can overflow is by underflowing, so the signedness of the right operand does not matter
                map_to_primval(left.overflowing_signed_offset(-right, self)),
            Add if signed =>
                map_to_primval(left.overflowing_signed_offset(right, self)),
            Add if !signed =>
                map_to_primval(left.overflowing_offset(Size::from_bytes(right as u64), self)),

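            // `ptr & mask` is only meaningful when the mask cleanly separates the
            // (aligned) base address from the offset within the allocation. E.g.
            // (illustrative) for an allocation aligned to 8 bytes, `base_mask` is
            // `!0b111`: masking with `!0b111` keeps a pointer whose offset is rounded
            // down to a multiple of 8, while masking with `0b111` yields the low bits
            // of the address as a plain integer (the aligned base contributes nothing
            // below the alignment). Masks that mix the two regions are rejected.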
            BitAnd if !signed => {
                let base_mask: u64 = !(self.memory.get(left.alloc_id)?.align.abi() - 1);
                let right = right as u64;
                let ptr_size = self.memory.pointer_size().bytes() as u8;
                if right & base_mask == base_mask {
                    // Case 1: The base address bits are all preserved, i.e., right is all-1 there
                    (Scalar::Ptr(Pointer::new(left.alloc_id, Size::from_bytes(left.offset.bytes() & right))), false)
                } else if right & base_mask == 0 {
                    // Case 2: The base address bits are all taken away, i.e., right is all-0 there
                    (Scalar::Bits { bits: (left.offset.bytes() & right) as u128, size: ptr_size }, false)
                } else {
                    return err!(ReadPointerAsBytes);
                }
            }

            _ => {
                let msg = format!("unimplemented binary op on pointer {:?}: {:?}, {:?} ({})", bin_op, left, right, if signed { "signed" } else { "unsigned" });
                return err!(Unimplemented(msg));
            }
        })
    }

    /// This function raises an error if the offset moves the pointer outside of its allocation.
    /// We consider ZSTs their own huge allocation that doesn't overlap with anything (and nothing
    /// moves in there because the size is 0). We also consider the NULL pointer its own separate
    /// allocation, and all the remaining integer pointers their own allocation.
    fn pointer_offset_inbounds(
        &self,
        ptr: Scalar,
        pointee_ty: Ty<'tcx>,
        offset: i64,
    ) -> EvalResult<'tcx, Scalar> {
        if ptr.is_null() {
            // NULL pointers must only be offset by 0
            return if offset == 0 {
                Ok(ptr)
            } else {
                err!(InvalidNullPointerUsage)
            };
        }
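        // From here on, the element offset is converted into a byte offset; e.g.
        // (illustrative) offsetting a `*const u32` by 3 becomes a byte offset of
        // 3 * 4 = 12. `checked_mul` turns overflow of that multiplication into
        // `Overflow(Mul)` instead of silently wrapping.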
        // FIXME: assuming here that type size is < i64::max_value()
        let pointee_size = self.layout_of(pointee_ty)?.size.bytes() as i64;
        let offset = offset.checked_mul(pointee_size).ok_or_else(|| EvalErrorKind::Overflow(mir::BinOp::Mul))?;
        // Now let's see what kind of pointer this is
        if let Scalar::Ptr(ptr) = ptr {
            // Both old and new pointer must be in-bounds.
            // (Of the same allocation, but that part is trivial with our representation.)
            self.memory.check_bounds(ptr, false)?;
            let ptr = ptr.signed_offset(offset, self)?;
            self.memory.check_bounds(ptr, false)?;
            Ok(Scalar::Ptr(ptr))
        } else {
            // An integer pointer; it can move around freely, as long as it does not overflow
            // (which ptr_signed_offset checks).
            ptr.ptr_signed_offset(offset, self)
        }
    }
}