src/librustc_mir/interpret/terminator.rs
use std::borrow::Cow;

use rustc::{mir, ty};
use rustc::ty::Instance;
use rustc::ty::layout::{self, TyLayout, LayoutOf};
use syntax::source_map::Span;
use rustc_target::spec::abi::Abi;

use rustc::mir::interpret::{InterpResult, PointerArithmetic, InterpError, Scalar};
use super::{
    InterpretCx, Machine, Immediate, OpTy, ImmTy, PlaceTy, MPlaceTy, StackPopCleanup
};

impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
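    /// Jump to the start of the given block, or raise `Unreachable` if there is no block
    /// to go to (e.g. because a diverging call has no return block).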
    #[inline]
    pub fn goto_block(&mut self, target: Option<mir::BasicBlock>) -> InterpResult<'tcx> {
        if let Some(target) = target {
            self.frame_mut().block = target;
            self.frame_mut().stmt = 0;
            Ok(())
        } else {
            err!(Unreachable)
        }
    }

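    /// Evaluate a single MIR terminator of the current frame: transfer control to the next
    /// block, push a new frame for calls, or pop the frame on `Return`.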
    pub(super) fn eval_terminator(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
    ) -> InterpResult<'tcx> {
        use rustc::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.frame().return_place.map(|r| self.dump_place(*r));
                self.pop_stack_frame()?
            }

            Goto { target } => self.goto_block(Some(target))?,

            SwitchInt {
                ref discr,
                ref values,
                ref targets,
                ..
            } => {
                let discr = self.read_immediate(self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);

                // Branch to the `otherwise` case by default, if no match is found.
                let mut target_block = targets[targets.len() - 1];

                for (index, &const_int) in values.iter().enumerate() {
                    // Compare using binary_op, to also support pointer values
                    let const_int = Scalar::from_uint(const_int, discr.layout.size);
                    let (res, _) = self.binary_op(mir::BinOp::Eq,
                        discr,
                        ImmTy::from_scalar(const_int, discr.layout),
                    )?;
                    if res.to_bool()? {
                        target_block = targets[index];
                        break;
                    }
                }

                self.goto_block(Some(target_block))?;
            }

            Call {
                ref func,
                ref args,
                ref destination,
                ..
            } => {
                let (dest, ret) = match *destination {
                    Some((ref lv, target)) => (Some(self.eval_place(lv)?), Some(target)),
                    None => (None, None),
                };

                let func = self.eval_operand(func, None)?;
                let (fn_def, abi) = match func.layout.ty.sty {
                    ty::FnPtr(sig) => {
                        let caller_abi = sig.abi();
                        let fn_ptr = self.read_scalar(func)?.to_ptr()?;
                        let instance = self.memory.get_fn(fn_ptr)?;
                        (instance, caller_abi)
                    }
                    ty::FnDef(def_id, substs) => {
                        let sig = func.layout.ty.fn_sig(*self.tcx);
                        (self.resolve(def_id, substs)?, sig.abi())
                    },
                    _ => {
                        let msg = format!("can't handle callee of type {:?}", func.layout.ty);
                        return err!(Unimplemented(msg));
                    }
                };
                let args = self.eval_operands(args)?;
                self.eval_fn_call(
                    fn_def,
                    terminator.source_info.span,
                    abi,
                    &args[..],
                    dest,
                    ret,
                )?;
            }

            Drop {
                ref location,
                target,
                ..
            } => {
                // FIXME(CTFE): forbid drop in const eval
                let place = self.eval_place(location)?;
                let ty = place.layout.ty;
                trace!("TerminatorKind::drop: {:?}, type {}", location, ty);

                let instance = Instance::resolve_drop_in_place(*self.tcx, ty);
                self.drop_in_place(
                    place,
                    instance,
                    terminator.source_info.span,
                    target,
                )?;
            }

            Assert {
                ref cond,
                expected,
                ref msg,
                target,
                ..
            } => {
                let cond_val = self.read_immediate(self.eval_operand(cond, None)?)?
                    .to_scalar()?.to_bool()?;
                if expected == cond_val {
                    self.goto_block(Some(target))?;
                } else {
                    // Compute error message
                    use rustc::mir::interpret::InterpError::*;
                    return match *msg {
                        BoundsCheck { ref len, ref index } => {
                            let len = self.read_immediate(self.eval_operand(len, None)?)
                                .expect("can't eval len").to_scalar()?
                                .to_bits(self.memory().pointer_size())? as u64;
                            let index = self.read_immediate(self.eval_operand(index, None)?)
                                .expect("can't eval index").to_scalar()?
                                .to_bits(self.memory().pointer_size())? as u64;
                            err!(BoundsCheck { len, index })
                        }
                        Overflow(op) => Err(Overflow(op).into()),
                        OverflowNeg => Err(OverflowNeg.into()),
                        DivisionByZero => Err(DivisionByZero.into()),
                        RemainderByZero => Err(RemainderByZero.into()),
                        GeneratorResumedAfterReturn |
                        GeneratorResumedAfterPanic => unimplemented!(),
                        _ => bug!(),
                    };
                }
            }

            Yield { .. } |
            GeneratorDrop |
            DropAndReplace { .. } |
            Resume |
            Abort => unimplemented!("{:#?}", terminator.kind),
            FalseEdges { .. } =>
                bug!("should have been eliminated by `simplify_branches` mir pass"),
            FalseUnwind { .. } =>
                bug!("should have been eliminated by `simplify_branches` mir pass"),
            Unreachable => return err!(Unreachable),
        }

        Ok(())
    }

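    /// Check whether a value of the caller's layout can be passed where the callee expects
    /// the given layout: identical types always match, and for the Rust ABI we additionally
    /// accept arguments whose scalar (pair) representations agree, so some transmutes are
    /// allowed.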
    fn check_argument_compat(
        rust_abi: bool,
        caller: TyLayout<'tcx>,
        callee: TyLayout<'tcx>,
    ) -> bool {
        if caller.ty == callee.ty {
            // No question
            return true;
        }
        if !rust_abi {
            // Don't risk anything
            return false;
        }
        // Compare layout
        match (&caller.abi, &callee.abi) {
            // Different valid ranges are okay (once we enforce validity,
            // that will take care to make it UB to leave the range, just
            // like for transmute).
            (layout::Abi::Scalar(ref caller), layout::Abi::Scalar(ref callee)) =>
                caller.value == callee.value,
            (layout::Abi::ScalarPair(ref caller1, ref caller2),
             layout::Abi::ScalarPair(ref callee1, ref callee2)) =>
                caller1.value == callee1.value && caller2.value == callee2.value,
            // Be conservative
            _ => false
        }
    }

    /// Pass a single argument, checking the types for compatibility.
    fn pass_argument(
        &mut self,
        rust_abi: bool,
        caller_arg: &mut impl Iterator<Item=OpTy<'tcx, M::PointerTag>>,
        callee_arg: PlaceTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx> {
        if rust_abi && callee_arg.layout.is_zst() {
            // Nothing to do.
            trace!("Skipping callee ZST");
            return Ok(());
        }
        let caller_arg = caller_arg.next()
            .ok_or_else(|| InterpError::FunctionArgCountMismatch)?;
        if rust_abi {
            debug_assert!(!caller_arg.layout.is_zst(), "ZSTs must have been already filtered out");
        }
        // Now, check
        if !Self::check_argument_compat(rust_abi, caller_arg.layout, callee_arg.layout) {
            return err!(FunctionArgMismatch(caller_arg.layout.ty, callee_arg.layout.ty));
        }
        // We allow some transmutes here
        self.copy_op_transmute(caller_arg, callee_arg)
    }

    /// Call this function -- pushing the stack frame and initializing the arguments.
    fn eval_fn_call(
        &mut self,
        instance: ty::Instance<'tcx>,
        span: Span,
        caller_abi: Abi,
        args: &[OpTy<'tcx, M::PointerTag>],
        dest: Option<PlaceTy<'tcx, M::PointerTag>>,
        ret: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        trace!("eval_fn_call: {:#?}", instance);

        match instance.def {
            ty::InstanceDef::Intrinsic(..) => {
                if caller_abi != Abi::RustIntrinsic {
                    return err!(FunctionAbiMismatch(caller_abi, Abi::RustIntrinsic));
                }
                // The intrinsic itself cannot diverge, so if we got here without a return
                // place... (can happen e.g., for transmute returning `!`)
                let dest = match dest {
                    Some(dest) => dest,
                    None => return err!(Unreachable)
                };
                M::call_intrinsic(self, instance, args, dest)?;
                // No stack frame gets pushed, the main loop will just act as if the
                // call completed.
                self.goto_block(ret)?;
                self.dump_place(*dest);
                Ok(())
            }
            ty::InstanceDef::VtableShim(..) |
            ty::InstanceDef::ClosureOnceShim { .. } |
            ty::InstanceDef::FnPtrShim(..) |
            ty::InstanceDef::DropGlue(..) |
            ty::InstanceDef::CloneShim(..) |
            ty::InstanceDef::Item(_) => {
                // ABI check
                {
                    let callee_abi = {
                        let instance_ty = instance.ty(*self.tcx);
                        match instance_ty.sty {
                            ty::FnDef(..) =>
                                instance_ty.fn_sig(*self.tcx).abi(),
                            ty::Closure(..) => Abi::RustCall,
                            ty::Generator(..) => Abi::Rust,
                            _ => bug!("unexpected callee ty: {:?}", instance_ty),
                        }
                    };
                    // Rust and RustCall are compatible
                    let normalize_abi = |abi| if abi == Abi::RustCall { Abi::Rust } else { abi };
                    if normalize_abi(caller_abi) != normalize_abi(callee_abi) {
                        return err!(FunctionAbiMismatch(caller_abi, callee_abi));
                    }
                }

                // We need MIR for this fn
                let mir = match M::find_fn(self, instance, args, dest, ret)? {
                    Some(mir) => mir,
                    None => return Ok(()),
                };

                self.push_stack_frame(
                    instance,
                    span,
                    mir,
                    dest,
                    StackPopCleanup::Goto(ret),
                )?;

                // We want to pop this frame again in case there was an error, to put
                // the blame in the right location.  Until the 2018 edition is used in
                // the compiler, we have to do this with an immediately invoked function.
                let res = (||{
                    trace!(
                        "caller ABI: {:?}, args: {:#?}",
                        caller_abi,
                        args.iter()
                            .map(|arg| (arg.layout.ty, format!("{:?}", **arg)))
                            .collect::<Vec<_>>()
                    );
                    trace!(
                        "spread_arg: {:?}, locals: {:#?}",
                        mir.spread_arg,
                        mir.args_iter()
                            .map(|local|
                                (local, self.layout_of_local(self.frame(), local, None).unwrap().ty)
                            )
                            .collect::<Vec<_>>()
                    );

                    // Figure out how to pass which arguments.
                    // The Rust ABI is special: ZSTs get skipped.
                    let rust_abi = match caller_abi {
                        Abi::Rust | Abi::RustCall => true,
                        _ => false
                    };
                    // We have two iterators: Where the arguments come from,
                    // and where they go to.

                    // For where they come from: If the ABI is RustCall, we untuple the
                    // last incoming argument.  These two iterators do not have the same type,
                    // so to keep the code paths uniform we accept an allocation
                    // (for RustCall ABI only).
                    let caller_args : Cow<'_, [OpTy<'tcx, M::PointerTag>]> =
                        if caller_abi == Abi::RustCall && !args.is_empty() {
                            // Untuple
                            let (&untuple_arg, args) = args.split_last().unwrap();
                            trace!("eval_fn_call: Will pass last argument by untupling");
                            Cow::from(args.iter().map(|&a| Ok(a))
                                .chain((0..untuple_arg.layout.fields.count()).into_iter()
                                    .map(|i| self.operand_field(untuple_arg, i as u64))
                                )
                                .collect::<InterpResult<'_, Vec<OpTy<'tcx, M::PointerTag>>>>()?)
                        } else {
                            // Plain arg passing
                            Cow::from(args)
                        };
                    // Skip ZSTs
                    let mut caller_iter = caller_args.iter()
                        .filter(|op| !rust_abi || !op.layout.is_zst())
                        .map(|op| *op);

                    // Now we have to spread them out across the callee's locals,
                    // taking into account the `spread_arg`.  If we could write
                    // this as a single iterator (that handles `spread_arg`), then
                    // `pass_argument` would be the loop body. It takes care to
                    // not advance `caller_iter` for ZSTs.
                    let mut locals_iter = mir.args_iter();
                    while let Some(local) = locals_iter.next() {
                        let dest = self.eval_place(
                            &mir::Place::Base(mir::PlaceBase::Local(local))
                        )?;
                        if Some(local) == mir.spread_arg {
                            // Must be a tuple
                            for i in 0..dest.layout.fields.count() {
                                let dest = self.place_field(dest, i as u64)?;
                                self.pass_argument(rust_abi, &mut caller_iter, dest)?;
                            }
                        } else {
                            // Normal argument
                            self.pass_argument(rust_abi, &mut caller_iter, dest)?;
                        }
                    }
                    // Now we should have no more caller args
                    if caller_iter.next().is_some() {
                        trace!("Caller has passed too many args");
                        return err!(FunctionArgCountMismatch);
                    }
                    // Don't forget to check the return type!
                    if let Some(caller_ret) = dest {
                        let callee_ret = self.eval_place(
                            &mir::Place::RETURN_PLACE
                        )?;
                        if !Self::check_argument_compat(
                            rust_abi,
                            caller_ret.layout,
                            callee_ret.layout,
                        ) {
                            return err!(FunctionRetMismatch(
                                caller_ret.layout.ty, callee_ret.layout.ty
                            ));
                        }
                    } else {
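                        // The caller provided no return place, so the call must diverge:
                        // the callee's declared return type has to be uninhabited.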
                        let callee_layout =
                            self.layout_of_local(self.frame(), mir::RETURN_PLACE, None)?;
                        if !callee_layout.abi.is_uninhabited() {
                            return err!(FunctionRetMismatch(
                                self.tcx.types.never, callee_layout.ty
                            ));
                        }
                    }
                    Ok(())
                })();
                match res {
                    Err(err) => {
                        self.stack.pop();
                        Err(err)
                    }
                    Ok(v) => Ok(v)
                }
            }
            // cannot use the shim here, because that will only result in infinite recursion
            ty::InstanceDef::Virtual(_, idx) => {
                let mut args = args.to_vec();
                let ptr_size = self.pointer_size();
                // We have to implement all "object safe receivers".  Currently we
                // support built-in pointers (&, &mut, Box) as well as unsized-self.  We do
                // not yet support custom self types.
                // Also see librustc_codegen_llvm/abi.rs and librustc_codegen_llvm/mir/block.rs.
                let receiver_place = match args[0].layout.ty.builtin_deref(true) {
                    Some(_) => {
                        // Built-in pointer.
                        self.deref_operand(args[0])?
                    }
                    None => {
                        // Unsized self.
                        args[0].to_mem_place()
                    }
                };
                // Find and consult vtable
                let vtable = receiver_place.vtable()?;
                self.memory.check_align(vtable.into(), self.tcx.data_layout.pointer_align.abi)?;
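                // The first three vtable slots hold the drop glue, size, and align, so the
                // method for slot `idx` lives at offset `(idx + 3) * ptr_size`.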
                let fn_ptr = self.memory.get(vtable.alloc_id)?.read_ptr_sized(
                    self,
                    vtable.offset(ptr_size * (idx as u64 + 3), self)?,
                )?.to_ptr()?;
                let instance = self.memory.get_fn(fn_ptr)?;

                // `*mut receiver_place.layout.ty` is almost the layout that we
                // want for args[0]: We have to project to field 0 because we want
                // a thin pointer.
                assert!(receiver_place.layout.is_unsized());
                let receiver_ptr_ty = self.tcx.mk_mut_ptr(receiver_place.layout.ty);
                let this_receiver_ptr = self.layout_of(receiver_ptr_ty)?.field(self, 0)?;
                // Adjust receiver argument.
                args[0] = OpTy::from(ImmTy {
                    layout: this_receiver_ptr,
                    imm: Immediate::Scalar(receiver_place.ptr.into())
                });
                trace!("Patched self operand to {:#?}", args[0]);
                // recurse with concrete function
                self.eval_fn_call(instance, span, caller_abi, &args, dest, ret)
            }
        }
    }

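    /// Evaluate the drop glue `instance` on `place`: trait objects are unpacked to their
    /// concrete type first, then the glue is called like a regular function with a `*mut`
    /// receiver argument and unit return type, continuing at `target` afterwards.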
    fn drop_in_place(
        &mut self,
        place: PlaceTy<'tcx, M::PointerTag>,
        instance: ty::Instance<'tcx>,
        span: Span,
        target: mir::BasicBlock,
    ) -> InterpResult<'tcx> {
        trace!("drop_in_place: {:?},\n  {:?}, {:?}", *place, place.layout.ty, instance);
        // We take the address of the object.  This may well be unaligned, which is fine
        // for us here.  However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        let (instance, place) = match place.layout.ty.sty {
            ty::Dynamic(..) => {
                // Dropping a trait object.
                self.unpack_dyn_trait(place)?
            }
            _ => (instance, place),
        };

        let arg = ImmTy {
            imm: place.to_ref(),
            layout: self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
        };

        let ty = self.tcx.mk_unit(); // return type is ()
        let dest = MPlaceTy::dangling(self.layout_of(ty)?, self);

        self.eval_fn_call(
            instance,
            span,
            Abi::Rust,
            &[arg.into()],
            Some(dest.into()),
            Some(target),
        )
    }
}