src/librustc_mir/interpret/terminator.rs
use std::borrow::Cow;

use rustc::{mir, ty};
use rustc::ty::Instance;
use rustc::ty::layout::{self, TyLayout, LayoutOf};
use syntax::source_map::Span;
use rustc_target::spec::abi::Abi;

use super::{
    InterpResult, PointerArithmetic, InterpError, Scalar,
    InterpCx, Machine, Immediate, OpTy, ImmTy, PlaceTy, MPlaceTy, StackPopCleanup, FnVal,
    UnsupportedOpInfo,
};
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    #[inline]
    pub fn goto_block(&mut self, target: Option<mir::BasicBlock>) -> InterpResult<'tcx> {
        if let Some(target) = target {
            self.frame_mut().block = target;
            self.frame_mut().stmt = 0;
            Ok(())
        } else {
            err_ub!(Unreachable)
        }
    }

    pub(super) fn eval_terminator(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
    ) -> InterpResult<'tcx> {
        use rustc::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.frame().return_place.map(|r| self.dump_place(*r));
                self.pop_stack_frame()?
            }

            Goto { target } => self.goto_block(Some(target))?,

            SwitchInt {
                ref discr,
                ref values,
                ref targets,
                ..
            } => {
                let discr = self.read_immediate(self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);

                // Branch to the `otherwise` case by default, if no match is found.
                let mut target_block = targets[targets.len() - 1];

                for (index, &const_int) in values.iter().enumerate() {
                    // Compare using binary_op, to also support pointer values
                    let const_int = Scalar::from_uint(const_int, discr.layout.size);
                    let (res, _) = self.binary_op(mir::BinOp::Eq,
                        discr,
                        ImmTy::from_scalar(const_int, discr.layout),
                    )?;
                    if res.to_bool()? {
                        target_block = targets[index];
                        break;
                    }
                }

                self.goto_block(Some(target_block))?;
            }
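            // For example, `match x { 0 => A, 1 => B, _ => C }` lowers to a
            // `SwitchInt` with `values == [0, 1]` and `targets == [bbA, bbB, bbC]`:
            // there is always exactly one more target than there are values, and
            // the trailing target is the `otherwise` block read above.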

            Call {
                ref func,
                ref args,
                ref destination,
                ..
            } => {
                let (dest, ret) = match *destination {
                    Some((ref lv, target)) => (Some(self.eval_place(lv)?), Some(target)),
                    None => (None, None),
                };

                let func = self.eval_operand(func, None)?;
                let (fn_val, abi) = match func.layout.ty.sty {
                    ty::FnPtr(sig) => {
                        let caller_abi = sig.abi();
                        let fn_ptr = self.read_scalar(func)?.not_undef()?;
                        let fn_val = self.memory.get_fn(fn_ptr)?;
                        (fn_val, caller_abi)
                    }
                    ty::FnDef(def_id, substs) => {
                        let sig = func.layout.ty.fn_sig(*self.tcx);
                        (FnVal::Instance(self.resolve(def_id, substs)?), sig.abi())
                    },
                    _ => {
                        let msg = format!("can't handle callee of type {:?}", func.layout.ty);
                        return err!(Unimplemented(msg));
                    }
                };
                let args = self.eval_operands(args)?;
                self.eval_fn_call(
                    fn_val,
                    terminator.source_info.span,
                    abi,
                    &args[..],
                    dest,
                    ret,
                )?;
            }
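            // For example, a MIR terminator `_1 = f(move _2) -> bb3` has
            // `destination == Some((_1, bb3))`, so `dest`/`ret` name the place and
            // block to continue with; a diverging call carries `None` and thus
            // gets neither a return place nor a return block.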

            Drop {
                ref location,
                target,
                ..
            } => {
                // FIXME(CTFE): forbid drop in const eval
                let place = self.eval_place(location)?;
                let ty = place.layout.ty;
                trace!("TerminatorKind::drop: {:?}, type {}", location, ty);

                let instance = Instance::resolve_drop_in_place(*self.tcx, ty);
                self.drop_in_place(
                    place,
                    instance,
                    terminator.source_info.span,
                    target,
                )?;
            }

            Assert {
                ref cond,
                expected,
                ref msg,
                target,
                ..
            } => {
                let cond_val = self.read_immediate(self.eval_operand(cond, None)?)?
                    .to_scalar()?.to_bool()?;
                if expected == cond_val {
                    self.goto_block(Some(target))?;
                } else {
                    // Compute error message
                    use rustc::mir::interpret::PanicInfo::*;
                    return match msg {
                        BoundsCheck { ref len, ref index } => {
                            let len = self.read_immediate(self.eval_operand(len, None)?)
                                .expect("can't eval len").to_scalar()?
                                .to_bits(self.memory().pointer_size())? as u64;
                            let index = self.read_immediate(self.eval_operand(index, None)?)
                                .expect("can't eval index").to_scalar()?
                                .to_bits(self.memory().pointer_size())? as u64;
                            err_panic!(BoundsCheck { len, index })
                        }
                        Overflow(op) =>
                            err_panic!(Overflow(*op)),
                        OverflowNeg =>
                            err_panic!(OverflowNeg),
                        DivisionByZero =>
                            err_panic!(DivisionByZero),
                        RemainderByZero =>
                            err_panic!(RemainderByZero),
                        GeneratorResumedAfterReturn =>
                            err_panic!(GeneratorResumedAfterReturn),
                        GeneratorResumedAfterPanic =>
                            err_panic!(GeneratorResumedAfterPanic),
                        Panic { .. } =>
                            bug!("`Panic` variant cannot occur in MIR"),
                    };
                }
            }
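            // For example, array indexing `a[i]` emits an assert terminator
            // roughly of the form `assert(move _cond, "index out of bounds: ...",
            // len, index)`, whose message is the `BoundsCheck` variant evaluated
            // above.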

            Yield { .. } |
            GeneratorDrop |
            DropAndReplace { .. } |
            Resume |
            Abort => unimplemented!("{:#?}", terminator.kind),
            FalseEdges { .. } => bug!("should have been eliminated by \
                                      `simplify_branches` mir pass"),
            FalseUnwind { .. } => bug!("should have been eliminated by \
                                       `simplify_branches` mir pass"),
            Unreachable => return err_ub!(Unreachable),
        }

        Ok(())
    }

    fn check_argument_compat(
        rust_abi: bool,
        caller: TyLayout<'tcx>,
        callee: TyLayout<'tcx>,
    ) -> bool {
        if caller.ty == callee.ty {
            // No question
            return true;
        }
        if !rust_abi {
            // Don't risk anything
            return false;
        }
        // Compare layout
        match (&caller.abi, &callee.abi) {
            // Different valid ranges are okay (once we enforce validity,
            // that will take care to make it UB to leave the range, just
            // like for transmute).
            (layout::Abi::Scalar(ref caller), layout::Abi::Scalar(ref callee)) =>
                caller.value == callee.value,
            (layout::Abi::ScalarPair(ref caller1, ref caller2),
             layout::Abi::ScalarPair(ref callee1, ref callee2)) =>
                caller1.value == callee1.value && caller2.value == callee2.value,
            // Be conservative
            _ => false
        }
    }
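
    // Some examples for the `rust_abi` case above: `char` and `u32` are
    // compatible (both are `Scalar`s of the same `Int` primitive; only the
    // valid range differs), while `u64` vs. `(u32, u32)` is rejected
    // (`Scalar` vs. `ScalarPair`), as is `f32` vs. `u32` (`Float` vs. `Int`
    // primitive).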

    /// Pass a single argument, checking the types for compatibility.
    fn pass_argument(
        &mut self,
        rust_abi: bool,
        caller_arg: &mut impl Iterator<Item=OpTy<'tcx, M::PointerTag>>,
        callee_arg: PlaceTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx> {
        if rust_abi && callee_arg.layout.is_zst() {
            // Nothing to do.
            trace!("Skipping callee ZST");
            return Ok(());
        }
        let caller_arg = caller_arg.next()
            .ok_or_else(|| InterpError::Unsupported(UnsupportedOpInfo::FunctionArgCountMismatch))?;
        if rust_abi {
            debug_assert!(!caller_arg.layout.is_zst(), "ZSTs must have already been filtered out");
        }
        // Now, check
        if !Self::check_argument_compat(rust_abi, caller_arg.layout, callee_arg.layout) {
            return err!(FunctionArgMismatch(caller_arg.layout.ty, callee_arg.layout.ty));
        }
        // We allow some transmutes here
        self.copy_op_transmute(caller_arg, callee_arg)
    }

    /// Call this function -- pushing the stack frame and initializing the arguments.
    fn eval_fn_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        span: Span,
        caller_abi: Abi,
        args: &[OpTy<'tcx, M::PointerTag>],
        dest: Option<PlaceTy<'tcx, M::PointerTag>>,
        ret: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        trace!("eval_fn_call: {:#?}", fn_val);

        let instance = match fn_val {
            FnVal::Instance(instance) => instance,
            FnVal::Other(extra) => {
                return M::call_extra_fn(self, extra, args, dest, ret);
            }
        };

        match instance.def {
            ty::InstanceDef::Intrinsic(..) => {
                if caller_abi != Abi::RustIntrinsic {
                    return err!(FunctionAbiMismatch(caller_abi, Abi::RustIntrinsic));
                }
                // The intrinsic itself cannot diverge, so if we got here without a return
                // place... (can happen e.g., for transmute returning `!`)
                let dest = match dest {
                    Some(dest) => dest,
                    None => return err_ub!(Unreachable)
                };
                M::call_intrinsic(self, instance, args, dest)?;
                // No stack frame gets pushed, the main loop will just act as if the
                // call completed.
                self.goto_block(ret)?;
                self.dump_place(*dest);
                Ok(())
            }
            ty::InstanceDef::VtableShim(..) |
            ty::InstanceDef::ClosureOnceShim { .. } |
            ty::InstanceDef::FnPtrShim(..) |
            ty::InstanceDef::DropGlue(..) |
            ty::InstanceDef::CloneShim(..) |
            ty::InstanceDef::Item(_) => {
                // ABI check
                {
                    let callee_abi = {
                        let instance_ty = instance.ty(*self.tcx);
                        match instance_ty.sty {
                            ty::FnDef(..) =>
                                instance_ty.fn_sig(*self.tcx).abi(),
                            ty::Closure(..) => Abi::RustCall,
                            ty::Generator(..) => Abi::Rust,
                            _ => bug!("unexpected callee ty: {:?}", instance_ty),
                        }
                    };
                    let normalize_abi = |abi| match abi {
                        Abi::Rust | Abi::RustCall | Abi::RustIntrinsic | Abi::PlatformIntrinsic =>
                            // These are all the same ABI, really.
                            Abi::Rust,
                        abi =>
                            abi,
                    };
                    if normalize_abi(caller_abi) != normalize_abi(callee_abi) {
                        return err!(FunctionAbiMismatch(caller_abi, callee_abi));
                    }
                }

                // We need MIR for this fn
                let body = match M::find_fn(self, instance, args, dest, ret)? {
                    Some(body) => body,
                    None => return Ok(()),
                };

                self.push_stack_frame(
                    instance,
                    span,
                    body,
                    dest,
                    StackPopCleanup::Goto(ret),
                )?;

                // We want to pop this frame again in case there was an error, to put
                // the blame in the right location.  Until the 2018 edition is used in
                // the compiler, we have to do this with an immediately invoked function.
                let res = (||{
                    trace!(
                        "caller ABI: {:?}, args: {:#?}",
                        caller_abi,
                        args.iter()
                            .map(|arg| (arg.layout.ty, format!("{:?}", **arg)))
                            .collect::<Vec<_>>()
                    );
                    trace!(
                        "spread_arg: {:?}, locals: {:#?}",
                        body.spread_arg,
                        body.args_iter()
                            .map(|local|
                                (local, self.layout_of_local(self.frame(), local, None).unwrap().ty)
                            )
                            .collect::<Vec<_>>()
                    );

                    // Figure out how to pass which arguments.
                    // The Rust ABI is special: ZSTs get skipped.
                    let rust_abi = match caller_abi {
                        Abi::Rust | Abi::RustCall => true,
                        _ => false
                    };
                    // We have two iterators: Where the arguments come from,
                    // and where they go to.

                    // For where they come from: If the ABI is RustCall, we untuple the
                    // last incoming argument.  These two iterators do not have the same type,
                    // so to keep the code paths uniform we accept an allocation
                    // (for RustCall ABI only).
                    let caller_args: Cow<'_, [OpTy<'tcx, M::PointerTag>]> =
                        if caller_abi == Abi::RustCall && !args.is_empty() {
                            // Untuple
                            let (&untuple_arg, args) = args.split_last().unwrap();
                            trace!("eval_fn_call: Will pass last argument by untupling");
                            Cow::from(args.iter().map(|&a| Ok(a))
                                .chain((0..untuple_arg.layout.fields.count()).into_iter()
                                    .map(|i| self.operand_field(untuple_arg, i as u64))
                                )
                                .collect::<InterpResult<'_, Vec<OpTy<'tcx, M::PointerTag>>>>()?)
                        } else {
                            // Plain arg passing
                            Cow::from(args)
                        };
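                    // For example, a closure call `f(1, 2)` goes through
                    // `Fn::call(&f, (1, 2))` with the `RustCall` ABI: `args` here is
                    // `[&f, (1, 2)]`, and untupling expands it to `[&f, 1, 2]` so it
                    // matches the callee's individual argument locals.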
                    // Skip ZSTs
                    let mut caller_iter = caller_args.iter()
                        .filter(|op| !rust_abi || !op.layout.is_zst())
                        .map(|op| *op);

                    // Now we have to spread them out across the callee's locals,
                    // taking into account the `spread_arg`.  If we could write
                    // this as a single iterator (that handles `spread_arg`), then
                    // `pass_argument` would be the loop body. It takes care to
                    // not advance `caller_iter` for ZSTs.
                    let mut locals_iter = body.args_iter();
                    while let Some(local) = locals_iter.next() {
                        let dest = self.eval_place(
                            &mir::Place::from(local)
                        )?;
                        if Some(local) == body.spread_arg {
                            // Must be a tuple
                            for i in 0..dest.layout.fields.count() {
                                let dest = self.place_field(dest, i as u64)?;
                                self.pass_argument(rust_abi, &mut caller_iter, dest)?;
                            }
                        } else {
                            // Normal argument
                            self.pass_argument(rust_abi, &mut caller_iter, dest)?;
                        }
                    }
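                    // `spread_arg` is the callee-side counterpart of the untupling
                    // above: an `extern "rust-call"` fn *definition* keeps its
                    // arguments in one tuple local, which is marked as `spread_arg`
                    // so that each of its fields gets filled from a separate
                    // incoming argument, as the loop above does.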
                    // Now we should have no more caller args
                    if caller_iter.next().is_some() {
                        trace!("Caller has passed too many args");
                        return err!(FunctionArgCountMismatch);
                    }
                    // Don't forget to check the return type!
                    if let Some(caller_ret) = dest {
                        let callee_ret = self.eval_place(
                            &mir::Place::RETURN_PLACE
                        )?;
                        if !Self::check_argument_compat(
                            rust_abi,
                            caller_ret.layout,
                            callee_ret.layout,
                        ) {
                            return err!(
                                FunctionRetMismatch(caller_ret.layout.ty, callee_ret.layout.ty)
                            );
                        }
                    } else {
                        let local = mir::RETURN_PLACE;
                        let ty = self.frame().body.local_decls[local].ty;
                        if !self.tcx.is_ty_uninhabited_from_any_module(ty) {
                            return err!(FunctionRetMismatch(self.tcx.types.never, ty));
                        }
                    }
                    Ok(())
                })();
                match res {
                    Err(err) => {
                        self.stack.pop();
                        Err(err)
                    }
                    Ok(v) => Ok(v)
                }
            }
            // cannot use the shim here, because that will only result in infinite recursion
            ty::InstanceDef::Virtual(_, idx) => {
                let mut args = args.to_vec();
                let ptr_size = self.pointer_size();
                // We have to implement all "object safe receivers".  Currently we
                // support built-in pointers (&, &mut, Box) as well as unsized-self.  We do
                // not yet support custom self types.
                // Also see librustc_codegen_llvm/abi.rs and librustc_codegen_llvm/mir/block.rs.
                let receiver_place = match args[0].layout.ty.builtin_deref(true) {
                    Some(_) => {
                        // Built-in pointer.
                        self.deref_operand(args[0])?
                    }
                    None => {
                        // Unsized self.
                        args[0].assert_mem_place()
                    }
                };
                // Find and consult vtable
                let vtable = receiver_place.vtable();
                let vtable_slot = vtable.ptr_offset(ptr_size * (idx as u64 + 3), self)?;
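                // Slots 0 through 2 of a vtable hold the drop-in-place function,
                // size, and alignment of the concrete type; method pointers start
                // at slot 3, hence the `idx + 3` above.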
                let vtable_slot = self.memory.check_ptr_access(
                    vtable_slot,
                    ptr_size,
                    self.tcx.data_layout.pointer_align.abi,
                )?.expect("cannot be a ZST");
                let fn_ptr = self.memory.get(vtable_slot.alloc_id)?
                    .read_ptr_sized(self, vtable_slot)?.not_undef()?;
                let fn_val = self.memory.get_fn(fn_ptr)?;

                // `*mut receiver_place.layout.ty` is almost the layout that we
                // want for args[0]: We have to project to field 0 because we want
                // a thin pointer.
                assert!(receiver_place.layout.is_unsized());
                let receiver_ptr_ty = self.tcx.mk_mut_ptr(receiver_place.layout.ty);
                let this_receiver_ptr = self.layout_of(receiver_ptr_ty)?.field(self, 0)?;
                // Adjust receiver argument.
                args[0] = OpTy::from(ImmTy {
                    layout: this_receiver_ptr,
                    imm: Immediate::Scalar(receiver_place.ptr.into())
                });
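                // For example, if the receiver is a `&dyn Trait` (a fat pointer of
                // data pointer plus vtable pointer), field 0 of the `*mut dyn Trait`
                // layout is the thin data pointer, which is what the concrete
                // method expects as its `self` argument.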
                trace!("Patched self operand to {:#?}", args[0]);
                // recurse with concrete function
                self.eval_fn_call(fn_val, span, caller_abi, &args, dest, ret)
            }
        }
    }

    fn drop_in_place(
        &mut self,
        place: PlaceTy<'tcx, M::PointerTag>,
        instance: ty::Instance<'tcx>,
        span: Span,
        target: mir::BasicBlock,
    ) -> InterpResult<'tcx> {
        trace!("drop_in_place: {:?},\n  {:?}, {:?}", *place, place.layout.ty, instance);
        // We take the address of the object.  This may well be unaligned, which is fine
        // for us here.  However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        let (instance, place) = match place.layout.ty.sty {
            ty::Dynamic(..) => {
                // Dropping a trait object.
                self.unpack_dyn_trait(place)?
            }
            _ => (instance, place),
        };

        let arg = ImmTy {
            imm: place.to_ref(),
            layout: self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
        };

        let ty = self.tcx.mk_unit(); // return type is ()
        let dest = MPlaceTy::dangling(self.layout_of(ty)?, self);
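        // `()` is a ZST, so the dangling destination is never actually written
        // to; it merely satisfies `eval_fn_call`'s need for a return place.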

        self.eval_fn_call(
            FnVal::Instance(instance),
            span,
            Abi::Rust,
            &[arg.into()],
            Some(dest.into()),
            Some(target),
        )
    }
}