// src/librustc_mir/interpret/terminator.rs

use std::borrow::Cow;

use rustc::{mir, ty};
use rustc::ty::layout::{self, TyLayout, LayoutOf};
use syntax::source_map::Span;
use rustc_target::spec::abi::Abi;

use rustc::mir::interpret::{EvalResult, PointerArithmetic, InterpError, Scalar};
use super::{
    InterpretCx, Machine, Immediate, OpTy, ImmTy, PlaceTy, MPlaceTy, StackPopCleanup
};

impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
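    /// Jump to the given block, or fail with `Unreachable` if there is no target.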
    #[inline]
    pub fn goto_block(&mut self, target: Option<mir::BasicBlock>) -> EvalResult<'tcx> {
        if let Some(target) = target {
            self.frame_mut().block = target;
            self.frame_mut().stmt = 0;
            Ok(())
        } else {
            err!(Unreachable)
        }
    }

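    /// Evaluates a single MIR terminator: transfers control to the next block,
    /// pushes or pops a stack frame, or raises an interpretation error.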
    pub(super) fn eval_terminator(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
    ) -> EvalResult<'tcx> {
        use rustc::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.frame().return_place.map(|r| self.dump_place(*r));
                self.pop_stack_frame()?
            }

            Goto { target } => self.goto_block(Some(target))?,

            SwitchInt {
                ref discr,
                ref values,
                ref targets,
                ..
            } => {
                let discr = self.read_immediate(self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);

                // Branch to the `otherwise` case by default, if no match is found.
                let mut target_block = targets[targets.len() - 1];

                for (index, &const_int) in values.iter().enumerate() {
                    // Compare using binary_op, to also support pointer values
                    let const_int = Scalar::from_uint(const_int, discr.layout.size);
                    let (res, _) = self.binary_op(mir::BinOp::Eq,
                        discr,
                        ImmTy::from_scalar(const_int, discr.layout),
                    )?;
                    if res.to_bool()? {
                        target_block = targets[index];
                        break;
                    }
                }

                self.goto_block(Some(target_block))?;
            }

            Call {
                ref func,
                ref args,
                ref destination,
                ..
            } => {
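                // Diverging calls have no destination place and no return block.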
                let (dest, ret) = match *destination {
                    Some((ref lv, target)) => (Some(self.eval_place(lv)?), Some(target)),
                    None => (None, None),
                };

                let func = self.eval_operand(func, None)?;
                let (fn_def, abi) = match func.layout.ty.sty {
                    ty::FnPtr(sig) => {
                        let caller_abi = sig.abi();
                        let fn_ptr = self.read_scalar(func)?.to_ptr()?;
                        let instance = self.memory.get_fn(fn_ptr)?;
                        (instance, caller_abi)
                    }
                    ty::FnDef(def_id, substs) => {
                        let sig = func.layout.ty.fn_sig(*self.tcx);
                        (self.resolve(def_id, substs)?, sig.abi())
                    },
                    _ => {
                        let msg = format!("can't handle callee of type {:?}", func.layout.ty);
                        return err!(Unimplemented(msg));
                    }
                };
                let args = self.eval_operands(args)?;
                self.eval_fn_call(
                    fn_def,
                    terminator.source_info.span,
                    abi,
                    &args[..],
                    dest,
                    ret,
                )?;
            }

            Drop {
                ref location,
                target,
                ..
            } => {
                // FIXME(CTFE): forbid drop in const eval
                let place = self.eval_place(location)?;
                let ty = place.layout.ty;
                trace!("TerminatorKind::drop: {:?}, type {}", location, ty);

                let instance = crate::monomorphize::resolve_drop_in_place(*self.tcx, ty);
                self.drop_in_place(
                    place,
                    instance,
                    terminator.source_info.span,
                    target,
                )?;
            }

            Assert {
                ref cond,
                expected,
                ref msg,
                target,
                ..
            } => {
                let cond_val = self.read_immediate(self.eval_operand(cond, None)?)?
                    .to_scalar()?.to_bool()?;
                if expected == cond_val {
                    self.goto_block(Some(target))?;
                } else {
                    // Compute error message
                    use rustc::mir::interpret::InterpError::*;
                    return match *msg {
                        BoundsCheck { ref len, ref index } => {
                            let len = self.read_immediate(self.eval_operand(len, None)?)
                                .expect("can't eval len").to_scalar()?
                                .to_bits(self.memory().pointer_size())? as u64;
                            let index = self.read_immediate(self.eval_operand(index, None)?)
                                .expect("can't eval index").to_scalar()?
                                .to_bits(self.memory().pointer_size())? as u64;
                            err!(BoundsCheck { len, index })
                        }
                        Overflow(op) => Err(Overflow(op).into()),
                        OverflowNeg => Err(OverflowNeg.into()),
                        DivisionByZero => Err(DivisionByZero.into()),
                        RemainderByZero => Err(RemainderByZero.into()),
                        GeneratorResumedAfterReturn |
                        GeneratorResumedAfterPanic => unimplemented!(),
                        _ => bug!(),
                    };
                }
            }

            Yield { .. } |
            GeneratorDrop |
            DropAndReplace { .. } |
            Resume |
            Abort => unimplemented!("{:#?}", terminator.kind),
            FalseEdges { .. } => bug!("should have been eliminated by\
                                      `simplify_branches` mir pass"),
            FalseUnwind { .. } => bug!("should have been eliminated by\
                                       `simplify_branches` mir pass"),
            Unreachable => return err!(Unreachable),
        }

        Ok(())
    }

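    /// Checks whether a value with the caller's layout can be passed to a
    /// parameter with the callee's layout without conversion.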
    fn check_argument_compat(
        rust_abi: bool,
        caller: TyLayout<'tcx>,
        callee: TyLayout<'tcx>,
    ) -> bool {
        if caller.ty == callee.ty {
            // No question
            return true;
        }
        if !rust_abi {
            // Don't risk anything
            return false;
        }
        // Compare layout
        match (&caller.abi, &callee.abi) {
            // Different valid ranges are okay (once we enforce validity,
            // that will take care to make it UB to leave the range, just
            // like for transmute).
            (layout::Abi::Scalar(ref caller), layout::Abi::Scalar(ref callee)) =>
                caller.value == callee.value,
            (layout::Abi::ScalarPair(ref caller1, ref caller2),
             layout::Abi::ScalarPair(ref callee1, ref callee2)) =>
                caller1.value == callee1.value && caller2.value == callee2.value,
            // Be conservative
            _ => false
        }
    }

    /// Pass a single argument, checking the types for compatibility.
    fn pass_argument(
        &mut self,
        rust_abi: bool,
        caller_arg: &mut impl Iterator<Item=OpTy<'tcx, M::PointerTag>>,
        callee_arg: PlaceTy<'tcx, M::PointerTag>,
    ) -> EvalResult<'tcx> {
        if rust_abi && callee_arg.layout.is_zst() {
            // Nothing to do.
            trace!("Skipping callee ZST");
            return Ok(());
        }
        let caller_arg = caller_arg.next()
            .ok_or_else(|| InterpError::FunctionArgCountMismatch)?;
        if rust_abi {
            debug_assert!(!caller_arg.layout.is_zst(), "ZSTs must have been already filtered out");
        }
        // Now, check
        if !Self::check_argument_compat(rust_abi, caller_arg.layout, callee_arg.layout) {
            return err!(FunctionArgMismatch(caller_arg.layout.ty, callee_arg.layout.ty));
        }
        // We allow some transmutes here
        self.copy_op_transmute(caller_arg, callee_arg)
    }

    /// Call this function -- pushing the stack frame and initializing the arguments.
    fn eval_fn_call(
        &mut self,
        instance: ty::Instance<'tcx>,
        span: Span,
        caller_abi: Abi,
        args: &[OpTy<'tcx, M::PointerTag>],
        dest: Option<PlaceTy<'tcx, M::PointerTag>>,
        ret: Option<mir::BasicBlock>,
    ) -> EvalResult<'tcx> {
        trace!("eval_fn_call: {:#?}", instance);

        match instance.def {
            ty::InstanceDef::Intrinsic(..) => {
                if caller_abi != Abi::RustIntrinsic {
                    return err!(FunctionAbiMismatch(caller_abi, Abi::RustIntrinsic));
                }
                // The intrinsic itself cannot diverge, so if we got here without a return
                // place... (can happen e.g., for transmute returning `!`)
                let dest = match dest {
                    Some(dest) => dest,
                    None => return err!(Unreachable)
                };
                M::call_intrinsic(self, instance, args, dest)?;
                // No stack frame gets pushed, the main loop will just act as if the
                // call completed.
                self.goto_block(ret)?;
                self.dump_place(*dest);
                Ok(())
            }
            ty::InstanceDef::VtableShim(..) |
            ty::InstanceDef::ClosureOnceShim { .. } |
            ty::InstanceDef::FnPtrShim(..) |
            ty::InstanceDef::DropGlue(..) |
            ty::InstanceDef::CloneShim(..) |
            ty::InstanceDef::Item(_) => {
                // ABI check
                {
                    let callee_abi = {
                        let instance_ty = instance.ty(*self.tcx);
                        match instance_ty.sty {
                            ty::FnDef(..) =>
                                instance_ty.fn_sig(*self.tcx).abi(),
                            ty::Closure(..) => Abi::RustCall,
                            ty::Generator(..) => Abi::Rust,
                            _ => bug!("unexpected callee ty: {:?}", instance_ty),
                        }
                    };
                    // Rust and RustCall are compatible
                    let normalize_abi = |abi| if abi == Abi::RustCall { Abi::Rust } else { abi };
                    if normalize_abi(caller_abi) != normalize_abi(callee_abi) {
                        return err!(FunctionAbiMismatch(caller_abi, callee_abi));
                    }
                }

                // We need MIR for this fn
                let mir = match M::find_fn(self, instance, args, dest, ret)? {
                    Some(mir) => mir,
                    None => return Ok(()),
                };

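                // The new frame is cleaned up with `StackPopCleanup::Goto(ret)`, so
                // execution continues at `ret` once the callee returns.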
                self.push_stack_frame(
                    instance,
                    span,
                    mir,
                    dest,
                    StackPopCleanup::Goto(ret),
                )?;

                // We want to pop this frame again in case there was an error, to put
                // the blame in the right location.  Until the 2018 edition is used in
                // the compiler, we have to do this with an immediately invoked function.
                let res = (||{
                    trace!(
                        "caller ABI: {:?}, args: {:#?}",
                        caller_abi,
                        args.iter()
                            .map(|arg| (arg.layout.ty, format!("{:?}", **arg)))
                            .collect::<Vec<_>>()
                    );
                    trace!(
                        "spread_arg: {:?}, locals: {:#?}",
                        mir.spread_arg,
                        mir.args_iter()
                            .map(|local|
                                (local, self.layout_of_local(self.frame(), local, None).unwrap().ty)
                            )
                            .collect::<Vec<_>>()
                    );

                    // Figure out how to pass which arguments.
                    // The Rust ABI is special: ZSTs get skipped.
                    let rust_abi = match caller_abi {
                        Abi::Rust | Abi::RustCall => true,
                        _ => false
                    };
                    // We have two iterators: Where the arguments come from,
                    // and where they go to.

                    // For where they come from: If the ABI is RustCall, we untuple the
                    // last incoming argument.  These two iterators do not have the same type,
                    // so to keep the code paths uniform we accept an allocation
                    // (for RustCall ABI only).
                    let caller_args : Cow<'_, [OpTy<'tcx, M::PointerTag>]> =
                        if caller_abi == Abi::RustCall && !args.is_empty() {
                            // Untuple
                            let (&untuple_arg, args) = args.split_last().unwrap();
                            trace!("eval_fn_call: Will pass last argument by untupling");
                            Cow::from(args.iter().map(|&a| Ok(a))
                                .chain((0..untuple_arg.layout.fields.count()).into_iter()
                                    .map(|i| self.operand_field(untuple_arg, i as u64))
                                )
                                .collect::<EvalResult<'_, Vec<OpTy<'tcx, M::PointerTag>>>>()?)
                        } else {
                            // Plain arg passing
                            Cow::from(args)
                        };
                    // Skip ZSTs
                    let mut caller_iter = caller_args.iter()
                        .filter(|op| !rust_abi || !op.layout.is_zst())
                        .map(|op| *op);

                    // Now we have to spread them out across the callee's locals,
                    // taking into account the `spread_arg`.  If we could write
                    // this as a single iterator (that handles `spread_arg`), then
                    // `pass_argument` would be the loop body. It takes care to
                    // not advance `caller_iter` for ZSTs.
                    let mut locals_iter = mir.args_iter();
                    while let Some(local) = locals_iter.next() {
                        let dest = self.eval_place(
                            &mir::Place::Base(mir::PlaceBase::Local(local))
                        )?;
                        if Some(local) == mir.spread_arg {
                            // Must be a tuple
                            for i in 0..dest.layout.fields.count() {
                                let dest = self.place_field(dest, i as u64)?;
                                self.pass_argument(rust_abi, &mut caller_iter, dest)?;
                            }
                        } else {
                            // Normal argument
                            self.pass_argument(rust_abi, &mut caller_iter, dest)?;
                        }
                    }
                    // Now we should have no more caller args
                    if caller_iter.next().is_some() {
                        trace!("Caller has passed too many args");
                        return err!(FunctionArgCountMismatch);
                    }
                    // Don't forget to check the return type!
                    if let Some(caller_ret) = dest {
                        let callee_ret = self.eval_place(
                            &mir::Place::RETURN_PLACE
                        )?;
                        if !Self::check_argument_compat(
                            rust_abi,
                            caller_ret.layout,
                            callee_ret.layout,
                        ) {
                            return err!(FunctionRetMismatch(
                                caller_ret.layout.ty, callee_ret.layout.ty
                            ));
                        }
                    } else {
                        let callee_layout =
                            self.layout_of_local(self.frame(), mir::RETURN_PLACE, None)?;
                        if !callee_layout.abi.is_uninhabited() {
                            return err!(FunctionRetMismatch(
                                self.tcx.types.never, callee_layout.ty
                            ));
                        }
                    }
                    Ok(())
                })();
                match res {
                    Err(err) => {
                        self.stack.pop();
                        Err(err)
                    }
                    Ok(v) => Ok(v)
                }
            }
            // cannot use the shim here, because that will only result in infinite recursion
            ty::InstanceDef::Virtual(_, idx) => {
                let mut args = args.to_vec();
                let ptr_size = self.pointer_size();
                // We have to implement all "object safe receivers".  Currently we
                // support built-in pointers (&, &mut, Box) as well as unsized-self.  We do
                // not yet support custom self types.
                // Also see librustc_codegen_llvm/abi.rs and librustc_codegen_llvm/mir/block.rs.
                let receiver_place = match args[0].layout.ty.builtin_deref(true) {
                    Some(_) => {
                        // Built-in pointer.
                        self.deref_operand(args[0])?
                    }
                    None => {
                        // Unsized self.
                        args[0].to_mem_place()
                    }
                };
                // Find and consult vtable
                let vtable = receiver_place.vtable()?;
                self.memory.check_align(vtable.into(), self.tcx.data_layout.pointer_align.abi)?;
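                // The first three vtable slots hold drop glue, size and align;
                // method pointers start at slot 3, hence the `idx + 3` offset.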
                let fn_ptr = self.memory.get(vtable.alloc_id)?.read_ptr_sized(
                    self,
                    vtable.offset(ptr_size * (idx as u64 + 3), self)?,
                )?.to_ptr()?;
                let instance = self.memory.get_fn(fn_ptr)?;

                // `*mut receiver_place.layout.ty` is almost the layout that we
                // want for args[0]: We have to project to field 0 because we want
                // a thin pointer.
                assert!(receiver_place.layout.is_unsized());
                let receiver_ptr_ty = self.tcx.mk_mut_ptr(receiver_place.layout.ty);
                let this_receiver_ptr = self.layout_of(receiver_ptr_ty)?.field(self, 0)?;
                // Adjust receiver argument.
                args[0] = OpTy::from(ImmTy {
                    layout: this_receiver_ptr,
                    imm: Immediate::Scalar(receiver_place.ptr.into())
                });
                trace!("Patched self operand to {:#?}", args[0]);
                // recurse with concrete function
                self.eval_fn_call(instance, span, caller_abi, &args, dest, ret)
            }
        }
    }

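    /// Evaluates the drop glue of `instance` on the value at `place`, then
    /// continues execution at `target`.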
    fn drop_in_place(
        &mut self,
        place: PlaceTy<'tcx, M::PointerTag>,
        instance: ty::Instance<'tcx>,
        span: Span,
        target: mir::BasicBlock,
    ) -> EvalResult<'tcx> {
        trace!("drop_in_place: {:?},\n  {:?}, {:?}", *place, place.layout.ty, instance);
        // We take the address of the object.  This may well be unaligned, which is fine
        // for us here.  However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        let (instance, place) = match place.layout.ty.sty {
            ty::Dynamic(..) => {
                // Dropping a trait object.
                self.unpack_dyn_trait(place)?
            }
            _ => (instance, place),
        };

        let arg = ImmTy {
            imm: place.to_ref(),
            layout: self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
        };

        let ty = self.tcx.mk_unit(); // return type is ()
        let dest = MPlaceTy::dangling(self.layout_of(ty)?, self);

        self.eval_fn_call(
            instance,
            span,
            Abi::Rust,
            &[arg.into()],
            Some(dest.into()),
            Some(target),
        )
    }
}