src/librustc_mir/interpret/terminator.rs
use std::borrow::Cow;

use rustc::ty::layout::{self, LayoutOf, TyLayout};
use rustc::ty::Instance;
use rustc::{mir, ty};
use rustc_span::source_map::Span;
use rustc_target::spec::abi::Abi;

use super::{
    FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, PlaceTy, StackPopCleanup,
};

impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    pub(super) fn eval_terminator(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
    ) -> InterpResult<'tcx> {
        use rustc::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.frame().return_place.map(|r| self.dump_place(*r));
                self.pop_stack_frame(/* unwinding */ false)?
            }

            Goto { target } => self.go_to_block(target),

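            // Illustrative sketch (block names made up): `match x { 0 => .., 1 => .., _ => .. }`
            // lowers to roughly `switchInt(x) -> [0: bb1, 1: bb2, otherwise: bb3]`,
            // i.e. `values` holds `[0, 1]` and `targets` holds `[bb1, bb2, bb3]`,
            // with the `otherwise` target always stored last.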
            SwitchInt { ref discr, ref values, ref targets, .. } => {
                let discr = self.read_immediate(self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);

                // Branch to the `otherwise` case by default, if no match is found.
                let mut target_block = targets[targets.len() - 1];

                for (index, &const_int) in values.iter().enumerate() {
                    // Compare using binary_op, to also support pointer values
                    let res = self
                        .overflowing_binary_op(
                            mir::BinOp::Eq,
                            discr,
                            ImmTy::from_uint(const_int, discr.layout),
                        )?
                        .0;
                    if res.to_bool()? {
                        target_block = targets[index];
                        break;
                    }
                }

                self.go_to_block(target_block);
            }

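            // Illustrative sketch: `y = f(a, b)` becomes a `Call` terminator with
            // `destination: Some((y, bb_next))`; a call to a function returning `!`
            // has `destination: None`, and `cleanup` (if present) names the block
            // to unwind into should the callee panic.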
            Call { ref func, ref args, ref destination, ref cleanup, .. } => {
                let func = self.eval_operand(func, None)?;
                let (fn_val, abi) = match func.layout.ty.kind {
                    ty::FnPtr(sig) => {
                        let caller_abi = sig.abi();
                        let fn_ptr = self.read_scalar(func)?.not_undef()?;
                        let fn_val = self.memory.get_fn(fn_ptr)?;
                        (fn_val, caller_abi)
                    }
                    ty::FnDef(def_id, substs) => {
                        let sig = func.layout.ty.fn_sig(*self.tcx);
                        (FnVal::Instance(self.resolve(def_id, substs)?), sig.abi())
                    }
                    _ => bug!("invalid callee of type {:?}", func.layout.ty),
                };
                let args = self.eval_operands(args)?;
                let ret = match destination {
                    Some((dest, ret)) => Some((self.eval_place(dest)?, *ret)),
                    None => None,
                };
                self.eval_fn_call(
                    fn_val,
                    terminator.source_info.span,
                    abi,
                    &args[..],
                    ret,
                    *cleanup,
                )?;
            }

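            // Illustrative sketch: when some `x: String` goes out of scope, MIR emits
            // `Drop { location: x, target: bb_next, unwind: Some(bb_cleanup) }`;
            // below, this resolves to the `drop_in_place` instance for `String`.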
            Drop { ref location, target, unwind } => {
                // FIXME(CTFE): forbid drop in const eval
                let place = self.eval_place(location)?;
                let ty = place.layout.ty;
                trace!("TerminatorKind::drop: {:?}, type {}", location, ty);

                let instance = Instance::resolve_drop_in_place(*self.tcx, ty);
                self.drop_in_place(place, instance, terminator.source_info.span, target, unwind)?;
            }

            Assert { ref cond, expected, ref msg, target, cleanup } => {
                let cond_val =
                    self.read_immediate(self.eval_operand(cond, None)?)?.to_scalar()?.to_bool()?;
                if expected == cond_val {
                    self.go_to_block(target);
                } else {
                    M::assert_panic(self, terminator.source_info.span, msg, cleanup)?;
                }
            }

            // When we encounter Resume, we've finished unwinding
            // cleanup for the current stack frame. We pop it in order
            // to continue unwinding the next frame.
            Resume => {
                trace!("unwinding: resuming from cleanup");
                // By definition, a Resume terminator means
                // that we're unwinding.
                self.pop_stack_frame(/* unwinding */ true)?;
                return Ok(());
            }

            // It is UB to ever encounter this.
            Unreachable => throw_ub!(Unreachable),

            // These should never occur for MIR we actually run.
            DropAndReplace { .. } | FalseEdges { .. } | FalseUnwind { .. } => {
                bug!("{:#?} should have been eliminated by MIR pass", terminator.kind)
            }

            // These are not (yet) supported. It is unclear if they even can occur in
            // MIR that we actually run.
            Yield { .. } | GeneratorDrop | Abort => {
                throw_unsup_format!("Unsupported terminator kind: {:#?}", terminator.kind)
            }
        }

        Ok(())
    }

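    // Illustrative sketch of the check below: under the Rust ABI, passing a
    // `NonZeroU32` where the callee expects a `u32` is accepted, since both
    // lower to the same scalar and only their valid ranges differ; passing an
    // `f32` where a `u32` is expected is rejected, since the scalar kinds differ.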
    fn check_argument_compat(
        rust_abi: bool,
        caller: TyLayout<'tcx>,
        callee: TyLayout<'tcx>,
    ) -> bool {
        if caller.ty == callee.ty {
            // No question
            return true;
        }
        if !rust_abi {
            // Don't risk anything
            return false;
        }
        // Compare layout
        match (&caller.abi, &callee.abi) {
            // Different valid ranges are okay (once we enforce validity,
            // that will take care to make it UB to leave the range, just
            // like for transmute).
            (layout::Abi::Scalar(ref caller), layout::Abi::Scalar(ref callee)) => {
                caller.value == callee.value
            }
            (
                layout::Abi::ScalarPair(ref caller1, ref caller2),
                layout::Abi::ScalarPair(ref callee1, ref callee2),
            ) => caller1.value == callee1.value && caller2.value == callee2.value,
            // Be conservative
            _ => false,
        }
    }

    /// Pass a single argument, checking the types for compatibility.
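    ///
    /// Illustrative note: under the Rust ABI a zero-sized callee argument (e.g. of
    /// type `()` or `[u8; 0]`) consumes nothing from `caller_arg`; the caller-side
    /// iterator has already been filtered to skip ZSTs, so the counts still match.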
    fn pass_argument(
        &mut self,
        rust_abi: bool,
        caller_arg: &mut impl Iterator<Item = OpTy<'tcx, M::PointerTag>>,
        callee_arg: PlaceTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx> {
        if rust_abi && callee_arg.layout.is_zst() {
            // Nothing to do.
            trace!("Skipping callee ZST");
            return Ok(());
        }
        let caller_arg = caller_arg.next().ok_or_else(|| err_unsup!(FunctionArgCountMismatch))?;
        if rust_abi {
            assert!(!caller_arg.layout.is_zst(), "ZSTs must have been already filtered out");
        }
        // Now, check
        if !Self::check_argument_compat(rust_abi, caller_arg.layout, callee_arg.layout) {
            throw_unsup!(FunctionArgMismatch(caller_arg.layout.ty, callee_arg.layout.ty))
        }
        // We allow some transmutes here
        self.copy_op_transmute(caller_arg, callee_arg)
    }

    /// Call this function -- pushing the stack frame and initializing the arguments.
    fn eval_fn_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        span: Span,
        caller_abi: Abi,
        args: &[OpTy<'tcx, M::PointerTag>],
        ret: Option<(PlaceTy<'tcx, M::PointerTag>, mir::BasicBlock)>,
        unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        trace!("eval_fn_call: {:#?}", fn_val);

        let instance = match fn_val {
            FnVal::Instance(instance) => instance,
            FnVal::Other(extra) => {
                return M::call_extra_fn(self, extra, args, ret, unwind);
            }
        };

        // ABI check
        {
            let callee_abi = {
                let instance_ty = instance.ty_env(*self.tcx, self.param_env);
                match instance_ty.kind {
                    ty::FnDef(..) => instance_ty.fn_sig(*self.tcx).abi(),
                    ty::Closure(..) => Abi::RustCall,
                    ty::Generator(..) => Abi::Rust,
                    _ => bug!("unexpected callee ty: {:?}", instance_ty),
                }
            };
            let normalize_abi = |abi| match abi {
                Abi::Rust | Abi::RustCall | Abi::RustIntrinsic | Abi::PlatformIntrinsic =>
                // These are all the same ABI, really.
                {
                    Abi::Rust
                }
                abi => abi,
            };
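            // Illustrative consequence: an `extern "rust-call"` closure body may be
            // entered from a plain Rust-ABI caller, since both normalize to `Abi::Rust`,
            // while a Rust caller invoking e.g. an `extern "C"` function pointer is
            // rejected just below.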
            if normalize_abi(caller_abi) != normalize_abi(callee_abi) {
                throw_unsup!(FunctionAbiMismatch(caller_abi, callee_abi))
            }
        }

        match instance.def {
            ty::InstanceDef::Intrinsic(..) => {
                assert!(caller_abi == Abi::RustIntrinsic || caller_abi == Abi::PlatformIntrinsic);
                return M::call_intrinsic(self, span, instance, args, ret, unwind);
            }
            ty::InstanceDef::VtableShim(..)
            | ty::InstanceDef::ReifyShim(..)
            | ty::InstanceDef::ClosureOnceShim { .. }
            | ty::InstanceDef::FnPtrShim(..)
            | ty::InstanceDef::DropGlue(..)
            | ty::InstanceDef::CloneShim(..)
            | ty::InstanceDef::Item(_) => {
                // We need MIR for this fn
                let body = match M::find_mir_or_eval_fn(self, span, instance, args, ret, unwind)? {
                    Some(body) => body,
                    None => return Ok(()),
                };

                self.push_stack_frame(
                    instance,
                    span,
                    body,
                    ret.map(|p| p.0),
                    StackPopCleanup::Goto { ret: ret.map(|p| p.1), unwind },
                )?;

                // We want to pop this frame again in case there was an error, to put
                // the blame in the right location.  Until the 2018 edition is used in
                // the compiler, we have to do this with an immediately invoked function.
                let res =
                    (|| {
                        trace!(
                            "caller ABI: {:?}, args: {:#?}",
                            caller_abi,
                            args.iter()
                                .map(|arg| (arg.layout.ty, format!("{:?}", **arg)))
                                .collect::<Vec<_>>()
                        );
                        trace!(
                            "spread_arg: {:?}, locals: {:#?}",
                            body.spread_arg,
                            body.args_iter()
                                .map(|local| (
                                    local,
                                    self.layout_of_local(self.frame(), local, None).unwrap().ty
                                ))
                                .collect::<Vec<_>>()
                        );

                        // Figure out how to pass which arguments.
                        // The Rust ABI is special: ZSTs get skipped.
                        let rust_abi = match caller_abi {
                            Abi::Rust | Abi::RustCall => true,
                            _ => false,
                        };
                        // We have two iterators: Where the arguments come from,
                        // and where they go to.

                        // For where they come from: If the ABI is RustCall, we untuple the
                        // last incoming argument.  These two iterators do not have the same type,
                        // so to keep the code paths uniform we accept an allocation
                        // (for RustCall ABI only).
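                        // Illustrative sketch: a call `f(a, b)` dispatched through `Fn::call`
                        // arrives here (RustCall ABI) as `args == [&f, (a, b)]`; untupling
                        // turns that into the flat sequence `&f, a, b` for the callee.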
                        let caller_args: Cow<'_, [OpTy<'tcx, M::PointerTag>]> =
                            if caller_abi == Abi::RustCall && !args.is_empty() {
                                // Untuple
                                let (&untuple_arg, args) = args.split_last().unwrap();
                                trace!("eval_fn_call: Will pass last argument by untupling");
                                Cow::from(
                                    args.iter()
                                        .map(|&a| Ok(a))
                                        .chain(
                                            (0..untuple_arg.layout.fields.count())
                                                .map(|i| self.operand_field(untuple_arg, i as u64)),
                                        )
                                        .collect::<InterpResult<'_, Vec<OpTy<'tcx, M::PointerTag>>>>()?,
                                )
                            } else {
                                // Plain arg passing
                                Cow::from(args)
                            };
                        // Skip ZSTs
                        let mut caller_iter = caller_args
                            .iter()
                            .filter(|op| !rust_abi || !op.layout.is_zst())
                            .copied();

                        // Now we have to spread them out across the callee's locals,
                        // taking into account the `spread_arg`.  If we could write
                        // this as a single iterator (that handles `spread_arg`), then
                        // `pass_argument` would be the loop body. It takes care to
                        // not advance `caller_iter` for ZSTs.
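                        // Illustrative sketch: a closure body `|a: u32, b: bool| ..` has
                        // argument locals roughly `_1: &{closure}` and `_2: (u32, bool)`
                        // with `spread_arg == Some(_2)`, so `_2.0` and `_2.1` each
                        // receive one caller argument below.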
                        for local in body.args_iter() {
                            let dest = self.eval_place(&mir::Place::from(local))?;
                            if Some(local) == body.spread_arg {
                                // Must be a tuple
                                for i in 0..dest.layout.fields.count() {
                                    let dest = self.place_field(dest, i as u64)?;
                                    self.pass_argument(rust_abi, &mut caller_iter, dest)?;
                                }
                            } else {
                                // Normal argument
                                self.pass_argument(rust_abi, &mut caller_iter, dest)?;
                            }
                        }
                        // Now we should have no more caller args
                        if caller_iter.next().is_some() {
                            trace!("Caller has passed too many args");
                            throw_unsup!(FunctionArgCountMismatch)
                        }
                        // Don't forget to check the return type!
                        if let Some((caller_ret, _)) = ret {
                            let callee_ret = self.eval_place(&mir::Place::return_place())?;
                            if !Self::check_argument_compat(
                                rust_abi,
                                caller_ret.layout,
                                callee_ret.layout,
                            ) {
                                throw_unsup!(FunctionRetMismatch(
                                    caller_ret.layout.ty,
                                    callee_ret.layout.ty
                                ))
                            }
                        } else {
                            let local = mir::RETURN_PLACE;
                            let callee_layout = self.layout_of_local(self.frame(), local, None)?;
                            if !callee_layout.abi.is_uninhabited() {
                                throw_unsup!(FunctionRetMismatch(
                                    self.tcx.types.never,
                                    callee_layout.ty
                                ))
                            }
                        }
                        Ok(())
                    })();
                match res {
                    Err(err) => {
                        self.stack.pop();
                        Err(err)
                    }
                    Ok(v) => Ok(v),
                }
            }
            // cannot use the shim here, because that will only result in infinite recursion
            ty::InstanceDef::Virtual(_, idx) => {
                let mut args = args.to_vec();
                // We have to implement all "object safe receivers".  Currently we
                // support built-in pointers (&, &mut, Box) as well as unsized-self.  We do
                // not yet support custom self types.
                // Also see librustc_codegen_llvm/abi.rs and librustc_codegen_llvm/mir/block.rs.
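                // Illustrative sketch: for `obj.method()` with `obj: &dyn Trait`,
                // `idx` is the vtable slot of `method` and `args[0]` is the fat
                // `&dyn Trait` pointer; we dispatch by loading the concrete function
                // from the vtable and thinning the receiver pointer.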
                let receiver_place = match args[0].layout.ty.builtin_deref(true) {
                    Some(_) => {
                        // Built-in pointer.
                        self.deref_operand(args[0])?
                    }
                    None => {
                        // Unsized self.
                        args[0].assert_mem_place(self)
                    }
                };
                // Find and consult vtable
                let vtable = receiver_place.vtable();
                let fn_val = self.get_vtable_slot(vtable, idx)?;

                // `*mut receiver_place.layout.ty` is almost the layout that we
                // want for args[0]: We have to project to field 0 because we want
                // a thin pointer.
                assert!(receiver_place.layout.is_unsized());
                let receiver_ptr_ty = self.tcx.mk_mut_ptr(receiver_place.layout.ty);
                let this_receiver_ptr = self.layout_of(receiver_ptr_ty)?.field(self, 0)?;
                // Adjust receiver argument.
                args[0] =
                    OpTy::from(ImmTy { layout: this_receiver_ptr, imm: receiver_place.ptr.into() });
                trace!("Patched self operand to {:#?}", args[0]);
                // Recurse with the concrete function.
                self.eval_fn_call(fn_val, span, caller_abi, &args, ret, unwind)
            }
        }
    }

    fn drop_in_place(
        &mut self,
        place: PlaceTy<'tcx, M::PointerTag>,
        instance: ty::Instance<'tcx>,
        span: Span,
        target: mir::BasicBlock,
        unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        trace!("drop_in_place: {:?},\n  {:?}, {:?}", *place, place.layout.ty, instance);
        // We take the address of the object.  This may well be unaligned, which is fine
        // for us here.  However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        let (instance, place) = match place.layout.ty.kind {
            ty::Dynamic(..) => {
                // Dropping a trait object.
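                // Illustrative sketch: if `place` has type `dyn Any`, `unpack_dyn_trait`
                // reads its vtable to recover the concrete type's drop instance and a
                // place with the concrete layout, which we then drop directly.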
                self.unpack_dyn_trait(place)?
            }
            _ => (instance, place),
        };

        let arg = ImmTy {
            imm: place.to_ref(),
            layout: self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
        };

        let ty = self.tcx.mk_unit(); // return type is ()
        let dest = MPlaceTy::dangling(self.layout_of(ty)?, self);

        self.eval_fn_call(
            FnVal::Instance(instance),
            span,
            Abi::Rust,
            &[arg.into()],
            Some((dest.into(), target)),
            unwind,
        )
    }
}