// compiler/rustc_const_eval/src/interpret/terminator.rs
use std::borrow::Cow;

use rustc_ast::ast::InlineAsmOptions;
use rustc_middle::ty::layout::{FnAbiOf, LayoutOf};
use rustc_middle::ty::Instance;
use rustc_middle::{
    mir,
    ty::{self, Ty},
};
use rustc_target::abi;
use rustc_target::abi::call::{ArgAbi, ArgAttribute, ArgAttributes, FnAbi, PassMode};
use rustc_target::spec::abi::Abi;

use super::{
    FnVal, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy, Machine, MemoryKind, OpTy, Operand,
    PlaceTy, Scalar, StackPopCleanup, StackPopUnwind,
};

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
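    /// Evaluate a single MIR terminator: depending on its kind, this continues
    /// within the current frame (`Goto`, `SwitchInt`, ...), pushes a new frame
    /// (`Call`, `Drop`), or pops one (`Return`, `Resume`).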
    pub(super) fn eval_terminator(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
    ) -> InterpResult<'tcx> {
        use rustc_middle::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.pop_stack_frame(/* unwinding */ false)?
            }

            Goto { target } => self.go_to_block(target),

            SwitchInt { ref discr, ref targets, switch_ty } => {
                let discr = self.read_immediate(&self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);
                assert_eq!(discr.layout.ty, switch_ty);

                // Branch to the `otherwise` case by default, if no match is found.
                assert!(!targets.iter().is_empty());
                let mut target_block = targets.otherwise();

                for (const_int, target) in targets.iter() {
                    // Compare using MIR BinOp::Eq, to also support pointer values.
                    // (Avoiding `self.binary_op` as that does some redundant layout computation.)
                    let res = self
                        .overflowing_binary_op(
                            mir::BinOp::Eq,
                            &discr,
                            &ImmTy::from_uint(const_int, discr.layout),
                        )?
                        .0;
                    if res.to_bool()? {
                        target_block = target;
                        break;
                    }
                }

                self.go_to_block(target_block);
            }

            Call {
                ref func,
                ref args,
                destination,
                target,
                ref cleanup,
                from_hir_call: _,
                fn_span: _,
            } => {
                let old_stack = self.frame_idx();
                let old_loc = self.frame().loc;
                let func = self.eval_operand(func, None)?;
                let args = self.eval_operands(args)?;

                let fn_sig_binder = func.layout.ty.fn_sig(*self.tcx);
                let fn_sig =
                    self.tcx.normalize_erasing_late_bound_regions(self.param_env, fn_sig_binder);
                let extra_args = &args[fn_sig.inputs().len()..];
                let extra_args = self.tcx.mk_type_list(extra_args.iter().map(|arg| arg.layout.ty));

                let (fn_val, fn_abi, with_caller_location) = match *func.layout.ty.kind() {
                    ty::FnPtr(_sig) => {
                        let fn_ptr = self.read_pointer(&func)?;
                        let fn_val = self.get_ptr_fn(fn_ptr)?;
                        (fn_val, self.fn_abi_of_fn_ptr(fn_sig_binder, extra_args)?, false)
                    }
                    ty::FnDef(def_id, substs) => {
                        let instance =
                            self.resolve(ty::WithOptConstParam::unknown(def_id), substs)?;
                        (
                            FnVal::Instance(instance),
                            self.fn_abi_of_instance(instance, extra_args)?,
                            instance.def.requires_caller_location(*self.tcx),
                        )
                    }
                    _ => span_bug!(
                        terminator.source_info.span,
                        "invalid callee of type {:?}",
                        func.layout.ty
                    ),
                };

                let destination = self.eval_place(destination)?;
                self.eval_fn_call(
                    fn_val,
                    (fn_sig.abi, fn_abi),
                    &args,
                    with_caller_location,
                    &destination,
                    target,
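                    // Pick the unwind action: use the MIR `cleanup` block only if the
                    // callee's ABI says it can unwind at all; otherwise unwinding out
                    // of this call is not allowed.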
                    match (cleanup, fn_abi.can_unwind) {
                        (Some(cleanup), true) => StackPopUnwind::Cleanup(*cleanup),
                        (None, true) => StackPopUnwind::Skip,
                        (_, false) => StackPopUnwind::NotAllowed,
                    },
                )?;
                // Sanity-check that `eval_fn_call` either pushed a new frame or
                // did a jump to another block.
                if self.frame_idx() == old_stack && self.frame().loc == old_loc {
                    span_bug!(terminator.source_info.span, "evaluating this call made no progress");
                }
            }

            Drop { place, target, unwind } => {
                let place = self.eval_place(place)?;
                let ty = place.layout.ty;
                trace!("TerminatorKind::drop: {:?}, type {}", place, ty);

                let instance = Instance::resolve_drop_in_place(*self.tcx, ty);
                self.drop_in_place(&place, instance, target, unwind)?;
            }

            Assert { ref cond, expected, ref msg, target, cleanup } => {
                let cond_val = self.read_scalar(&self.eval_operand(cond, None)?)?.to_bool()?;
                if expected == cond_val {
                    self.go_to_block(target);
                } else {
                    M::assert_panic(self, msg, cleanup)?;
                }
            }

            Abort => {
                M::abort(self, "the program aborted execution".to_owned())?;
            }

            // When we encounter Resume, we've finished unwinding
            // cleanup for the current stack frame. We pop it in order
            // to continue unwinding the next frame
            Resume => {
                trace!("unwinding: resuming from cleanup");
                // By definition, a Resume terminator means
                // that we're unwinding
                self.pop_stack_frame(/* unwinding */ true)?;
                return Ok(());
            }

            // It is UB to ever encounter this.
            Unreachable => throw_ub!(Unreachable),

            // These should never occur for MIR we actually run.
            DropAndReplace { .. }
            | FalseEdge { .. }
            | FalseUnwind { .. }
            | Yield { .. }
            | GeneratorDrop => span_bug!(
                terminator.source_info.span,
                "{:#?} should have been eliminated by MIR pass",
                terminator.kind
            ),

            InlineAsm { template, ref operands, options, destination, .. } => {
                M::eval_inline_asm(self, template, operands, options)?;
                if options.contains(InlineAsmOptions::NORETURN) {
                    throw_ub_format!("returned from noreturn inline assembly");
                }
                self.go_to_block(
                    destination
                        .expect("InlineAsm terminators without noreturn must have a destination"),
                )
            }
        }

        Ok(())
    }

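    /// Heuristically check whether the caller's and callee's `ArgAbi` for a
    /// single argument are compatible: the layouts must agree closely enough
    /// (same size, alignment, and scalar representation), and the `PassMode`s
    /// must match up to ABI-irrelevant attributes.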
    fn check_argument_compat(
        caller_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        callee_abi: &ArgAbi<'tcx, Ty<'tcx>>,
    ) -> bool {
        // Heuristic for type comparison.
        let layout_compat = || {
            if caller_abi.layout.ty == callee_abi.layout.ty {
                // No question
                return true;
            }
            if caller_abi.layout.is_unsized() || callee_abi.layout.is_unsized() {
                // No, no, no. We require the types to *exactly* match for unsized arguments. If
                // these are somehow unsized "in a different way" (say, `dyn Trait` vs `[i32]`),
                // then who knows what happens.
                return false;
            }
            if caller_abi.layout.size != callee_abi.layout.size
                || caller_abi.layout.align.abi != callee_abi.layout.align.abi
            {
                // This cannot go well...
                return false;
            }
            // The rest *should* be okay, but we are extra conservative.
            match (caller_abi.layout.abi, callee_abi.layout.abi) {
                // Different valid ranges are okay (once we enforce validity,
                // that will take care to make it UB to leave the range, just
                // like for transmute).
                (abi::Abi::Scalar(caller), abi::Abi::Scalar(callee)) => {
                    caller.primitive() == callee.primitive()
                }
                (
                    abi::Abi::ScalarPair(caller1, caller2),
                    abi::Abi::ScalarPair(callee1, callee2),
                ) => {
                    caller1.primitive() == callee1.primitive()
                        && caller2.primitive() == callee2.primitive()
                }
                // Be conservative
                _ => false,
            }
        };
        // When comparing the PassMode, we have to be smart about comparing the attributes.
        let arg_attr_compat = |a1: &ArgAttributes, a2: &ArgAttributes| {
            // There's only one regular attribute that matters for the call ABI: InReg.
            // Everything else is things like noalias, dereferenceable, nonnull, ...
            // (This also applies to pointee_size, pointee_align.)
            if a1.regular.contains(ArgAttribute::InReg) != a2.regular.contains(ArgAttribute::InReg)
            {
                return false;
            }
            // We also compare the sign extension mode -- this could let the callee make assumptions
            // about bits that conceptually were not even passed.
            if a1.arg_ext != a2.arg_ext {
                return false;
            }
            return true;
        };
        let mode_compat = || match (&caller_abi.mode, &callee_abi.mode) {
            (PassMode::Ignore, PassMode::Ignore) => true,
            (PassMode::Direct(a1), PassMode::Direct(a2)) => arg_attr_compat(a1, a2),
            (PassMode::Pair(a1, b1), PassMode::Pair(a2, b2)) => {
                arg_attr_compat(a1, a2) && arg_attr_compat(b1, b2)
            }
            (PassMode::Cast(c1, pad1), PassMode::Cast(c2, pad2)) => c1 == c2 && pad1 == pad2,
            (
                PassMode::Indirect { attrs: a1, extra_attrs: None, on_stack: s1 },
                PassMode::Indirect { attrs: a2, extra_attrs: None, on_stack: s2 },
            ) => arg_attr_compat(a1, a2) && s1 == s2,
            (
                PassMode::Indirect { attrs: a1, extra_attrs: Some(e1), on_stack: s1 },
                PassMode::Indirect { attrs: a2, extra_attrs: Some(e2), on_stack: s2 },
            ) => arg_attr_compat(a1, a2) && arg_attr_compat(e1, e2) && s1 == s2,
            _ => false,
        };

        if layout_compat() && mode_compat() {
            return true;
        }
        trace!(
            "check_argument_compat: incompatible ABIs:\ncaller: {:?}\ncallee: {:?}",
            caller_abi,
            callee_abi
        );
        return false;
    }

    /// Initialize a single callee argument, checking the types for compatibility.
    fn pass_argument<'x, 'y>(
        &mut self,
        caller_args: &mut impl Iterator<
            Item = (&'x OpTy<'tcx, M::Provenance>, &'y ArgAbi<'tcx, Ty<'tcx>>),
        >,
        callee_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        callee_arg: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx>
    where
        'tcx: 'x,
        'tcx: 'y,
    {
        if matches!(callee_abi.mode, PassMode::Ignore) {
            // This one is skipped.
            return Ok(());
        }
        // Find next caller arg.
        let (caller_arg, caller_abi) = caller_args.next().ok_or_else(|| {
            err_ub_format!("calling a function with fewer arguments than it requires")
        })?;
        // Now, check
        if !Self::check_argument_compat(caller_abi, callee_abi) {
            throw_ub_format!(
                "calling a function with argument of type {:?} passing data of type {:?}",
                callee_arg.layout.ty,
                caller_arg.layout.ty
            )
        }
        // Special handling for unsized parameters.
        if caller_arg.layout.is_unsized() {
            // `check_argument_compat` ensures that both have the same type, so we know they will use the metadata the same way.
            assert_eq!(caller_arg.layout.ty, callee_arg.layout.ty);
            // We have to properly pre-allocate the memory for the callee.
            // So let's tear down some wrappers.
            // This all has to be in memory, there are no immediate unsized values.
            let src = caller_arg.assert_mem_place();
            // The destination cannot be one of these "spread args".
            let (dest_frame, dest_local) = callee_arg.assert_local();
            // We are just initializing things, so there can't be anything here yet.
            assert!(matches!(
                *self.local_to_op(&self.stack()[dest_frame], dest_local, None)?,
                Operand::Immediate(Immediate::Uninit)
            ));
            // Allocate enough memory to hold `src`.
            let Some((size, align)) = self.size_and_align_of_mplace(&src)? else {
                span_bug!(self.cur_span(), "unsized fn arg with `extern` type tail should not be allowed")
            };
            let ptr = self.allocate_ptr(size, align, MemoryKind::Stack)?;
            let dest_place =
                MPlaceTy::from_aligned_ptr_with_meta(ptr.into(), callee_arg.layout, src.meta);
            // Update the local to be that new place.
            *M::access_local_mut(self, dest_frame, dest_local)? = Operand::Indirect(*dest_place);
        }
        // We allow some transmutes here.
        // FIXME: Depending on the PassMode, this should reset some padding to uninitialized. (This
        // is true for all `copy_op`, but there are a lot of special cases for argument passing
        // specifically.)
        self.copy_op(&caller_arg, callee_arg, /*allow_transmute*/ true)
    }

    /// Call this function -- pushing the stack frame and initializing the arguments.
    ///
    /// `caller_fn_abi` is used to determine if all the arguments are passed the proper way.
    /// However, we also need `caller_abi` to determine if we need to do untupling of arguments.
    ///
    /// `with_caller_location` indicates whether the caller passed a caller location. Miri
    /// implements caller locations without argument passing, but to match `FnAbi` we need to know
    /// when those arguments are present.
    pub(crate) fn eval_fn_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        (caller_abi, caller_fn_abi): (Abi, &FnAbi<'tcx, Ty<'tcx>>),
        args: &[OpTy<'tcx, M::Provenance>],
        with_caller_location: bool,
        destination: &PlaceTy<'tcx, M::Provenance>,
        target: Option<mir::BasicBlock>,
        mut unwind: StackPopUnwind,
    ) -> InterpResult<'tcx> {
        trace!("eval_fn_call: {:#?}", fn_val);

        let instance = match fn_val {
            FnVal::Instance(instance) => instance,
            FnVal::Other(extra) => {
                return M::call_extra_fn(
                    self,
                    extra,
                    caller_abi,
                    args,
                    destination,
                    target,
                    unwind,
                );
            }
        };

        match instance.def {
            ty::InstanceDef::Intrinsic(def_id) => {
                assert!(self.tcx.is_intrinsic(def_id));
                // caller_fn_abi is not relevant here, we interpret the arguments directly for each intrinsic.
                M::call_intrinsic(self, instance, args, destination, target, unwind)
            }
            ty::InstanceDef::VTableShim(..)
            | ty::InstanceDef::ReifyShim(..)
            | ty::InstanceDef::ClosureOnceShim { .. }
            | ty::InstanceDef::FnPtrShim(..)
            | ty::InstanceDef::DropGlue(..)
            | ty::InstanceDef::CloneShim(..)
            | ty::InstanceDef::Item(_) => {
                // We need MIR for this fn
                let Some((body, instance)) =
                    M::find_mir_or_eval_fn(self, instance, caller_abi, args, destination, target, unwind)? else {
                        return Ok(());
                    };

                // Compute callee information using the `instance` returned by
                // `find_mir_or_eval_fn`.
                // FIXME: for variadic support, do we have to somehow determine callee's extra_args?
                let callee_fn_abi = self.fn_abi_of_instance(instance, ty::List::empty())?;

                if callee_fn_abi.c_variadic || caller_fn_abi.c_variadic {
                    throw_unsup_format!("calling a c-variadic function is not supported");
                }

                if M::enforce_abi(self) {
                    if caller_fn_abi.conv != callee_fn_abi.conv {
                        throw_ub_format!(
                            "calling a function with calling convention {:?} using calling convention {:?}",
                            callee_fn_abi.conv,
                            caller_fn_abi.conv
                        )
                    }
                }

                if !matches!(unwind, StackPopUnwind::NotAllowed) && !callee_fn_abi.can_unwind {
                    // The callee cannot unwind.
                    unwind = StackPopUnwind::NotAllowed;
                }

                self.push_stack_frame(
                    instance,
                    body,
                    destination,
                    StackPopCleanup::Goto { ret: target, unwind },
                )?;

                // If an error is raised here, pop the frame again to get an accurate backtrace.
                // To this end, we wrap it all in a `try` block.
                let res: InterpResult<'tcx> = try {
                    trace!(
                        "caller ABI: {:?}, args: {:#?}",
                        caller_abi,
                        args.iter()
                            .map(|arg| (arg.layout.ty, format!("{:?}", **arg)))
                            .collect::<Vec<_>>()
                    );
                    trace!(
                        "spread_arg: {:?}, locals: {:#?}",
                        body.spread_arg,
                        body.args_iter()
                            .map(|local| (
                                local,
                                self.layout_of_local(self.frame(), local, None).unwrap().ty
                            ))
                            .collect::<Vec<_>>()
                    );

                    // In principle, we have two iterators: Where the arguments come from, and where
                    // they go to.

                    // For where they come from: If the ABI is RustCall, we untuple the
                    // last incoming argument.  These two iterators do not have the same type,
                    // so to keep the code paths uniform we accept an allocation
                    // (for RustCall ABI only).
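                    // For example, a closure called as `FnMut(i32, i32)` comes in via the
                    // "rust-call" ABI as `(closure, (a, b))`; the trailing tuple `(a, b)`
                    // is spread into two separate arguments here.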
                    let caller_args: Cow<'_, [OpTy<'tcx, M::Provenance>]> =
                        if caller_abi == Abi::RustCall && !args.is_empty() {
                            // Untuple
                            let (untuple_arg, args) = args.split_last().unwrap();
                            trace!("eval_fn_call: Will pass last argument by untupling");
                            Cow::from(
                                args.iter()
                                    .map(|a| Ok(a.clone()))
                                    .chain(
                                        (0..untuple_arg.layout.fields.count())
                                            .map(|i| self.operand_field(untuple_arg, i)),
                                    )
                                    .collect::<InterpResult<'_, Vec<OpTy<'tcx, M::Provenance>>>>(
                                    )?,
                            )
                        } else {
                            // Plain arg passing
                            Cow::from(args)
                        };
                    // If `with_caller_location` is set we pretend there is an extra argument (that
                    // we will not pass).
                    assert_eq!(
                        caller_args.len() + if with_caller_location { 1 } else { 0 },
                        caller_fn_abi.args.len(),
                        "mismatch between caller ABI and caller arguments",
                    );
                    let mut caller_args = caller_args
                        .iter()
                        .zip(caller_fn_abi.args.iter())
                        .filter(|arg_and_abi| !matches!(arg_and_abi.1.mode, PassMode::Ignore));

                    // Now we have to spread them out across the callee's locals,
                    // taking into account the `spread_arg`.  If we could write
                    // this as a single iterator (that handles `spread_arg`), then
                    // `pass_argument` would be the loop body. It takes care to
                    // not advance `caller_args` for ZSTs.
                    let mut callee_args_abis = callee_fn_abi.args.iter();
                    for local in body.args_iter() {
                        let dest = self.eval_place(mir::Place::from(local))?;
                        if Some(local) == body.spread_arg {
                            // Must be a tuple
                            for i in 0..dest.layout.fields.count() {
                                let dest = self.place_field(&dest, i)?;
                                let callee_abi = callee_args_abis.next().unwrap();
                                self.pass_argument(&mut caller_args, callee_abi, &dest)?;
                            }
                        } else {
                            // Normal argument
                            let callee_abi = callee_args_abis.next().unwrap();
                            self.pass_argument(&mut caller_args, callee_abi, &dest)?;
                        }
                    }
                    // If the callee needs a caller location, pretend we consume one more argument from the ABI.
                    if instance.def.requires_caller_location(*self.tcx) {
                        callee_args_abis.next().unwrap();
                    }
                    // Now we should have no more caller args or callee arg ABIs
                    assert!(
                        callee_args_abis.next().is_none(),
                        "mismatch between callee ABI and callee body arguments"
                    );
                    if caller_args.next().is_some() {
                        throw_ub_format!("calling a function with more arguments than it expected")
                    }
                    // Don't forget to check the return type!
                    if !Self::check_argument_compat(&caller_fn_abi.ret, &callee_fn_abi.ret) {
                        throw_ub_format!(
                            "calling a function with return type {:?} passing \
                                    return place of type {:?}",
                            callee_fn_abi.ret.layout.ty,
                            caller_fn_abi.ret.layout.ty,
                        )
                    }
                };
                match res {
                    Err(err) => {
                        self.stack_mut().pop();
                        Err(err)
                    }
                    Ok(()) => Ok(()),
                }
            }
            // cannot use the shim here, because that will only result in infinite recursion
            ty::InstanceDef::Virtual(def_id, idx) => {
                let mut args = args.to_vec();
                // We have to implement all "object safe receivers". So we have to go search for a
                // pointer or `dyn Trait` type, but it could be wrapped in newtypes. So recursively
                // unwrap those newtypes until we are there.
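                // E.g. a `Pin<Box<dyn Trait>>` receiver is peeled down, field by
                // field, to the raw pointer stored inside the `Box`.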
                let mut receiver = args[0].clone();
                let receiver_place = loop {
                    match receiver.layout.ty.kind() {
                        ty::Ref(..) | ty::RawPtr(..) => break self.deref_operand(&receiver)?,
                        ty::Dynamic(..) => break receiver.assert_mem_place(), // no immediate unsized values
                        _ => {
                            // Not there yet, search for the only non-ZST field.
                            let mut non_zst_field = None;
                            for i in 0..receiver.layout.fields.count() {
                                let field = self.operand_field(&receiver, i)?;
                                let zst =
                                    field.layout.is_zst() && field.layout.align.abi.bytes() == 1;
                                if !zst {
                                    assert!(
                                        non_zst_field.is_none(),
                                        "multiple non-ZST fields in dyn receiver type {}",
                                        receiver.layout.ty
                                    );
                                    non_zst_field = Some(field);
                                }
                            }
                            receiver = non_zst_field.unwrap_or_else(|| {
                                panic!(
                                    "no non-ZST fields in dyn receiver type {}",
                                    receiver.layout.ty
                                )
                            });
                        }
                    }
                };
                // Obtain the underlying trait we are working on.
                let receiver_tail = self
                    .tcx
                    .struct_tail_erasing_lifetimes(receiver_place.layout.ty, self.param_env);
                let ty::Dynamic(data, ..) = receiver_tail.kind() else {
                    span_bug!(self.cur_span(), "dynamic call on non-`dyn` type {}", receiver_tail)
                };

                // Get the required information from the vtable.
                let vptr = receiver_place.meta.unwrap_meta().to_pointer(self)?;
                let (dyn_ty, dyn_trait) = self.get_ptr_vtable(vptr)?;
                if dyn_trait != data.principal() {
                    throw_ub_format!(
                        "`dyn` call on a pointer whose vtable does not match its type"
                    );
                }

                // Now determine the actual method to call. We can do that in two different ways and
                // compare them to ensure everything fits.
                let Some(ty::VtblEntry::Method(fn_inst)) = self.get_vtable_entries(vptr)?.get(idx).copied() else {
                    throw_ub_format!("`dyn` call trying to call something that is not a method")
                };
                if cfg!(debug_assertions) {
                    let tcx = *self.tcx;

                    let trait_def_id = tcx.trait_of_item(def_id).unwrap();
                    let virtual_trait_ref =
                        ty::TraitRef::from_method(tcx, trait_def_id, instance.substs);
                    assert_eq!(
                        receiver_tail,
                        virtual_trait_ref.self_ty(),
                        "mismatch in underlying dyn trait computation within Miri and MIR building",
                    );
                    let existential_trait_ref =
                        ty::ExistentialTraitRef::erase_self_ty(tcx, virtual_trait_ref);
                    let concrete_trait_ref = existential_trait_ref.with_self_ty(tcx, dyn_ty);

                    let concrete_method = Instance::resolve_for_vtable(
                        tcx,
                        self.param_env,
                        def_id,
                        instance.substs.rebase_onto(tcx, trait_def_id, concrete_trait_ref.substs),
                    )
                    .unwrap();
                    assert_eq!(fn_inst, concrete_method);
                }

                // `*mut receiver_place.layout.ty` is almost the layout that we
                // want for args[0]: We have to project to field 0 because we want
                // a thin pointer.
                assert!(receiver_place.layout.is_unsized());
                let receiver_ptr_ty = self.tcx.mk_mut_ptr(receiver_place.layout.ty);
                let this_receiver_ptr = self.layout_of(receiver_ptr_ty)?.field(self, 0);
                // Adjust receiver argument.
                args[0] = OpTy::from(ImmTy::from_immediate(
                    Scalar::from_maybe_pointer(receiver_place.ptr, self).into(),
                    this_receiver_ptr,
                ));
                trace!("Patched receiver operand to {:#?}", args[0]);
                // recurse with concrete function
                self.eval_fn_call(
                    FnVal::Instance(fn_inst),
                    (caller_abi, caller_fn_abi),
                    &args,
                    with_caller_location,
                    destination,
                    target,
                    unwind,
                )
            }
        }
    }

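    /// Evaluate the drop glue for `place`: for trait objects, resolve the
    /// concrete `drop_in_place` instance via the vtable first, then call it
    /// like a regular function taking a `*mut` pointer and returning unit.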
    fn drop_in_place(
        &mut self,
        place: &PlaceTy<'tcx, M::Provenance>,
        instance: ty::Instance<'tcx>,
        target: mir::BasicBlock,
        unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        trace!("drop_in_place: {:?},\n  {:?}, {:?}", *place, place.layout.ty, instance);
        // We take the address of the object.  This may well be unaligned, which is fine
        // for us here.  However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        let (instance, place) = match place.layout.ty.kind() {
            ty::Dynamic(..) => {
                // Dropping a trait object. Need to find actual drop fn.
                let place = self.unpack_dyn_trait(&place)?;
                let instance = ty::Instance::resolve_drop_in_place(*self.tcx, place.layout.ty);
                (instance, place)
            }
            _ => (instance, place),
        };
        let fn_abi = self.fn_abi_of_instance(instance, ty::List::empty())?;

        let arg = ImmTy::from_immediate(
            place.to_ref(self),
            self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
        );
        let ret = MPlaceTy::fake_alloc_zst(self.layout_of(self.tcx.types.unit)?);

        self.eval_fn_call(
            FnVal::Instance(instance),
            (Abi::Rust, fn_abi),
            &[arg.into()],
            false,
            &ret.into(),
            Some(target),
            match unwind {
                Some(cleanup) => StackPopUnwind::Cleanup(cleanup),
                None => StackPopUnwind::Skip,
            },
        )
    }
}