//! Handling of everything related to the calling convention. Also fills `fx.local_map`.

mod comments;
mod pass_mode;
mod returning;

use cranelift_module::ModuleError;
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::ty::layout::FnAbiOf;
use rustc_target::abi::call::{Conv, FnAbi};
use rustc_target::spec::abi::Abi;

use cranelift_codegen::ir::{AbiParam, SigRef};

use self::pass_mode::*;
use crate::prelude::*;

pub(crate) use self::returning::codegen_return;

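/// Build a Cranelift [`Signature`] from a rustc [`FnAbi`]. When the return value is passed
/// indirectly, the pointer to the return place becomes the first parameter.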
fn clif_sig_from_fn_abi<'tcx>(
    tcx: TyCtxt<'tcx>,
    default_call_conv: CallConv,
    fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
) -> Signature {
    let call_conv = conv_to_call_conv(fn_abi.conv, default_call_conv);

    let inputs = fn_abi.args.iter().map(|arg_abi| arg_abi.get_abi_param(tcx).into_iter()).flatten();

    let (return_ptr, returns) = fn_abi.ret.get_abi_return(tcx);
    // Sometimes the first param is a pointer to the place where the return value needs to be stored.
    let params: Vec<_> = return_ptr.into_iter().chain(inputs).collect();

    Signature { params, returns, call_conv }
}

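/// Map a rustc calling convention ([`Conv`]) to the corresponding Cranelift [`CallConv`].
/// `Rust` and `C` use the target's default call convention; conventions without a Cranelift
/// equivalent are still unimplemented.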
pub(crate) fn conv_to_call_conv(c: Conv, default_call_conv: CallConv) -> CallConv {
    match c {
        Conv::Rust | Conv::C => default_call_conv,
        Conv::RustCold => CallConv::Cold,
        Conv::X86_64SysV => CallConv::SystemV,
        Conv::X86_64Win64 => CallConv::WindowsFastcall,
        Conv::ArmAapcs
        | Conv::CCmseNonSecureCall
        | Conv::Msp430Intr
        | Conv::PtxKernel
        | Conv::X86Fastcall
        | Conv::X86Intr
        | Conv::X86Stdcall
        | Conv::X86ThisCall
        | Conv::X86VectorCall
        | Conv::AmdGpuKernel
        | Conv::AvrInterrupt
        | Conv::AvrNonBlockingInterrupt => todo!("{:?}", c),
    }
}

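/// Compute the Cranelift [`Signature`] for a monomorphized [`Instance`] by querying its fn ABI
/// through [`RevealAllLayoutCx`].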
pub(crate) fn get_function_sig<'tcx>(
    tcx: TyCtxt<'tcx>,
    default_call_conv: CallConv,
    inst: Instance<'tcx>,
) -> Signature {
    assert!(!inst.substs.needs_infer());
    clif_sig_from_fn_abi(
        tcx,
        default_call_conv,
        &RevealAllLayoutCx(tcx).fn_abi_of_instance(inst, ty::List::empty()),
    )
}

/// Instance must be monomorphized
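///
/// Declares the instance's symbol as an imported function in the given module and reports a
/// fatal error if the symbol was already declared as a static or with an incompatible signature.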
pub(crate) fn import_function<'tcx>(
    tcx: TyCtxt<'tcx>,
    module: &mut dyn Module,
    inst: Instance<'tcx>,
) -> FuncId {
    let name = tcx.symbol_name(inst).name;
    let sig = get_function_sig(tcx, module.target_config().default_call_conv, inst);
    match module.declare_function(name, Linkage::Import, &sig) {
        Ok(func_id) => func_id,
        Err(ModuleError::IncompatibleDeclaration(_)) => tcx.sess.fatal(&format!(
            "attempt to declare `{name}` as function, but it was already declared as static"
        )),
        Err(ModuleError::IncompatibleSignature(_, prev_sig, new_sig)) => tcx.sess.fatal(&format!(
            "attempt to declare `{name}` with signature {new_sig:?}, \
             but it was already declared with signature {prev_sig:?}"
        )),
        Err(err) => Err::<_, _>(err).unwrap(),
    }
}

impl<'tcx> FunctionCx<'_, '_, 'tcx> {
    /// Instance must be monomorphized
    pub(crate) fn get_function_ref(&mut self, inst: Instance<'tcx>) -> FuncRef {
        let func_id = import_function(self.tcx, self.module, inst);
        let func_ref = self.module.declare_func_in_func(func_id, &mut self.bcx.func);

        if self.clif_comments.enabled() {
            self.add_comment(func_ref, format!("{:?}", inst));
        }

        func_ref
    }

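    /// Declare `name` as an imported function with the given raw Cranelift signature and call
    /// it with `args`, returning the raw result values (at most two).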
    pub(crate) fn lib_call(
        &mut self,
        name: &str,
        params: Vec<AbiParam>,
        returns: Vec<AbiParam>,
        args: &[Value],
    ) -> &[Value] {
        let sig = Signature { params, returns, call_conv: self.target_config.default_call_conv };
        let func_id = self.module.declare_function(name, Linkage::Import, &sig).unwrap();
        let func_ref = self.module.declare_func_in_func(func_id, &mut self.bcx.func);
        if self.clif_comments.enabled() {
            self.add_comment(func_ref, format!("{:?}", name));
        }
        let call_inst = self.bcx.ins().call(func_ref, args);
        if self.clif_comments.enabled() {
            self.add_comment(call_inst, format!("easy_call {}", name));
        }
        let results = self.bcx.inst_results(call_inst);
        assert!(results.len() <= 2, "{}", results.len());
        results
    }

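    /// Higher level wrapper around [`Self::lib_call`] that takes and returns [`CValue`]s.
    /// Every argument must have a scalar Cranelift representation; a tuple return type is
    /// mapped to multiple return values.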
    pub(crate) fn easy_call(
        &mut self,
        name: &str,
        args: &[CValue<'tcx>],
        return_ty: Ty<'tcx>,
    ) -> CValue<'tcx> {
        let (input_tys, args): (Vec<_>, Vec<_>) = args
            .iter()
            .map(|arg| {
                (AbiParam::new(self.clif_type(arg.layout().ty).unwrap()), arg.load_scalar(self))
            })
            .unzip();
        let return_layout = self.layout_of(return_ty);
        let return_tys = if let ty::Tuple(tup) = return_ty.kind() {
            tup.iter().map(|ty| AbiParam::new(self.clif_type(ty).unwrap())).collect()
        } else {
            vec![AbiParam::new(self.clif_type(return_ty).unwrap())]
        };
        let ret_vals = self.lib_call(name, input_tys, return_tys, &args);
        match *ret_vals {
            [] => CValue::by_ref(
                Pointer::const_addr(self, i64::from(self.pointer_type.bytes())),
                return_layout,
            ),
            [val] => CValue::by_val(val, return_layout),
            [val, extra] => CValue::by_val_pair(val, extra, return_layout),
            _ => unreachable!(),
        }
    }
}

/// Make a [`CPlace`] capable of holding a value of the specified type.
fn make_local_place<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    local: Local,
    layout: TyAndLayout<'tcx>,
    is_ssa: bool,
) -> CPlace<'tcx> {
    let place = if is_ssa {
        if let rustc_target::abi::Abi::ScalarPair(_, _) = layout.abi {
            CPlace::new_var_pair(fx, local, layout)
        } else {
            CPlace::new_var(fx, local, layout)
        }
    } else {
        CPlace::new_stack_slot(fx, layout)
    };

    self::comments::add_local_place_comments(fx, place, local);

    place
}

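/// Set up the function prologue: create the start block, run SSA analysis, bind the return
/// place and every argument to a [`CPlace`] in `fx.local_map`, and allocate places for all
/// remaining locals before jumping to the MIR `START_BLOCK`.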
pub(crate) fn codegen_fn_prelude<'tcx>(fx: &mut FunctionCx<'_, '_, 'tcx>, start_block: Block) {
    fx.bcx.append_block_params_for_function_params(start_block);

    fx.bcx.switch_to_block(start_block);
    fx.bcx.ins().nop();

    let ssa_analyzed = crate::analyze::analyze(fx);

    self::comments::add_args_header_comment(fx);

    let mut block_params_iter = fx.bcx.func.dfg.block_params(start_block).to_vec().into_iter();
    let ret_place =
        self::returning::codegen_return_param(fx, &ssa_analyzed, &mut block_params_iter);
    assert_eq!(fx.local_map.push(ret_place), RETURN_PLACE);

    // None means pass_mode == NoPass
    enum ArgKind<'tcx> {
        Normal(Option<CValue<'tcx>>),
        Spread(Vec<Option<CValue<'tcx>>>),
    }

    let fn_abi = fx.fn_abi.take().unwrap();

    // FIXME implement variadics in cranelift
    if fn_abi.c_variadic {
        fx.tcx.sess.span_fatal(
            fx.mir.span,
            "Defining variadic functions is not yet supported by Cranelift",
        );
    }

    let mut arg_abis_iter = fn_abi.args.iter();

    let func_params = fx
        .mir
        .args_iter()
        .map(|local| {
            let arg_ty = fx.monomorphize(fx.mir.local_decls[local].ty);

            // Adapted from https://github.com/rust-lang/rust/blob/145155dc96757002c7b2e9de8489416e2fdbbd57/src/librustc_codegen_llvm/mir/mod.rs#L442-L482
            if Some(local) == fx.mir.spread_arg {
                // This argument (e.g. the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual function arguments.

                let tupled_arg_tys = match arg_ty.kind() {
                    ty::Tuple(ref tys) => tys,
                    _ => bug!("spread argument isn't a tuple?! but {:?}", arg_ty),
                };

                let mut params = Vec::new();
                for (i, _arg_ty) in tupled_arg_tys.iter().enumerate() {
                    let arg_abi = arg_abis_iter.next().unwrap();
                    let param =
                        cvalue_for_param(fx, Some(local), Some(i), arg_abi, &mut block_params_iter);
                    params.push(param);
                }

                (local, ArgKind::Spread(params), arg_ty)
            } else {
                let arg_abi = arg_abis_iter.next().unwrap();
                let param =
                    cvalue_for_param(fx, Some(local), None, arg_abi, &mut block_params_iter);
                (local, ArgKind::Normal(param), arg_ty)
            }
        })
        .collect::<Vec<(Local, ArgKind<'tcx>, Ty<'tcx>)>>();

    assert!(fx.caller_location.is_none());
    if fx.instance.def.requires_caller_location(fx.tcx) {
        // Store caller location for `#[track_caller]`.
        let arg_abi = arg_abis_iter.next().unwrap();
        fx.caller_location =
            Some(cvalue_for_param(fx, None, None, arg_abi, &mut block_params_iter).unwrap());
    }

    assert!(arg_abis_iter.next().is_none(), "ArgAbi left behind");
    fx.fn_abi = Some(fn_abi);
    assert!(block_params_iter.next().is_none(), "arg_value left behind");

    self::comments::add_locals_header_comment(fx);

    for (local, arg_kind, ty) in func_params {
        let layout = fx.layout_of(ty);

        let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;

        // While this is normally an optimization to prevent an unnecessary copy when an argument is
        // not mutated by the current function, this is necessary to support unsized arguments.
        if let ArgKind::Normal(Some(val)) = arg_kind {
            if let Some((addr, meta)) = val.try_to_ptr() {
                // Ownership of the value at the backing storage for an argument is passed to the
                // callee per the ABI, so it is fine to borrow the backing storage of this argument
                // to prevent a copy.

                let place = if let Some(meta) = meta {
                    CPlace::for_ptr_with_extra(addr, meta, val.layout())
                } else {
                    CPlace::for_ptr(addr, val.layout())
                };

                self::comments::add_local_place_comments(fx, place, local);

                assert_eq!(fx.local_map.push(place), local);
                continue;
            }
        }

        let place = make_local_place(fx, local, layout, is_ssa);
        assert_eq!(fx.local_map.push(place), local);

        match arg_kind {
            ArgKind::Normal(param) => {
                if let Some(param) = param {
                    place.write_cvalue(fx, param);
                }
            }
            ArgKind::Spread(params) => {
                for (i, param) in params.into_iter().enumerate() {
                    if let Some(param) = param {
                        place.place_field(fx, mir::Field::new(i)).write_cvalue(fx, param);
                    }
                }
            }
        }
    }

    for local in fx.mir.vars_and_temps_iter() {
        let ty = fx.monomorphize(fx.mir.local_decls[local].ty);
        let layout = fx.layout_of(ty);

        let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;

        let place = make_local_place(fx, local, layout, is_ssa);
        assert_eq!(fx.local_map.push(place), local);
    }

    fx.bcx.ins().jump(*fx.block_map.get(START_BLOCK).unwrap(), &[]);
}

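/// An argument to a call, together with whether the callee takes ownership of it (i.e. whether
/// the MIR operand was a `Move`). `is_owned` is forwarded to `adjust_arg_for_abi`.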
struct CallArgument<'tcx> {
    value: CValue<'tcx>,
    is_owned: bool,
}

// FIXME avoid intermediate `CValue` before calling `adjust_arg_for_abi`
fn codegen_call_argument_operand<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    operand: &Operand<'tcx>,
) -> CallArgument<'tcx> {
    CallArgument {
        value: codegen_operand(fx, operand),
        is_owned: matches!(operand, Operand::Move(_)),
    }
}

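/// Codegen a MIR `Call` terminator: special-cases intrinsics, `llvm.*` shims and empty drop
/// glue, unpacks the argument tuple for the "rust-call" ABI, appends the caller location for
/// `#[track_caller]`, and emits either a direct, virtual (vtable) or indirect call.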
pub(crate) fn codegen_terminator_call<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    source_info: mir::SourceInfo,
    func: &Operand<'tcx>,
    args: &[Operand<'tcx>],
    destination: Place<'tcx>,
    target: Option<BasicBlock>,
) {
    let func = codegen_operand(fx, func);
    let fn_sig = func.layout().ty.fn_sig(fx.tcx);

    let ret_place = codegen_place(fx, destination);

    // Handle special calls like intrinsics and empty drop glue.
    let instance = if let ty::FnDef(def_id, substs) = *func.layout().ty.kind() {
        let instance =
            ty::Instance::expect_resolve(fx.tcx, ty::ParamEnv::reveal_all(), def_id, substs)
                .polymorphize(fx.tcx);

        if fx.tcx.symbol_name(instance).name.starts_with("llvm.") {
            crate::intrinsics::codegen_llvm_intrinsic_call(
                fx,
                &fx.tcx.symbol_name(instance).name,
                substs,
                args,
                ret_place,
                target,
            );
            return;
        }

        match instance.def {
            InstanceDef::Intrinsic(_) => {
                crate::intrinsics::codegen_intrinsic_call(
                    fx,
                    instance,
                    args,
                    ret_place,
                    target,
                    source_info,
                );
                return;
            }
            InstanceDef::DropGlue(_, None) => {
                // empty drop glue - a nop.
                let dest = target.expect("Non terminating drop_in_place_real???");
                let ret_block = fx.get_block(dest);
                fx.bcx.ins().jump(ret_block, &[]);
                return;
            }
            _ => Some(instance),
        }
    } else {
        None
    };

    let extra_args = &args[fn_sig.inputs().skip_binder().len()..];
    let extra_args = fx
        .tcx
        .mk_type_list(extra_args.iter().map(|op_arg| fx.monomorphize(op_arg.ty(fx.mir, fx.tcx))));
    let fn_abi = if let Some(instance) = instance {
        RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(instance, extra_args)
    } else {
        RevealAllLayoutCx(fx.tcx).fn_abi_of_fn_ptr(fn_sig, extra_args)
    };

    let is_cold = if fn_sig.abi() == Abi::RustCold {
        true
    } else {
        instance
            .map(|inst| {
                fx.tcx.codegen_fn_attrs(inst.def_id()).flags.contains(CodegenFnAttrFlags::COLD)
            })
            .unwrap_or(false)
    };
    if is_cold {
        fx.bcx.set_cold_block(fx.bcx.current_block().unwrap());
        if let Some(destination_block) = target {
            fx.bcx.set_cold_block(fx.get_block(destination_block));
        }
    }

    // Unpack arguments tuple for closures
    let mut args = if fn_sig.abi() == Abi::RustCall {
        assert_eq!(args.len(), 2, "rust-call abi requires two arguments");
        let self_arg = codegen_call_argument_operand(fx, &args[0]);
        let pack_arg = codegen_call_argument_operand(fx, &args[1]);

        let tupled_arguments = match pack_arg.value.layout().ty.kind() {
            ty::Tuple(ref tupled_arguments) => tupled_arguments,
            _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
        };

        let mut args = Vec::with_capacity(1 + tupled_arguments.len());
        args.push(self_arg);
        for i in 0..tupled_arguments.len() {
            args.push(CallArgument {
                value: pack_arg.value.value_field(fx, mir::Field::new(i)),
                is_owned: pack_arg.is_owned,
            });
        }
        args
    } else {
        args.iter().map(|arg| codegen_call_argument_operand(fx, arg)).collect::<Vec<_>>()
    };

    // Pass the caller location for `#[track_caller]`.
    if instance.map(|inst| inst.def.requires_caller_location(fx.tcx)).unwrap_or(false) {
        let caller_location = fx.get_caller_location(source_info);
        args.push(CallArgument { value: caller_location, is_owned: false });
    }

    let args = args;
    assert_eq!(fn_abi.args.len(), args.len());

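    // Either a direct call to a known function or an indirect call through a function pointer
    // (used for trait object and fn pointer calls).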
    enum CallTarget {
        Direct(FuncRef),
        Indirect(SigRef, Value),
    }

    let (func_ref, first_arg_override) = match instance {
        // Trait object call
        Some(Instance { def: InstanceDef::Virtual(_, idx), .. }) => {
            if fx.clif_comments.enabled() {
                let nop_inst = fx.bcx.ins().nop();
                fx.add_comment(
                    nop_inst,
                    format!("virtual call; self arg pass mode: {:?}", &fn_abi.args[0]),
                );
            }

            let (ptr, method) = crate::vtable::get_ptr_and_method_ref(fx, args[0].value, idx);
            let sig = clif_sig_from_fn_abi(fx.tcx, fx.target_config.default_call_conv, &fn_abi);
            let sig = fx.bcx.import_signature(sig);

            (CallTarget::Indirect(sig, method), Some(ptr.get_addr(fx)))
        }

        // Normal call
        Some(instance) => {
            let func_ref = fx.get_function_ref(instance);
            (CallTarget::Direct(func_ref), None)
        }

        // Indirect call
        None => {
            if fx.clif_comments.enabled() {
                let nop_inst = fx.bcx.ins().nop();
                fx.add_comment(nop_inst, "indirect call");
            }

            let func = func.load_scalar(fx);
            let sig = clif_sig_from_fn_abi(fx.tcx, fx.target_config.default_call_conv, &fn_abi);
            let sig = fx.bcx.import_signature(sig);

            (CallTarget::Indirect(sig, func), None)
        }
    };

    self::returning::codegen_with_call_return_arg(fx, &fn_abi.ret, ret_place, |fx, return_ptr| {
        let call_args = return_ptr
            .into_iter()
            .chain(first_arg_override.into_iter())
            .chain(
                args.into_iter()
                    .enumerate()
                    .skip(if first_arg_override.is_some() { 1 } else { 0 })
                    .map(|(i, arg)| {
                        adjust_arg_for_abi(fx, arg.value, &fn_abi.args[i], arg.is_owned).into_iter()
                    })
                    .flatten(),
            )
            .collect::<Vec<Value>>();

        let call_inst = match func_ref {
            CallTarget::Direct(func_ref) => fx.bcx.ins().call(func_ref, &call_args),
            CallTarget::Indirect(sig, func_ptr) => {
                fx.bcx.ins().call_indirect(sig, func_ptr, &call_args)
            }
        };

        // FIXME find a cleaner way to support varargs
        if fn_sig.c_variadic() {
            if !matches!(fn_sig.abi(), Abi::C { .. }) {
                fx.tcx.sess.span_fatal(
                    source_info.span,
                    &format!("Variadic call for non-C abi {:?}", fn_sig.abi()),
                );
            }
            let sig_ref = fx.bcx.func.dfg.call_signature(call_inst).unwrap();
            let abi_params = call_args
                .into_iter()
                .map(|arg| {
                    let ty = fx.bcx.func.dfg.value_type(arg);
                    if !ty.is_int() {
                        // FIXME set %al to upperbound on float args once floats are supported
                        fx.tcx.sess.span_fatal(
                            source_info.span,
                            &format!("Non int ty {:?} for variadic call", ty),
                        );
                    }
                    AbiParam::new(ty)
                })
                .collect::<Vec<AbiParam>>();
            fx.bcx.func.dfg.signatures[sig_ref].params = abi_params;
        }

        call_inst
    });

    if let Some(dest) = target {
        let ret_block = fx.get_block(dest);
        fx.bcx.ins().jump(ret_block, &[]);
    } else {
        fx.bcx.ins().trap(TrapCode::UnreachableCodeReached);
    }
}

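/// Codegen a call to the drop glue for `drop_place`. Does nothing for types with empty drop
/// glue; for `dyn Trait` and `dyn*` objects the drop function is loaded from the vtable and
/// called indirectly.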
pub(crate) fn codegen_drop<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    source_info: mir::SourceInfo,
    drop_place: CPlace<'tcx>,
) {
    let ty = drop_place.layout().ty;
    let drop_instance = Instance::resolve_drop_in_place(fx.tcx, ty).polymorphize(fx.tcx);

    if let ty::InstanceDef::DropGlue(_, None) = drop_instance.def {
        // we don't actually need to drop anything
    } else {
        match ty.kind() {
            ty::Dynamic(_, _, ty::Dyn) => {
                // IN THIS ARM, WE HAVE:
                // ty = *mut (dyn Trait)
                // which is: exists<T> ( *mut T,    Vtable<T: Trait> )
                //                       args[0]    args[1]
                //
                // args = ( Data, Vtable )
                //                  |
                //                  v
                //                /-------\
                //                | ...   |
                //                \-------/
                //
                let (ptr, vtable) = drop_place.to_ptr_maybe_unsized();
                let ptr = ptr.get_addr(fx);
                let drop_fn = crate::vtable::drop_fn_of_obj(fx, vtable.unwrap());

                // FIXME(eddyb) perhaps move some of this logic into
                // `Instance::resolve_drop_in_place`?
                let virtual_drop = Instance {
                    def: ty::InstanceDef::Virtual(drop_instance.def_id(), 0),
                    substs: drop_instance.substs,
                };
                let fn_abi =
                    RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(virtual_drop, ty::List::empty());

                let sig = clif_sig_from_fn_abi(fx.tcx, fx.target_config.default_call_conv, &fn_abi);
                let sig = fx.bcx.import_signature(sig);
                fx.bcx.ins().call_indirect(sig, drop_fn, &[ptr]);
            }
            ty::Dynamic(_, _, ty::DynStar) => {
                // IN THIS ARM, WE HAVE:
                // ty = *mut (dyn* Trait)
                // which is: *mut exists<T: sizeof(T) == sizeof(usize)> (T, Vtable<T: Trait>)
                //
                // args = [ * ]
                //          |
                //          v
                //      ( Data, Vtable )
                //                |
                //                v
                //              /-------\
                //              | ...   |
                //              \-------/
                //
                //
                // WE CAN CONVERT THIS INTO THE ABOVE LOGIC BY DOING
                //
                // data = &(*args[0]).0    // gives a pointer to Data above (really the same pointer)
                // vtable = (*args[0]).1   // loads the vtable out
                // (data, vtable)          // an equivalent Rust `*mut dyn Trait`
                //
                // SO THEN WE CAN USE THE ABOVE CODE.
                let (data, vtable) = drop_place.to_cvalue(fx).dyn_star_force_data_on_stack(fx);
                let drop_fn = crate::vtable::drop_fn_of_obj(fx, vtable);

                let virtual_drop = Instance {
                    def: ty::InstanceDef::Virtual(drop_instance.def_id(), 0),
                    substs: drop_instance.substs,
                };
                let fn_abi =
                    RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(virtual_drop, ty::List::empty());

                let sig = clif_sig_from_fn_abi(fx.tcx, fx.target_config.default_call_conv, &fn_abi);
                let sig = fx.bcx.import_signature(sig);
                fx.bcx.ins().call_indirect(sig, drop_fn, &[data]);
            }
            _ => {
                assert!(!matches!(drop_instance.def, InstanceDef::Virtual(_, _)));

                let fn_abi =
                    RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(drop_instance, ty::List::empty());

                let arg_value = drop_place.place_ref(
                    fx,
                    fx.layout_of(fx.tcx.mk_ref(
                        fx.tcx.lifetimes.re_erased,
                        TypeAndMut { ty, mutbl: crate::rustc_hir::Mutability::Mut },
                    )),
                );
                let arg_value = adjust_arg_for_abi(fx, arg_value, &fn_abi.args[0], true);

                let mut call_args: Vec<Value> = arg_value.into_iter().collect::<Vec<_>>();

                if drop_instance.def.requires_caller_location(fx.tcx) {
                    // Pass the caller location for `#[track_caller]`.
                    let caller_location = fx.get_caller_location(source_info);
                    call_args.extend(
                        adjust_arg_for_abi(fx, caller_location, &fn_abi.args[1], false).into_iter(),
                    );
                }

                let func_ref = fx.get_function_ref(drop_instance);
                fx.bcx.ins().call(func_ref, &call_args);
            }
        }
    }
}