[rust.git] src/abi/mod.rs
//! Handling of everything related to the calling convention. Also fills `fx.local_map`.

#[cfg(debug_assertions)]
mod comments;
mod pass_mode;
mod returning;

use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::ty::layout::FnAbiExt;
use rustc_target::abi::call::{Conv, FnAbi};
use rustc_target::spec::abi::Abi;

use cranelift_codegen::ir::AbiParam;
use smallvec::smallvec;

use self::pass_mode::*;
use crate::prelude::*;

pub(crate) use self::returning::{can_return_to_ssa_var, codegen_return};

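/// Construct a Cranelift [`Signature`] from a rustc [`FnAbi`]: map the calling convention
/// to its Cranelift equivalent, lower each argument to zero or more `AbiParam`s, and
/// prepend the return pointer as the first parameter when the return value is passed
/// indirectly.
///
/// Illustrative sketch (not normative): on x86_64-unknown-linux-gnu, a Rust-ABI function
/// like `fn foo(x: i32) -> i64` lowers to roughly
/// `Signature { params: [i32], returns: [i64], call_conv: SystemV }`.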
fn clif_sig_from_fn_abi<'tcx>(
    tcx: TyCtxt<'tcx>,
    triple: &target_lexicon::Triple,
    fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
) -> Signature {
    let call_conv = match fn_abi.conv {
        Conv::Rust | Conv::C => CallConv::triple_default(triple),
        Conv::X86_64SysV => CallConv::SystemV,
        Conv::X86_64Win64 => CallConv::WindowsFastcall,
        Conv::ArmAapcs
        | Conv::CCmseNonSecureCall
        | Conv::Msp430Intr
        | Conv::PtxKernel
        | Conv::X86Fastcall
        | Conv::X86Intr
        | Conv::X86Stdcall
        | Conv::X86ThisCall
        | Conv::X86VectorCall
        | Conv::AmdGpuKernel
        | Conv::AvrInterrupt
        | Conv::AvrNonBlockingInterrupt => todo!("{:?}", fn_abi.conv),
    };
    let inputs = fn_abi.args.iter().flat_map(|arg_abi| arg_abi.get_abi_param(tcx));

    let (return_ptr, returns) = fn_abi.ret.get_abi_return(tcx);
    // Sometimes the first param is a pointer to the place where the return value needs to be stored.
    let params: Vec<_> = return_ptr.into_iter().chain(inputs).collect();

    Signature { params, returns, call_conv }
}

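/// Compute the Cranelift signature for a function instance. Type inference must already
/// have been completed for `inst`; the [`FnAbi`] is computed with `ParamEnv::reveal_all()`
/// semantics via [`RevealAllLayoutCx`].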
pub(crate) fn get_function_sig<'tcx>(
    tcx: TyCtxt<'tcx>,
    triple: &target_lexicon::Triple,
    inst: Instance<'tcx>,
) -> Signature {
    assert!(!inst.substs.needs_infer());
    clif_sig_from_fn_abi(tcx, triple, &FnAbi::of_instance(&RevealAllLayoutCx(tcx), inst, &[]))
}

/// Instance must be monomorphized
pub(crate) fn import_function<'tcx>(
    tcx: TyCtxt<'tcx>,
    module: &mut dyn Module,
    inst: Instance<'tcx>,
) -> FuncId {
    let name = tcx.symbol_name(inst).name.to_string();
    let sig = get_function_sig(tcx, module.isa().triple(), inst);
    module.declare_function(&name, Linkage::Import, &sig).unwrap()
}

impl<'tcx> FunctionCx<'_, '_, 'tcx> {
    /// Instance must be monomorphized
    pub(crate) fn get_function_ref(&mut self, inst: Instance<'tcx>) -> FuncRef {
        let func_id = import_function(self.tcx, self.cx.module, inst);
        let func_ref = self.cx.module.declare_func_in_func(func_id, &mut self.bcx.func);

        #[cfg(debug_assertions)]
        self.add_comment(func_ref, format!("{:?}", inst));

        func_ref
    }

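    /// Call a library function by symbol name, given an explicit Cranelift-level signature.
    /// The function is declared as an import using the target's default calling convention,
    /// and the raw result values of the call are returned.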
    pub(crate) fn lib_call(
        &mut self,
        name: &str,
        params: Vec<AbiParam>,
        returns: Vec<AbiParam>,
        args: &[Value],
    ) -> &[Value] {
        let sig = Signature { params, returns, call_conv: CallConv::triple_default(self.triple()) };
        let func_id = self.cx.module.declare_function(name, Linkage::Import, &sig).unwrap();
        let func_ref = self.cx.module.declare_func_in_func(func_id, &mut self.bcx.func);
        let call_inst = self.bcx.ins().call(func_ref, args);
        #[cfg(debug_assertions)]
        {
            self.add_comment(call_inst, format!("easy_call {}", name));
        }
        let results = self.bcx.inst_results(call_inst);
        assert!(results.len() <= 2, "{}", results.len());
        results
    }

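    /// Convenience wrapper around [`Self::lib_call`] that derives the Cranelift signature
    /// from the `CValue` arguments and the Rust return type (a tuple return becomes a
    /// value pair) and wraps the results back up as a [`CValue`].
    ///
    /// Hypothetical usage sketch (the symbol name is made up):
    /// `fx.easy_call("some_libcall", &[lhs, rhs], fx.tcx.types.i64)` emits a call to the
    /// imported symbol `some_libcall` and yields its result as an `i64` `CValue`.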
    pub(crate) fn easy_call(
        &mut self,
        name: &str,
        args: &[CValue<'tcx>],
        return_ty: Ty<'tcx>,
    ) -> CValue<'tcx> {
        let (input_tys, args): (Vec<_>, Vec<_>) = args
            .iter()
            .map(|arg| {
                (AbiParam::new(self.clif_type(arg.layout().ty).unwrap()), arg.load_scalar(self))
            })
            .unzip();
        let return_layout = self.layout_of(return_ty);
        let return_tys = if let ty::Tuple(tup) = return_ty.kind() {
            tup.types().map(|ty| AbiParam::new(self.clif_type(ty).unwrap())).collect()
        } else {
            vec![AbiParam::new(self.clif_type(return_ty).unwrap())]
        };
        let ret_vals = self.lib_call(name, input_tys, return_tys, &args);
        match *ret_vals {
            [] => CValue::by_ref(
                Pointer::const_addr(self, i64::from(self.pointer_type.bytes())),
                return_layout,
            ),
            [val] => CValue::by_val(val, return_layout),
            [val, extra] => CValue::by_val_pair(val, extra, return_layout),
            _ => unreachable!(),
        }
    }
}

/// Make a [`CPlace`] capable of holding a value of the specified type.
fn make_local_place<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    local: Local,
    layout: TyAndLayout<'tcx>,
    is_ssa: bool,
) -> CPlace<'tcx> {
    let place = if is_ssa {
        if let rustc_target::abi::Abi::ScalarPair(_, _) = layout.abi {
            CPlace::new_var_pair(fx, local, layout)
        } else {
            CPlace::new_var(fx, local, layout)
        }
    } else {
        CPlace::new_stack_slot(fx, layout)
    };

    #[cfg(debug_assertions)]
    self::comments::add_local_place_comments(fx, place, local);

    place
}

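/// Emit the function prologue: create and switch to the start block, analyze which locals
/// can stay in SSA form, bind the return place and all incoming arguments (including
/// spread "rust-call" tuples and the implicit `#[track_caller]` location) into
/// `fx.local_map`, allocate places for the remaining locals, and jump to `START_BLOCK`.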
pub(crate) fn codegen_fn_prelude<'tcx>(fx: &mut FunctionCx<'_, '_, 'tcx>, start_block: Block) {
    fx.bcx.append_block_params_for_function_params(start_block);

    fx.bcx.switch_to_block(start_block);
    fx.bcx.ins().nop();

    let ssa_analyzed = crate::analyze::analyze(fx);

    #[cfg(debug_assertions)]
    self::comments::add_args_header_comment(fx);

    let mut block_params_iter = fx.bcx.func.dfg.block_params(start_block).to_vec().into_iter();
    let ret_place =
        self::returning::codegen_return_param(fx, &ssa_analyzed, &mut block_params_iter);
    assert_eq!(fx.local_map.push(ret_place), RETURN_PLACE);

    // `None` means `pass_mode == NoPass`.
    enum ArgKind<'tcx> {
        Normal(Option<CValue<'tcx>>),
        Spread(Vec<Option<CValue<'tcx>>>),
    }

    let fn_abi = fx.fn_abi.take().unwrap();
    let mut arg_abis_iter = fn_abi.args.iter();

    let func_params = fx
        .mir
        .args_iter()
        .map(|local| {
            let arg_ty = fx.monomorphize(fx.mir.local_decls[local].ty);

            // Adapted from https://github.com/rust-lang/rust/blob/145155dc96757002c7b2e9de8489416e2fdbbd57/src/librustc_codegen_llvm/mir/mod.rs#L442-L482
            if Some(local) == fx.mir.spread_arg {
                // This argument (e.g. the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual function arguments.

                let tupled_arg_tys = match arg_ty.kind() {
                    ty::Tuple(ref tys) => tys,
                    _ => bug!("spread argument isn't a tuple?! but {:?}", arg_ty),
                };

                let mut params = Vec::new();
                for (i, _arg_ty) in tupled_arg_tys.types().enumerate() {
                    let arg_abi = arg_abis_iter.next().unwrap();
                    let param =
                        cvalue_for_param(fx, Some(local), Some(i), arg_abi, &mut block_params_iter);
                    params.push(param);
                }

                (local, ArgKind::Spread(params), arg_ty)
            } else {
                let arg_abi = arg_abis_iter.next().unwrap();
                let param =
                    cvalue_for_param(fx, Some(local), None, arg_abi, &mut block_params_iter);
                (local, ArgKind::Normal(param), arg_ty)
            }
        })
        .collect::<Vec<(Local, ArgKind<'tcx>, Ty<'tcx>)>>();

    assert!(fx.caller_location.is_none());
    if fx.instance.def.requires_caller_location(fx.tcx) {
        // Store caller location for `#[track_caller]`.
        let arg_abi = arg_abis_iter.next().unwrap();
        fx.caller_location =
            Some(cvalue_for_param(fx, None, None, arg_abi, &mut block_params_iter).unwrap());
    }

    assert!(arg_abis_iter.next().is_none(), "ArgAbi left behind");
    fx.fn_abi = Some(fn_abi);
    assert!(block_params_iter.next().is_none(), "arg_value left behind");

    #[cfg(debug_assertions)]
    self::comments::add_locals_header_comment(fx);

    for (local, arg_kind, ty) in func_params {
        let layout = fx.layout_of(ty);

        let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;

        // While this is normally an optimization to prevent an unnecessary copy when an argument is
        // not mutated by the current function, this is necessary to support unsized arguments.
        if let ArgKind::Normal(Some(val)) = arg_kind {
            if let Some((addr, meta)) = val.try_to_ptr() {
                let local_decl = &fx.mir.local_decls[local];
                //                       v this ! is important
                let internally_mutable = !val
                    .layout()
                    .ty
                    .is_freeze(fx.tcx.at(local_decl.source_info.span), ParamEnv::reveal_all());
                if local_decl.mutability == mir::Mutability::Not && !internally_mutable {
                    // We won't mutate this argument, so it is fine to borrow the backing storage
                    // of this argument, to prevent a copy.

                    let place = if let Some(meta) = meta {
                        CPlace::for_ptr_with_extra(addr, meta, val.layout())
                    } else {
                        CPlace::for_ptr(addr, val.layout())
                    };

                    #[cfg(debug_assertions)]
                    self::comments::add_local_place_comments(fx, place, local);

                    assert_eq!(fx.local_map.push(place), local);
                    continue;
                }
            }
        }

        let place = make_local_place(fx, local, layout, is_ssa);
        assert_eq!(fx.local_map.push(place), local);

        match arg_kind {
            ArgKind::Normal(param) => {
                if let Some(param) = param {
                    place.write_cvalue(fx, param);
                }
            }
            ArgKind::Spread(params) => {
                for (i, param) in params.into_iter().enumerate() {
                    if let Some(param) = param {
                        place.place_field(fx, mir::Field::new(i)).write_cvalue(fx, param);
                    }
                }
            }
        }
    }

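    // Allocate a place for every remaining local (user variables and temporaries).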
    for local in fx.mir.vars_and_temps_iter() {
        let ty = fx.monomorphize(fx.mir.local_decls[local].ty);
        let layout = fx.layout_of(ty);

        let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;

        let place = make_local_place(fx, local, layout, is_ssa);
        assert_eq!(fx.local_map.push(place), local);
    }

    fx.bcx.ins().jump(*fx.block_map.get(START_BLOCK).unwrap(), &[]);
}

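/// Lower a MIR call terminator: resolve the callee, special-case LLVM intrinsics, Rust
/// intrinsics and empty drop glue, compute the callee's [`FnAbi`], adjust each argument to
/// its ABI-mandated pass mode, emit a direct, virtual, or indirect call, and finally
/// branch to the destination block (or trap if the call diverges).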
pub(crate) fn codegen_terminator_call<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    span: Span,
    current_block: Block,
    func: &Operand<'tcx>,
    args: &[Operand<'tcx>],
    destination: Option<(Place<'tcx>, BasicBlock)>,
) {
    let fn_ty = fx.monomorphize(func.ty(fx.mir, fx.tcx));
    let fn_sig =
        fx.tcx.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), fn_ty.fn_sig(fx.tcx));

    let destination = destination.map(|(place, bb)| (codegen_place(fx, place), bb));

    // Handle special calls like intrinsics and empty drop glue.
    let instance = if let ty::FnDef(def_id, substs) = *fn_ty.kind() {
        let instance = ty::Instance::resolve(fx.tcx, ty::ParamEnv::reveal_all(), def_id, substs)
            .unwrap()
            .unwrap()
            .polymorphize(fx.tcx);

        if fx.tcx.symbol_name(instance).name.starts_with("llvm.") {
            crate::intrinsics::codegen_llvm_intrinsic_call(
                fx,
                &fx.tcx.symbol_name(instance).name,
                substs,
                args,
                destination,
            );
            return;
        }

        match instance.def {
            InstanceDef::Intrinsic(_) => {
                crate::intrinsics::codegen_intrinsic_call(fx, instance, args, destination, span);
                return;
            }
            InstanceDef::DropGlue(_, None) => {
                // Empty drop glue - a nop.
                let (_, dest) = destination.expect("Non terminating drop_in_place_real???");
                let ret_block = fx.get_block(dest);
                fx.bcx.ins().jump(ret_block, &[]);
                return;
            }
            _ => Some(instance),
        }
    } else {
        None
    };

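    // Arguments passed beyond the declared inputs are C varargs; collect their types so
    // the `FnAbi` computation can take them into account.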
    let extra_args = &args[fn_sig.inputs().len()..];
    let extra_args = extra_args
        .iter()
        .map(|op_arg| fx.monomorphize(op_arg.ty(fx.mir, fx.tcx)))
        .collect::<Vec<_>>();
    let fn_abi = if let Some(instance) = instance {
        FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), instance, &extra_args)
    } else {
        FnAbi::of_fn_ptr(&RevealAllLayoutCx(fx.tcx), fn_ty.fn_sig(fx.tcx), &extra_args)
    };

    let is_cold = instance
        .map(|inst| fx.tcx.codegen_fn_attrs(inst.def_id()).flags.contains(CodegenFnAttrFlags::COLD))
        .unwrap_or(false);
    if is_cold {
        fx.cold_blocks.insert(current_block);
    }

    // Unpack the arguments tuple for closures.
    let args = if fn_sig.abi == Abi::RustCall {
        assert_eq!(args.len(), 2, "rust-call abi requires two arguments");
        let self_arg = codegen_operand(fx, &args[0]);
        let pack_arg = codegen_operand(fx, &args[1]);

        let tupled_arguments = match pack_arg.layout().ty.kind() {
            ty::Tuple(ref tupled_arguments) => tupled_arguments,
            _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
        };

        let mut args = Vec::with_capacity(1 + tupled_arguments.len());
        args.push(self_arg);
        for i in 0..tupled_arguments.len() {
            args.push(pack_arg.value_field(fx, mir::Field::new(i)));
        }
        args
    } else {
        args.iter().map(|arg| codegen_operand(fx, arg)).collect::<Vec<_>>()
    };

    //   | indirect call target
    //   |         | the first argument to be passed
    //   v         v
    let (func_ref, first_arg) = match instance {
        // Trait object call
        Some(Instance { def: InstanceDef::Virtual(_, idx), .. }) => {
            #[cfg(debug_assertions)]
            {
                let nop_inst = fx.bcx.ins().nop();
                fx.add_comment(
                    nop_inst,
                    format!("virtual call; self arg pass mode: {:?}", &fn_abi.args[0]),
                );
            }
            let (ptr, method) = crate::vtable::get_ptr_and_method_ref(fx, args[0], idx);
            (Some(method), smallvec![ptr])
        }

        // Normal call
        Some(_) => (
            None,
            args.get(0)
                .map(|arg| adjust_arg_for_abi(fx, *arg, &fn_abi.args[0]))
                .unwrap_or(smallvec![]),
        ),

        // Indirect call
        None => {
            #[cfg(debug_assertions)]
            {
                let nop_inst = fx.bcx.ins().nop();
                fx.add_comment(nop_inst, "indirect call");
            }
            let func = codegen_operand(fx, func).load_scalar(fx);
            (
                Some(func),
                args.get(0)
                    .map(|arg| adjust_arg_for_abi(fx, *arg, &fn_abi.args[0]))
                    .unwrap_or(smallvec![]),
            )
        }
    };

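    // Emit the call itself; `codegen_with_call_return_arg` decides whether a pointer to
    // the return place must be prepended to the argument list.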
    let ret_place = destination.map(|(place, _)| place);
    let (call_inst, call_args) = self::returning::codegen_with_call_return_arg(
        fx,
        &fn_abi.ret,
        ret_place,
        |fx, return_ptr| {
            let regular_args_count = args.len();
            let mut call_args: Vec<Value> = return_ptr
                .into_iter()
                .chain(first_arg.into_iter())
                .chain(
                    args.into_iter()
                        .enumerate()
                        .skip(1)
                        .flat_map(|(i, arg)| adjust_arg_for_abi(fx, arg, &fn_abi.args[i])),
                )
                .collect::<Vec<_>>();

            if instance.map(|inst| inst.def.requires_caller_location(fx.tcx)).unwrap_or(false) {
                // Pass the caller location for `#[track_caller]`.
                let caller_location = fx.get_caller_location(span);
                call_args.extend(
                    adjust_arg_for_abi(fx, caller_location, &fn_abi.args[regular_args_count])
                        .into_iter(),
                );
                assert_eq!(fn_abi.args.len(), regular_args_count + 1);
            } else {
                assert_eq!(fn_abi.args.len(), regular_args_count);
            }

            let call_inst = if let Some(func_ref) = func_ref {
                let sig = clif_sig_from_fn_abi(fx.tcx, fx.triple(), &fn_abi);
                let sig = fx.bcx.import_signature(sig);
                fx.bcx.ins().call_indirect(sig, func_ref, &call_args)
            } else {
                let func_ref =
                    fx.get_function_ref(instance.expect("non-indirect call on non-FnDef type"));
                fx.bcx.ins().call(func_ref, &call_args)
            };

            (call_inst, call_args)
        },
    );

    // FIXME find a cleaner way to support varargs
    if fn_sig.c_variadic {
        if !matches!(fn_sig.abi, Abi::C { .. }) {
            fx.tcx.sess.span_fatal(span, &format!("Variadic call for non-C abi {:?}", fn_sig.abi));
        }
        let sig_ref = fx.bcx.func.dfg.call_signature(call_inst).unwrap();
        let abi_params = call_args
            .into_iter()
            .map(|arg| {
                let ty = fx.bcx.func.dfg.value_type(arg);
                if !ty.is_int() {
                    // FIXME set %al to upper bound on float args once floats are supported
                    fx.tcx.sess.span_fatal(span, &format!("Non int ty {:?} for variadic call", ty));
                }
                AbiParam::new(ty)
            })
            .collect::<Vec<AbiParam>>();
        fx.bcx.func.dfg.signatures[sig_ref].params = abi_params;
    }

    if let Some((_, dest)) = destination {
        let ret_block = fx.get_block(dest);
        fx.bcx.ins().jump(ret_block, &[]);
    } else {
        trap_unreachable(fx, "[corruption] Diverging function returned");
    }
}

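/// Drop the value stored in `drop_place`. Empty drop glue is a nop; trait objects are
/// dropped through the drop function stored in their vtable (encoded here as
/// `InstanceDef::Virtual(_, 0)`); everything else calls the resolved `drop_in_place`
/// instance with a mutable reference to the place.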
pub(crate) fn codegen_drop<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    span: Span,
    drop_place: CPlace<'tcx>,
) {
    let ty = drop_place.layout().ty;
    let drop_instance = Instance::resolve_drop_in_place(fx.tcx, ty).polymorphize(fx.tcx);

    if let ty::InstanceDef::DropGlue(_, None) = drop_instance.def {
        // We don't actually need to drop anything.
    } else {
        match ty.kind() {
            ty::Dynamic(..) => {
                let (ptr, vtable) = drop_place.to_ptr_maybe_unsized();
                let ptr = ptr.get_addr(fx);
                let drop_fn = crate::vtable::drop_fn_of_obj(fx, vtable.unwrap());

                // FIXME(eddyb) perhaps move some of this logic into
                // `Instance::resolve_drop_in_place`?
                let virtual_drop = Instance {
                    def: ty::InstanceDef::Virtual(drop_instance.def_id(), 0),
                    substs: drop_instance.substs,
                };
                let fn_abi = FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), virtual_drop, &[]);

                let sig = clif_sig_from_fn_abi(fx.tcx, fx.triple(), &fn_abi);
                let sig = fx.bcx.import_signature(sig);
                fx.bcx.ins().call_indirect(sig, drop_fn, &[ptr]);
            }
            _ => {
                assert!(!matches!(drop_instance.def, InstanceDef::Virtual(_, _)));

                let fn_abi = FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), drop_instance, &[]);

                let arg_value = drop_place.place_ref(
                    fx,
                    fx.layout_of(fx.tcx.mk_ref(
                        &ty::RegionKind::ReErased,
                        TypeAndMut { ty, mutbl: crate::rustc_hir::Mutability::Mut },
                    )),
                );
                let arg_value = adjust_arg_for_abi(fx, arg_value, &fn_abi.args[0]);

                let mut call_args: Vec<Value> = arg_value.into_iter().collect();

                if drop_instance.def.requires_caller_location(fx.tcx) {
                    // Pass the caller location for `#[track_caller]`.
                    let caller_location = fx.get_caller_location(span);
                    call_args.extend(
                        adjust_arg_for_abi(fx, caller_location, &fn_abi.args[1]).into_iter(),
                    );
                }

                let func_ref = fx.get_function_ref(drop_instance);
                fx.bcx.ins().call(func_ref, &call_args);
            }
        }
    }
}