//! Handling of everything related to the calling convention. Also fills `fx.local_map`.

#[cfg(debug_assertions)]
mod comments;
mod pass_mode;
mod returning;

use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::ty::layout::FnAbiExt;
use rustc_target::abi::call::{Conv, FnAbi};
use rustc_target::spec::abi::Abi;

use cranelift_codegen::ir::AbiParam;
use smallvec::smallvec;

use self::pass_mode::*;
use crate::prelude::*;

pub(crate) use self::returning::{can_return_to_ssa_var, codegen_return};

// Copied from https://github.com/rust-lang/rust/blob/f52c72948aa1dd718cc1f168d21c91c584c0a662/src/librustc_middle/ty/layout.rs#L2301
pub(crate) fn fn_sig_for_fn_abi<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> ty::PolyFnSig<'tcx> {
    use rustc_middle::ty::subst::Subst;

    // FIXME(davidtwco,eddyb): A `ParamEnv` should be passed through to this function.
    let ty = instance.ty(tcx, ty::ParamEnv::reveal_all());
    match *ty.kind() {
        ty::FnDef(..) => {
            // HACK(davidtwco,eddyb): This is a workaround for polymorphization considering
            // parameters unused if they show up in the signature, but not in the `mir::Body`
            // (i.e. due to being inside a projection that got normalized, see
            // `src/test/ui/polymorphization/normalized_sig_types.rs`), and codegen not keeping
            // track of a polymorphization `ParamEnv` to allow normalizing later.
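            //
            // Illustrative case (an assumption, not from the original comment): in
            // `fn foo<T: Trait>(x: <T as Trait>::Assoc)`, the projection may normalize
            // to a concrete type in the `mir::Body`, making `T` look unused even
            // though it still appears in the signature.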
            let mut sig = match *ty.kind() {
                ty::FnDef(def_id, substs) => tcx
                    .normalize_erasing_regions(tcx.param_env(def_id), tcx.fn_sig(def_id))
                    .subst(tcx, substs),
                _ => unreachable!(),
            };

            if let ty::InstanceDef::VtableShim(..) = instance.def {
                // Modify `fn(self, ...)` to `fn(self: *mut Self, ...)`.
                sig = sig.map_bound(|mut sig| {
                    let mut inputs_and_output = sig.inputs_and_output.to_vec();
                    inputs_and_output[0] = tcx.mk_mut_ptr(inputs_and_output[0]);
                    sig.inputs_and_output = tcx.intern_type_list(&inputs_and_output);
                    sig
                });
            }
            sig
        }
        ty::Closure(def_id, substs) => {
            let sig = substs.as_closure().sig();

            let env_ty = tcx.closure_env_ty(def_id, substs).unwrap();
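            // Sketch of the transformation (illustrative): a closure `|a: u8| -> u16`
            // with environment type `E` ends up with the signature `fn(E, u8) -> u16`,
            // i.e. the captured environment becomes an implicit first parameter.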
            sig.map_bound(|sig| {
                tcx.mk_fn_sig(
                    std::iter::once(env_ty.skip_binder()).chain(sig.inputs().iter().cloned()),
                    sig.output(),
                    sig.c_variadic,
                    sig.unsafety,
                    sig.abi,
                )
            })
        }
        ty::Generator(_, substs, _) => {
            let sig = substs.as_generator().poly_sig();

            let env_region = ty::ReLateBound(ty::INNERMOST, ty::BoundRegion { kind: ty::BrEnv });
            let env_ty = tcx.mk_mut_ref(tcx.mk_region(env_region), ty);

            let pin_did = tcx.require_lang_item(rustc_hir::LangItem::Pin, None);
            let pin_adt_ref = tcx.adt_def(pin_did);
            let pin_substs = tcx.intern_substs(&[env_ty.into()]);
            let env_ty = tcx.mk_adt(pin_adt_ref, pin_substs);
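
            // At this point `env_ty` is `Pin<&mut GeneratorTy>`, so the signature built
            // below is roughly (illustrative)
            // `fn(Pin<&mut GeneratorTy>, Resume) -> GeneratorState<Yield, Return>`.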
            sig.map_bound(|sig| {
                let state_did = tcx.require_lang_item(rustc_hir::LangItem::GeneratorState, None);
                let state_adt_ref = tcx.adt_def(state_did);
                let state_substs =
                    tcx.intern_substs(&[sig.yield_ty.into(), sig.return_ty.into()]);
                let ret_ty = tcx.mk_adt(state_adt_ref, state_substs);

                tcx.mk_fn_sig(
                    [env_ty, sig.resume_ty].iter(),
                    &ret_ty,
                    false,
                    rustc_hir::Unsafety::Normal,
                    rustc_target::spec::abi::Abi::Rust,
                )
            })
        }
        _ => bug!("unexpected type {:?} in Instance::fn_sig", ty),
    }
}

fn clif_sig_from_fn_abi<'tcx>(
    tcx: TyCtxt<'tcx>,
    triple: &target_lexicon::Triple,
    fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
) -> Signature {
    let call_conv = match fn_abi.conv {
        Conv::Rust | Conv::C => CallConv::triple_default(triple),
        Conv::X86_64SysV => CallConv::SystemV,
        Conv::X86_64Win64 => CallConv::WindowsFastcall,
        Conv::ArmAapcs
        | Conv::Msp430Intr
        | Conv::PtxKernel
        | Conv::X86Fastcall
        | Conv::X86Intr
        | Conv::X86Stdcall
        | Conv::X86ThisCall
        | Conv::X86VectorCall
        | Conv::AmdGpuKernel
        | Conv::AvrInterrupt
        | Conv::AvrNonBlockingInterrupt => {
            todo!("{:?}", fn_abi.conv)
        }
    };

    let inputs = fn_abi
        .args
        .iter()
        .map(|arg_abi| arg_abi.get_abi_param(tcx).into_iter())
        .flatten();

    let (return_ptr, returns) = fn_abi.ret.get_abi_return(tcx);
    // Sometimes the first param is a pointer to the place where the return value needs to be stored.
    let params: Vec<_> = return_ptr.into_iter().chain(inputs).collect();

    Signature {
        params,
        returns,
        call_conv,
    }
}
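
// Illustrative example for `clif_sig_from_fn_abi` (an assumption, not part of the
// original source): on x86_64 Linux, `fn add(a: i32, b: i32) -> i32` yields a
// `Signature` along the lines of `{ params: [i32, i32], returns: [i32] }` with the
// triple's default call convention; a return type too big for registers would
// instead prepend a return place pointer to `params`.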

pub(crate) fn get_function_sig<'tcx>(
    tcx: TyCtxt<'tcx>,
    triple: &target_lexicon::Triple,
    inst: Instance<'tcx>,
) -> Signature {
    assert!(!inst.substs.needs_infer());
    clif_sig_from_fn_abi(
        tcx,
        triple,
        &FnAbi::of_instance(&RevealAllLayoutCx(tcx), inst, &[]),
    )
}

/// Instance must be monomorphized
pub(crate) fn import_function<'tcx>(
    tcx: TyCtxt<'tcx>,
    module: &mut impl Module,
    inst: Instance<'tcx>,
) -> FuncId {
    let name = tcx.symbol_name(inst).name.to_string();
    let sig = get_function_sig(tcx, module.isa().triple(), inst);
    module
        .declare_function(&name, Linkage::Import, &sig)
        .unwrap()
}

impl<'tcx, M: Module> FunctionCx<'_, 'tcx, M> {
    /// Instance must be monomorphized
    pub(crate) fn get_function_ref(&mut self, inst: Instance<'tcx>) -> FuncRef {
        let func_id = import_function(self.tcx, &mut self.cx.module, inst);
        let func_ref = self
            .cx
            .module
            .declare_func_in_func(func_id, &mut self.bcx.func);

        #[cfg(debug_assertions)]
        self.add_comment(func_ref, format!("{:?}", inst));

        func_ref
    }

    pub(crate) fn lib_call(
        &mut self,
        name: &str,
        input_tys: Vec<types::Type>,
        output_tys: Vec<types::Type>,
        args: &[Value],
    ) -> &[Value] {
        let sig = Signature {
            params: input_tys.iter().cloned().map(AbiParam::new).collect(),
            returns: output_tys.iter().cloned().map(AbiParam::new).collect(),
            call_conv: CallConv::triple_default(self.triple()),
        };
        let func_id = self
            .cx
            .module
            .declare_function(&name, Linkage::Import, &sig)
            .unwrap();
        let func_ref = self
            .cx
            .module
            .declare_func_in_func(func_id, &mut self.bcx.func);
        let call_inst = self.bcx.ins().call(func_ref, args);
        #[cfg(debug_assertions)]
        {
            self.add_comment(call_inst, format!("easy_call {}", name));
        }
        let results = self.bcx.inst_results(call_inst);
        assert!(results.len() <= 2, "{}", results.len());
        results
    }

    pub(crate) fn easy_call(
        &mut self,
        name: &str,
        args: &[CValue<'tcx>],
        return_ty: Ty<'tcx>,
    ) -> CValue<'tcx> {
        let (input_tys, args): (Vec<_>, Vec<_>) = args
            .iter()
            .map(|arg| {
                (
                    self.clif_type(arg.layout().ty).unwrap(),
                    arg.load_scalar(self),
                )
            })
            .unzip();
        let return_layout = self.layout_of(return_ty);
        let return_tys = if let ty::Tuple(tup) = return_ty.kind() {
            tup.types().map(|ty| self.clif_type(ty).unwrap()).collect()
        } else {
            vec![self.clif_type(return_ty).unwrap()]
        };
        let ret_vals = self.lib_call(name, input_tys, return_tys, &args);
        match *ret_vals {
            [] => CValue::by_ref(
                Pointer::const_addr(self, i64::from(self.pointer_type.bytes())),
                return_layout,
            ),
            [val] => CValue::by_val(val, return_layout),
            [val, extra] => CValue::by_val_pair(val, extra, return_layout),
            _ => unreachable!(),
        }
    }
}
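
// Usage sketch for `easy_call` (hedged: mirrors how this backend lowers `f64 % f64`
// elsewhere; `lhs` and `rhs` are hypothetical `CValue`s of type `f64`):
//
//     let res = fx.easy_call("fmod", &[lhs, rhs], fx.tcx.types.f64);
//
// This declares `fmod` as an imported libcall, loads both scalars, emits the call
// and wraps the single result back up as a `CValue`.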

/// Make a [`CPlace`] capable of holding a value of the specified type.
fn make_local_place<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Module>,
    local: Local,
    layout: TyAndLayout<'tcx>,
    is_ssa: bool,
) -> CPlace<'tcx> {
    let place = if is_ssa {
        if let rustc_target::abi::Abi::ScalarPair(_, _) = layout.abi {
            CPlace::new_var_pair(fx, local, layout)
        } else {
            CPlace::new_var(fx, local, layout)
        }
    } else {
        CPlace::new_stack_slot(fx, layout)
    };

    #[cfg(debug_assertions)]
    self::comments::add_local_place_comments(fx, place, local);

    place
}
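
// For example (illustrative): an SSA-eligible `&[u8]` local has a `ScalarPair` ABI
// (data pointer + length) and gets two Cranelift `Variable`s via `new_var_pair`,
// an SSA `u64` gets a single `Variable`, and everything else falls back to a stack
// slot.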

pub(crate) fn codegen_fn_prelude<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Module>,
    start_block: Block,
) {
    let ssa_analyzed = crate::analyze::analyze(fx);

    #[cfg(debug_assertions)]
    self::comments::add_args_header_comment(fx);

    let ret_place = self::returning::codegen_return_param(fx, &ssa_analyzed, start_block);
    assert_eq!(fx.local_map.push(ret_place), RETURN_PLACE);

    // None means pass_mode == NoPass
    enum ArgKind<'tcx> {
        Normal(Option<CValue<'tcx>>),
        Spread(Vec<Option<CValue<'tcx>>>),
    }

    let fn_abi = fx.fn_abi.take().unwrap();
    let mut arg_abis_iter = fn_abi.args.iter();

    let func_params = fx
        .mir
        .args_iter()
        .map(|local| {
            let arg_ty = fx.monomorphize(fx.mir.local_decls[local].ty);

            // Adapted from https://github.com/rust-lang/rust/blob/145155dc96757002c7b2e9de8489416e2fdbbd57/src/librustc_codegen_llvm/mir/mod.rs#L442-L482
            if Some(local) == fx.mir.spread_arg {
                // This argument (e.g. the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual function arguments.
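                //
                // Sketch (an assumption, not from the original comment): for
                // `extern "rust-call" fn call(self, args: (u8, u16))`, the ABI-level
                // parameters are `self, u8, u16`; the trailing `u8` and `u16` are
                // written back into one tuple local of type `(u8, u16)`.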

                let tupled_arg_tys = match arg_ty.kind() {
                    ty::Tuple(ref tys) => tys,
                    _ => bug!("spread argument isn't a tuple?! but {:?}", arg_ty),
                };

                let mut params = Vec::new();
                for (i, _arg_ty) in tupled_arg_tys.types().enumerate() {
                    let arg_abi = arg_abis_iter.next().unwrap();
                    let param = cvalue_for_param(fx, start_block, Some(local), Some(i), arg_abi);
                    params.push(param);
                }

                (local, ArgKind::Spread(params), arg_ty)
            } else {
                let arg_abi = arg_abis_iter.next().unwrap();
                let param = cvalue_for_param(fx, start_block, Some(local), None, arg_abi);
                (local, ArgKind::Normal(param), arg_ty)
            }
        })
        .collect::<Vec<(Local, ArgKind<'tcx>, Ty<'tcx>)>>();

    assert!(fx.caller_location.is_none());
    if fx.instance.def.requires_caller_location(fx.tcx) {
        // Store caller location for `#[track_caller]`.
        let arg_abi = arg_abis_iter.next().unwrap();
        fx.caller_location = Some(cvalue_for_param(fx, start_block, None, None, arg_abi).unwrap());
    }

    assert!(arg_abis_iter.next().is_none(), "ArgAbi left behind");
    fx.fn_abi = Some(fn_abi);

    fx.bcx.switch_to_block(start_block);
    fx.bcx.ins().nop();

    #[cfg(debug_assertions)]
    self::comments::add_locals_header_comment(fx);

    for (local, arg_kind, ty) in func_params {
        let layout = fx.layout_of(ty);

        let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;

        // While this is normally an optimization to prevent an unnecessary copy when an argument is
        // not mutated by the current function, this is necessary to support unsized arguments.
        if let ArgKind::Normal(Some(val)) = arg_kind {
            if let Some((addr, meta)) = val.try_to_ptr() {
                let local_decl = &fx.mir.local_decls[local];
                // Note the `!`: the argument counts as internally mutable when its
                // type is *not* `Freeze`, i.e. it has interior mutability.
                let internally_mutable = !val.layout().ty.is_freeze(
                    fx.tcx.at(local_decl.source_info.span),
                    ParamEnv::reveal_all(),
                );
                if local_decl.mutability == mir::Mutability::Not && !internally_mutable {
                    // We won't mutate this argument, so it is fine to borrow the backing storage
                    // of this argument, to prevent a copy.

                    let place = if let Some(meta) = meta {
                        CPlace::for_ptr_with_extra(addr, meta, val.layout())
                    } else {
                        CPlace::for_ptr(addr, val.layout())
                    };

                    #[cfg(debug_assertions)]
                    self::comments::add_local_place_comments(fx, place, local);

                    assert_eq!(fx.local_map.push(place), local);
                    continue;
                }
            }
        }

        let place = make_local_place(fx, local, layout, is_ssa);
        assert_eq!(fx.local_map.push(place), local);

        match arg_kind {
            ArgKind::Normal(param) => {
                if let Some(param) = param {
                    place.write_cvalue(fx, param);
                }
            }
            ArgKind::Spread(params) => {
                for (i, param) in params.into_iter().enumerate() {
                    if let Some(param) = param {
                        place
                            .place_field(fx, mir::Field::new(i))
                            .write_cvalue(fx, param);
                    }
                }
            }
        }
    }

    for local in fx.mir.vars_and_temps_iter() {
        let ty = fx.monomorphize(fx.mir.local_decls[local].ty);
        let layout = fx.layout_of(ty);

        let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;

        let place = make_local_place(fx, local, layout, is_ssa);
        assert_eq!(fx.local_map.push(place), local);
    }

    fx.bcx
        .ins()
        .jump(*fx.block_map.get(START_BLOCK).unwrap(), &[]);
}

pub(crate) fn codegen_terminator_call<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Module>,
    span: Span,
    current_block: Block,
    func: &Operand<'tcx>,
    args: &[Operand<'tcx>],
    destination: Option<(Place<'tcx>, BasicBlock)>,
) {
    let fn_ty = fx.monomorphize(func.ty(fx.mir, fx.tcx));
    let fn_sig = fx
        .tcx
        .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), fn_ty.fn_sig(fx.tcx));

    let destination = destination.map(|(place, bb)| (codegen_place(fx, place), bb));

    // Handle special calls like intrinsics and empty drop glue.
    let instance = if let ty::FnDef(def_id, substs) = *fn_ty.kind() {
        let instance = ty::Instance::resolve(fx.tcx, ty::ParamEnv::reveal_all(), def_id, substs)
            .unwrap()
            .unwrap()
            .polymorphize(fx.tcx);

        if fx.tcx.symbol_name(instance).name.starts_with("llvm.") {
            crate::intrinsics::codegen_llvm_intrinsic_call(
                fx,
                &fx.tcx.symbol_name(instance).name,
                substs,
                args,
                destination,
            );
            return;
        }

        match instance.def {
            InstanceDef::Intrinsic(_) => {
                crate::intrinsics::codegen_intrinsic_call(fx, instance, args, destination, span);
                return;
            }
            InstanceDef::DropGlue(_, None) => {
                // Empty drop glue: a no-op.
                let (_, dest) = destination.expect("drop glue terminator must have a destination");
                let ret_block = fx.get_block(dest);
                fx.bcx.ins().jump(ret_block, &[]);
                return;
            }
            _ => Some(instance),
        }
    } else {
        None
    };

    let extra_args = &args[fn_sig.inputs().len()..];
    let extra_args = extra_args
        .iter()
        .map(|op_arg| fx.monomorphize(op_arg.ty(fx.mir, fx.tcx)))
        .collect::<Vec<_>>();
    let fn_abi = if let Some(instance) = instance {
        FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), instance, &extra_args)
    } else {
        FnAbi::of_fn_ptr(
            &RevealAllLayoutCx(fx.tcx),
            fn_ty.fn_sig(fx.tcx),
            &extra_args,
        )
    };

    let is_cold = instance
        .map(|inst| {
            fx.tcx
                .codegen_fn_attrs(inst.def_id())
                .flags
                .contains(CodegenFnAttrFlags::COLD)
        })
        .unwrap_or(false);
    if is_cold {
        fx.cold_blocks.insert(current_block);
    }

    // Unpack arguments tuple for closures
    let args = if fn_sig.abi == Abi::RustCall {
        assert_eq!(args.len(), 2, "rust-call abi requires two arguments");
        let self_arg = codegen_operand(fx, &args[0]);
        let pack_arg = codegen_operand(fx, &args[1]);

        let tupled_arguments = match pack_arg.layout().ty.kind() {
            ty::Tuple(ref tupled_arguments) => tupled_arguments,
            _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
        };

        let mut args = Vec::with_capacity(1 + tupled_arguments.len());
        args.push(self_arg);
        for i in 0..tupled_arguments.len() {
            args.push(pack_arg.value_field(fx, mir::Field::new(i)));
        }
        args
    } else {
        args.iter()
            .map(|arg| codegen_operand(fx, arg))
            .collect::<Vec<_>>()
    };
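
    // Caller-side mirror of the tuple spreading in `codegen_fn_prelude` (sketch):
    // a call through `Fn::call(f, (a, b))` reaches this point with MIR arguments
    // `[f, (a, b)]` and leaves the block above as `[f, a, b]`.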

    //   | indirect call target
    //   |         | the first argument to be passed
    //   v         v          v virtual calls are special cased below
    let (func_ref, first_arg) = match instance {
        // Trait object call
        Some(Instance {
            def: InstanceDef::Virtual(_, idx),
            ..
        }) => {
            #[cfg(debug_assertions)]
            {
                let nop_inst = fx.bcx.ins().nop();
                fx.add_comment(
                    nop_inst,
                    format!("virtual call; self arg pass mode: {:?}", &fn_abi.args[0]),
                );
            }
            let (ptr, method) = crate::vtable::get_ptr_and_method_ref(fx, args[0], idx);
            (Some(method), smallvec![ptr])
        }

        // Normal call
        Some(_) => (
            None,
            args.get(0)
                .map(|arg| adjust_arg_for_abi(fx, *arg, &fn_abi.args[0]))
                .unwrap_or(smallvec![]),
        ),

        // Indirect call
        None => {
            #[cfg(debug_assertions)]
            {
                let nop_inst = fx.bcx.ins().nop();
                fx.add_comment(nop_inst, "indirect call");
            }
            let func = codegen_operand(fx, func).load_scalar(fx);
            (
                Some(func),
                args.get(0)
                    .map(|arg| adjust_arg_for_abi(fx, *arg, &fn_abi.args[0]))
                    .unwrap_or(smallvec![]),
            )
        }
    };

    let ret_place = destination.map(|(place, _)| place);
    let (call_inst, call_args) = self::returning::codegen_with_call_return_arg(
        fx,
        &fn_abi.ret,
        ret_place,
        |fx, return_ptr| {
            let regular_args_count = args.len();
            let mut call_args: Vec<Value> = return_ptr
                .into_iter()
                .chain(first_arg.into_iter())
                .chain(
                    args.into_iter()
                        .enumerate()
                        .skip(1)
                        .map(|(i, arg)| adjust_arg_for_abi(fx, arg, &fn_abi.args[i]).into_iter())
                        .flatten(),
                )
                .collect::<Vec<_>>();

            if instance
                .map(|inst| inst.def.requires_caller_location(fx.tcx))
                .unwrap_or(false)
            {
                // Pass the caller location for `#[track_caller]`.
                let caller_location = fx.get_caller_location(span);
                call_args.extend(
                    adjust_arg_for_abi(fx, caller_location, &fn_abi.args[regular_args_count])
                        .into_iter(),
                );
                assert_eq!(fn_abi.args.len(), regular_args_count + 1);
            } else {
                assert_eq!(fn_abi.args.len(), regular_args_count);
            }

            let call_inst = if let Some(func_ref) = func_ref {
                let sig = clif_sig_from_fn_abi(fx.tcx, fx.triple(), &fn_abi);
                let sig = fx.bcx.import_signature(sig);
                fx.bcx.ins().call_indirect(sig, func_ref, &call_args)
            } else {
                let func_ref =
                    fx.get_function_ref(instance.expect("non-indirect call on non-FnDef type"));
                fx.bcx.ins().call(func_ref, &call_args)
            };

            (call_inst, call_args)
        },
    );
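
    // Final argument order assembled above (sketch): return place pointer (only for
    // indirect returns), then `first_arg`, then the remaining arguments, then the
    // caller location (only for `#[track_caller]` callees).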

    // FIXME find a cleaner way to support varargs
    if fn_sig.c_variadic {
        if fn_sig.abi != Abi::C {
            fx.tcx.sess.span_fatal(
                span,
                &format!("variadic call for non-C abi {:?}", fn_sig.abi),
            );
        }
        let sig_ref = fx.bcx.func.dfg.call_signature(call_inst).unwrap();
        let abi_params = call_args
            .into_iter()
            .map(|arg| {
                let ty = fx.bcx.func.dfg.value_type(arg);
                if !ty.is_int() {
                    // FIXME set %al to upperbound on float args once floats are supported
                    fx.tcx
                        .sess
                        .span_fatal(span, &format!("non-int type {:?} for variadic call", ty));
                }
                AbiParam::new(ty)
            })
            .collect::<Vec<AbiParam>>();
        fx.bcx.func.dfg.signatures[sig_ref].params = abi_params;
    }

    if let Some((_, dest)) = destination {
        let ret_block = fx.get_block(dest);
        fx.bcx.ins().jump(ret_block, &[]);
    } else {
        trap_unreachable(fx, "[corruption] Diverging function returned");
    }
}
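
// Sketch of the varargs fix-up above (illustrative): for `printf("%d\n", 42)` the
// imported signature only covers the fixed parameters, so the call's signature in
// the DFG is rewritten to match the values actually passed, e.g.
// `params: [i64 (fmt ptr), i32 (42)]` on x86_64.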

pub(crate) fn codegen_drop<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Module>,
    span: Span,
    drop_place: CPlace<'tcx>,
) {
    let ty = drop_place.layout().ty;
    let drop_instance = Instance::resolve_drop_in_place(fx.tcx, ty).polymorphize(fx.tcx);

    if let ty::InstanceDef::DropGlue(_, None) = drop_instance.def {
        // we don't actually need to drop anything
    } else {
        match ty.kind() {
            ty::Dynamic(..) => {
                let (ptr, vtable) = drop_place.to_ptr_maybe_unsized();
                let ptr = ptr.get_addr(fx);
                let drop_fn = crate::vtable::drop_fn_of_obj(fx, vtable.unwrap());
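
                // `Virtual(_, 0)` below relies on the drop glue occupying the first
                // vtable slot, matching the layout built in `crate::vtable` (slot 0
                // is the drop fn, followed by size and align).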

                // FIXME(eddyb) perhaps move some of this logic into
                // `Instance::resolve_drop_in_place`?
                let virtual_drop = Instance {
                    def: ty::InstanceDef::Virtual(drop_instance.def_id(), 0),
                    substs: drop_instance.substs,
                };
                let fn_abi = FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), virtual_drop, &[]);

                let sig = clif_sig_from_fn_abi(fx.tcx, fx.triple(), &fn_abi);
                let sig = fx.bcx.import_signature(sig);
                fx.bcx.ins().call_indirect(sig, drop_fn, &[ptr]);
            }
            _ => {
                assert!(!matches!(drop_instance.def, InstanceDef::Virtual(_, _)));

                let fn_abi = FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), drop_instance, &[]);

                let arg_value = drop_place.place_ref(
                    fx,
                    fx.layout_of(fx.tcx.mk_ref(
                        &ty::RegionKind::ReErased,
                        TypeAndMut {
                            ty,
                            mutbl: crate::rustc_hir::Mutability::Mut,
                        },
                    )),
                );
                let arg_value = adjust_arg_for_abi(fx, arg_value, &fn_abi.args[0]);

                let mut call_args: Vec<Value> = arg_value.into_iter().collect::<Vec<_>>();

                if drop_instance.def.requires_caller_location(fx.tcx) {
                    // Pass the caller location for `#[track_caller]`.
                    let caller_location = fx.get_caller_location(span);
                    call_args.extend(
                        adjust_arg_for_abi(fx, caller_location, &fn_abi.args[1]).into_iter(),
                    );
                }

                let func_ref = fx.get_function_ref(drop_instance);
                fx.bcx.ins().call(func_ref, &call_args);
            }
        }
    }
}