1 //! Handling of everything related to the calling convention. Also fills `fx.local_map`.
7 use cranelift_module::ModuleError;
8 use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
9 use rustc_middle::ty::layout::FnAbiOf;
10 use rustc_target::abi::call::{Conv, FnAbi};
11 use rustc_target::spec::abi::Abi;
13 use cranelift_codegen::ir::{AbiParam, SigRef};
15 use self::pass_mode::*;
16 use crate::prelude::*;
18 pub(crate) use self::returning::codegen_return;
// Build a Cranelift `Signature` from a rustc `FnAbi`: translate the calling
// convention, flatten each argument's lowered ABI params, and prepend the
// hidden return-place pointer when the return value is passed indirectly.
// NOTE(review): this listing has numbering gaps — the `tcx` parameter, the
// return-type line, and the closing brace are not visible here.
20 fn clif_sig_from_fn_abi<'tcx>(
22 default_call_conv: CallConv,
23 fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
// Map rustc's `Conv` onto a Cranelift `CallConv`, falling back to the
// target's default convention for Rust/C calls.
25 let call_conv = conv_to_call_conv(fn_abi.conv, default_call_conv);
// Each Rust-level argument may lower to zero, one, or two ABI params.
27 let inputs = fn_abi.args.iter().map(|arg_abi| arg_abi.get_abi_param(tcx).into_iter()).flatten();
29 let (return_ptr, returns) = fn_abi.ret.get_abi_return(tcx);
30 // Sometimes the first param is a pointer to the place where the return value needs to be stored.
31 let params: Vec<_> = return_ptr.into_iter().chain(inputs).collect();
33 Signature { params, returns, call_conv }
// Translate a rustc calling convention (`Conv`) into the Cranelift `CallConv`
// used to emit the actual call. NOTE(review): several `Conv` match arms
// (original lines 42–52) are missing from this listing.
36 pub(crate) fn conv_to_call_conv(c: Conv, default_call_conv: CallConv) -> CallConv {
// Rust and C calls both use the target's default convention.
38 Conv::Rust | Conv::C => default_call_conv,
39 Conv::RustCold => CallConv::Cold,
40 Conv::X86_64SysV => CallConv::SystemV,
41 Conv::X86_64Win64 => CallConv::WindowsFastcall,
// The remaining conventions are not implemented yet; hitting one aborts.
43 | Conv::CCmseNonSecureCall
53 | Conv::AvrNonBlockingInterrupt => todo!("{:?}", c),
// Compute the Cranelift signature for a monomorphized `Instance` by querying
// its `FnAbi` with all generic parameters revealed (no extra args).
57 pub(crate) fn get_function_sig<'tcx>(
59 default_call_conv: CallConv,
// The instance must be fully monomorphized — no inference variables left.
62 assert!(!inst.substs.needs_infer());
66 &RevealAllLayoutCx(tcx).fn_abi_of_instance(inst, ty::List::empty()),
70 /// Instance must be monomorphized
/// Declares the instance's symbol as an imported function in `module` and
/// returns its `FuncId`, emitting a fatal diagnostic if the symbol was
/// already declared incompatibly (as a static or with another signature).
71 pub(crate) fn import_function<'tcx>(
73 module: &mut dyn Module,
76 let name = tcx.symbol_name(inst).name;
77 let sig = get_function_sig(tcx, module.target_config().default_call_conv, inst);
78 match module.declare_function(name, Linkage::Import, &sig) {
79 Ok(func_id) => func_id,
// The same symbol was previously declared as a data object — a user error
// (e.g. conflicting `#[no_mangle]`/`#[export_name]` items).
80 Err(ModuleError::IncompatibleDeclaration(_)) => tcx.sess.fatal(&format!(
81 "attempt to declare `{name}` as function, but it was already declared as static"
83 Err(ModuleError::IncompatibleSignature(_, prev_sig, new_sig)) => tcx.sess.fatal(&format!(
84 "attempt to declare `{name}` with signature {new_sig:?}, \
85 but it was already declared with signature {prev_sig:?}"
// Any other module error is unexpected here; surface it via unwrap (ICE).
87 Err(err) => Err::<_, _>(err).unwrap(),
91 impl<'tcx> FunctionCx<'_, '_, 'tcx> {
92 /// Instance must be monomorphized
/// Imports `inst` into the current module and returns a `FuncRef` usable
/// from within the function currently being built.
93 pub(crate) fn get_function_ref(&mut self, inst: Instance<'tcx>) -> FuncRef {
94 let func_id = import_function(self.tcx, self.module, inst);
95 let func_ref = self.module.declare_func_in_func(func_id, &mut self.bcx.func);
// Annotate the generated clif IR with the instance for debugging output.
97 if self.clif_comments.enabled() {
98 self.add_comment(func_ref, format!("{:?}", inst));
// Call a runtime/library helper by symbol name with an explicit Cranelift
// signature; returns the call's result values (at most two).
104 pub(crate) fn lib_call(
107 params: Vec<AbiParam>,
108 returns: Vec<AbiParam>,
// Library helpers always use the target's default calling convention.
111 let sig = Signature { params, returns, call_conv: self.target_config.default_call_conv };
112 let func_id = self.module.declare_function(name, Linkage::Import, &sig).unwrap();
113 let func_ref = self.module.declare_func_in_func(func_id, &mut self.bcx.func);
114 if self.clif_comments.enabled() {
115 self.add_comment(func_ref, format!("{:?}", name));
117 let call_inst = self.bcx.ins().call(func_ref, args);
118 if self.clif_comments.enabled() {
119 self.add_comment(call_inst, format!("easy_call {}", name));
121 let results = self.bcx.inst_results(call_inst);
// Only scalar or scalar-pair returns are supported by the callers below.
122 assert!(results.len() <= 2, "{}", results.len());
// Convenience wrapper over `lib_call`: derives the `AbiParam`s from the Rust
// types of `args`/`return_ty`, loads each argument as a scalar, and repacks
// the raw result values into a `CValue` of `return_ty`.
126 pub(crate) fn easy_call(
129 args: &[CValue<'tcx>],
132 let (input_tys, args): (Vec<_>, Vec<_>) = args
135 (AbiParam::new(self.clif_type(arg.layout().ty).unwrap()), arg.load_scalar(self))
138 let return_layout = self.layout_of(return_ty);
// Tuples map to one ABI return per element; anything else to one scalar.
139 let return_tys = if let ty::Tuple(tup) = return_ty.kind() {
140 tup.iter().map(|ty| AbiParam::new(self.clif_type(ty).unwrap())).collect()
142 vec![AbiParam::new(self.clif_type(return_ty).unwrap())]
144 let ret_vals = self.lib_call(name, input_tys, return_tys, &args);
// No result values: build a by-ref value at an aligned dummy address
// (pointer-size), which is fine for a ZST-like return.
146 [] => CValue::by_ref(
147 Pointer::const_addr(self, i64::from(self.pointer_type.bytes())),
150 [val] => CValue::by_val(val, return_layout),
151 [val, extra] => CValue::by_val_pair(val, extra, return_layout),
157 /// Make a [`CPlace`] capable of holding value of the specified type.
///
/// SSA-eligible locals become Cranelift variables (a pair of variables for
/// `ScalarPair` layouts, a single variable otherwise); all other locals get
/// a stack slot. Also emits a place comment when clif comments are enabled.
158 fn make_local_place<'tcx>(
159 fx: &mut FunctionCx<'_, '_, 'tcx>,
161 layout: TyAndLayout<'tcx>,
164 let place = if is_ssa {
165 if let rustc_target::abi::Abi::ScalarPair(_, _) = layout.abi {
166 CPlace::new_var_pair(fx, local, layout)
168 CPlace::new_var(fx, local, layout)
171 CPlace::new_stack_slot(fx, layout)
174 self::comments::add_local_place_comments(fx, place, local);
// Emit the function prelude: bind incoming block params to the return place
// and every argument, then allocate a `CPlace` for every local, filling
// `fx.local_map` in `Local` order (RETURN_PLACE, args, then vars/temps).
179 pub(crate) fn codegen_fn_prelude<'tcx>(fx: &mut FunctionCx<'_, '_, 'tcx>, start_block: Block) {
180 fx.bcx.append_block_params_for_function_params(start_block);
182 fx.bcx.switch_to_block(start_block);
// Decide per-local whether it can live in SSA variables or needs a stack slot.
185 let ssa_analyzed = crate::analyze::analyze(fx);
187 self::comments::add_args_header_comment(fx);
// Incoming ABI-level block params, consumed in order by the return param,
// the argument params, and (optionally) the caller-location param below.
189 let mut block_params_iter = fx.bcx.func.dfg.block_params(start_block).to_vec().into_iter();
191 self::returning::codegen_return_param(fx, &ssa_analyzed, &mut block_params_iter);
// local_map is filled in `Local` index order, starting with RETURN_PLACE.
192 assert_eq!(fx.local_map.push(ret_place), RETURN_PLACE);
194 // None means pass_mode == NoPass
196 Normal(Option<CValue<'tcx>>),
197 Spread(Vec<Option<CValue<'tcx>>>),
// Temporarily take the FnAbi out of fx to avoid borrow conflicts; it is put
// back once all ArgAbis have been consumed.
200 let fn_abi = fx.fn_abi.take().unwrap();
202 // FIXME implement variadics in cranelift
203 if fn_abi.c_variadic {
204 fx.tcx.sess.span_fatal(
206 "Defining variadic functions is not yet supported by Cranelift",
210 let mut arg_abis_iter = fn_abi.args.iter();
216 let arg_ty = fx.monomorphize(fx.mir.local_decls[local].ty);
218 // Adapted from https://github.com/rust-lang/rust/blob/145155dc96757002c7b2e9de8489416e2fdbbd57/src/librustc_codegen_llvm/mir/mod.rs#L442-L482
219 if Some(local) == fx.mir.spread_arg {
220 // This argument (e.g. the last argument in the "rust-call" ABI)
221 // is a tuple that was spread at the ABI level and now we have
222 // to reconstruct it into a tuple local variable, from multiple
223 // individual function arguments.
225 let tupled_arg_tys = match arg_ty.kind() {
226 ty::Tuple(ref tys) => tys,
227 _ => bug!("spread argument isn't a tuple?! but {:?}", arg_ty),
// One ArgAbi (and one or more block params) per tuple element.
230 let mut params = Vec::new();
231 for (i, _arg_ty) in tupled_arg_tys.iter().enumerate() {
232 let arg_abi = arg_abis_iter.next().unwrap();
234 cvalue_for_param(fx, Some(local), Some(i), arg_abi, &mut block_params_iter);
238 (local, ArgKind::Spread(params), arg_ty)
240 let arg_abi = arg_abis_iter.next().unwrap();
242 cvalue_for_param(fx, Some(local), None, arg_abi, &mut block_params_iter);
243 (local, ArgKind::Normal(param), arg_ty)
246 .collect::<Vec<(Local, ArgKind<'tcx>, Ty<'tcx>)>>();
248 assert!(fx.caller_location.is_none());
249 if fx.instance.def.requires_caller_location(fx.tcx) {
250 // Store caller location for `#[track_caller]`.
251 let arg_abi = arg_abis_iter.next().unwrap();
253 Some(cvalue_for_param(fx, None, None, arg_abi, &mut block_params_iter).unwrap());
// Sanity: every ArgAbi and every incoming block param must be consumed.
256 assert!(arg_abis_iter.next().is_none(), "ArgAbi left behind");
257 fx.fn_abi = Some(fn_abi);
258 assert!(block_params_iter.next().is_none(), "arg_value left behind");
260 self::comments::add_locals_header_comment(fx);
262 for (local, arg_kind, ty) in func_params {
263 let layout = fx.layout_of(ty);
265 let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;
267 // While this is normally an optimization to prevent an unnecessary copy when an argument is
268 // not mutated by the current function, this is necessary to support unsized arguments.
269 if let ArgKind::Normal(Some(val)) = arg_kind {
270 if let Some((addr, meta)) = val.try_to_ptr() {
271 // Ownership of the value at the backing storage for an argument is passed to the
272 // callee per the ABI, so it is fine to borrow the backing storage of this argument
273 // to prevent a copy.
275 let place = if let Some(meta) = meta {
// `meta` present means the argument is unsized (fat pointer metadata).
276 CPlace::for_ptr_with_extra(addr, meta, val.layout())
278 CPlace::for_ptr(addr, val.layout())
281 self::comments::add_local_place_comments(fx, place, local);
283 assert_eq!(fx.local_map.push(place), local);
// Otherwise allocate a fresh place and copy the argument value(s) into it.
288 let place = make_local_place(fx, local, layout, is_ssa);
289 assert_eq!(fx.local_map.push(place), local);
292 ArgKind::Normal(param) => {
293 if let Some(param) = param {
294 place.write_cvalue(fx, param);
297 ArgKind::Spread(params) => {
// Reassemble the spread tuple field by field.
298 for (i, param) in params.into_iter().enumerate() {
299 if let Some(param) = param {
300 place.place_field(fx, mir::Field::new(i)).write_cvalue(fx, param);
// Allocate places for all remaining (non-argument) locals.
307 for local in fx.mir.vars_and_temps_iter() {
308 let ty = fx.monomorphize(fx.mir.local_decls[local].ty);
309 let layout = fx.layout_of(ty);
311 let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;
313 let place = make_local_place(fx, local, layout, is_ssa);
314 assert_eq!(fx.local_map.push(place), local);
// Hand off to the MIR body: jump to the block corresponding to START_BLOCK.
317 fx.bcx.ins().jump(*fx.block_map.get(START_BLOCK).unwrap(), &[]);
// An outgoing call argument plus whether ownership of its backing storage is
// transferred to the callee (true exactly for `Operand::Move`).
320 struct CallArgument<'tcx> {
325 // FIXME avoid intermediate `CValue` before calling `adjust_arg_for_abi`
// Lower a MIR call operand into a `CallArgument`.
326 fn codegen_call_argument_operand<'tcx>(
327 fx: &mut FunctionCx<'_, '_, 'tcx>,
328 operand: &Operand<'tcx>,
329 ) -> CallArgument<'tcx> {
331 value: codegen_operand(fx, operand),
// Only `Operand::Move` passes ownership; `Copy`/`Constant` do not.
332 is_owned: matches!(operand, Operand::Move(_)),
// Lower a MIR `Call` terminator. Special-cases LLVM intrinsics, rustc
// intrinsics, and empty drop glue; otherwise unpacks "rust-call" tuples,
// appends the caller location for `#[track_caller]`, marks cold calls, and
// emits a direct, indirect, or virtual (vtable) call, finally jumping to the
// target block (or trapping if the call diverges).
336 pub(crate) fn codegen_terminator_call<'tcx>(
337 fx: &mut FunctionCx<'_, '_, 'tcx>,
338 source_info: mir::SourceInfo,
339 func: &Operand<'tcx>,
340 args: &[Operand<'tcx>],
341 destination: Place<'tcx>,
342 target: Option<BasicBlock>,
344 let func = codegen_operand(fx, func);
345 let fn_sig = func.layout().ty.fn_sig(fx.tcx);
347 let ret_place = codegen_place(fx, destination);
349 // Handle special calls like intrinsics and empty drop glue.
// A `FnDef` callee can be resolved to a concrete instance at compile time;
// fn pointers fall through with `instance == None`.
350 let instance = if let ty::FnDef(def_id, substs) = *func.layout().ty.kind() {
352 ty::Instance::expect_resolve(fx.tcx, ty::ParamEnv::reveal_all(), def_id, substs)
353 .polymorphize(fx.tcx);
// Symbols named `llvm.*` are LLVM intrinsics emulated by cg_clif.
355 if fx.tcx.symbol_name(instance).name.starts_with("llvm.") {
356 crate::intrinsics::codegen_llvm_intrinsic_call(
358 &fx.tcx.symbol_name(instance).name,
368 InstanceDef::Intrinsic(_) => {
369 crate::intrinsics::codegen_intrinsic_call(
379 InstanceDef::DropGlue(_, None) => {
380 // empty drop glue - a nop.
381 let dest = target.expect("Non terminating drop_in_place_real???");
382 let ret_block = fx.get_block(dest);
383 fx.bcx.ins().jump(ret_block, &[]);
// C-variadic calls pass extra arguments beyond the declared inputs.
392 let extra_args = &args[fn_sig.inputs().skip_binder().len()..];
395 .mk_type_list(extra_args.iter().map(|op_arg| fx.monomorphize(op_arg.ty(fx.mir, fx.tcx))));
396 let fn_abi = if let Some(instance) = instance {
397 RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(instance, extra_args)
399 RevealAllLayoutCx(fx.tcx).fn_abi_of_fn_ptr(fn_sig, extra_args)
// A call is cold if the callee uses the rust-cold ABI or is `#[cold]`.
402 let is_cold = if fn_sig.abi() == Abi::RustCold {
407 fx.tcx.codegen_fn_attrs(inst.def_id()).flags.contains(CodegenFnAttrFlags::COLD)
// Mark both the calling block and the return block cold for layout purposes.
412 fx.bcx.set_cold_block(fx.bcx.current_block().unwrap());
413 if let Some(destination_block) = target {
414 fx.bcx.set_cold_block(fx.get_block(destination_block));
418 // Unpack arguments tuple for closures
419 let mut args = if fn_sig.abi() == Abi::RustCall {
420 assert_eq!(args.len(), 2, "rust-call abi requires two arguments");
421 let self_arg = codegen_call_argument_operand(fx, &args[0]);
422 let pack_arg = codegen_call_argument_operand(fx, &args[1]);
424 let tupled_arguments = match pack_arg.value.layout().ty.kind() {
425 ty::Tuple(ref tupled_arguments) => tupled_arguments,
426 _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
429 let mut args = Vec::with_capacity(1 + tupled_arguments.len());
// Spread the tuple into individual field arguments, sharing ownership
// semantics with the packed argument.
431 for i in 0..tupled_arguments.len() {
432 args.push(CallArgument {
433 value: pack_arg.value.value_field(fx, mir::Field::new(i)),
434 is_owned: pack_arg.is_owned,
439 args.iter().map(|arg| codegen_call_argument_operand(fx, arg)).collect::<Vec<_>>()
442 // Pass the caller location for `#[track_caller]`.
443 if instance.map(|inst| inst.def.requires_caller_location(fx.tcx)).unwrap_or(false) {
444 let caller_location = fx.get_caller_location(source_info);
445 args.push(CallArgument { value: caller_location, is_owned: false });
449 assert_eq!(fn_abi.args.len(), args.len());
453 Indirect(SigRef, Value),
// Pick the call target: virtual (vtable) dispatch, direct call to a known
// instance, or an indirect call through a function pointer value.
456 let (func_ref, first_arg_override) = match instance {
458 Some(Instance { def: InstanceDef::Virtual(_, idx), .. }) => {
459 if fx.clif_comments.enabled() {
460 let nop_inst = fx.bcx.ins().nop();
463 format!("virtual call; self arg pass mode: {:?}", &fn_abi.args[0]),
// Load the method pointer from the vtable; the data pointer replaces
// the first (self) argument.
467 let (ptr, method) = crate::vtable::get_ptr_and_method_ref(fx, args[0].value, idx);
468 let sig = clif_sig_from_fn_abi(fx.tcx, fx.target_config.default_call_conv, &fn_abi);
469 let sig = fx.bcx.import_signature(sig);
471 (CallTarget::Indirect(sig, method), Some(ptr.get_addr(fx)))
476 let func_ref = fx.get_function_ref(instance);
477 (CallTarget::Direct(func_ref), None)
482 if fx.clif_comments.enabled() {
483 let nop_inst = fx.bcx.ins().nop();
484 fx.add_comment(nop_inst, "indirect call");
487 let func = func.load_scalar(fx);
488 let sig = clif_sig_from_fn_abi(fx.tcx, fx.target_config.default_call_conv, &fn_abi);
489 let sig = fx.bcx.import_signature(sig);
491 (CallTarget::Indirect(sig, func), None)
// Emit the call, letting the return helper thread through the optional
// hidden return-place pointer.
495 self::returning::codegen_with_call_return_arg(fx, &fn_abi.ret, ret_place, |fx, return_ptr| {
496 let call_args = return_ptr
498 .chain(first_arg_override.into_iter())
// Skip the original self argument when the vtable path overrode it.
502 .skip(if first_arg_override.is_some() { 1 } else { 0 })
504 adjust_arg_for_abi(fx, arg.value, &fn_abi.args[i], arg.is_owned).into_iter()
508 .collect::<Vec<Value>>();
510 let call_inst = match func_ref {
511 CallTarget::Direct(func_ref) => fx.bcx.ins().call(func_ref, &call_args),
512 CallTarget::Indirect(sig, func_ptr) => {
513 fx.bcx.ins().call_indirect(sig, func_ptr, &call_args)
517 // FIXME find a cleaner way to support varargs
518 if fn_sig.c_variadic() {
519 if !matches!(fn_sig.abi(), Abi::C { .. }) {
520 fx.tcx.sess.span_fatal(
522 &format!("Variadic call for non-C abi {:?}", fn_sig.abi()),
// Patch the call's signature so it covers the actual argument types.
525 let sig_ref = fx.bcx.func.dfg.call_signature(call_inst).unwrap();
526 let abi_params = call_args
529 let ty = fx.bcx.func.dfg.value_type(arg);
531 // FIXME set %al to upperbound on float args once floats are supported
532 fx.tcx.sess.span_fatal(
534 &format!("Non int ty {:?} for variadic call", ty),
539 .collect::<Vec<AbiParam>>();
540 fx.bcx.func.dfg.signatures[sig_ref].params = abi_params;
// Jump to the continuation block, or trap if the call never returns.
546 if let Some(dest) = target {
547 let ret_block = fx.get_block(dest);
548 fx.bcx.ins().jump(ret_block, &[]);
550 fx.bcx.ins().trap(TrapCode::UnreachableCodeReached);
// Lower a MIR `Drop` terminator for `drop_place`: a nop when there is no drop
// glue, a virtual call through the vtable's drop entry for `dyn`/`dyn*`
// receivers, and otherwise a direct call to the resolved `drop_in_place`
// instance with a `&mut` reference to the place.
554 pub(crate) fn codegen_drop<'tcx>(
555 fx: &mut FunctionCx<'_, '_, 'tcx>,
556 source_info: mir::SourceInfo,
557 drop_place: CPlace<'tcx>,
559 let ty = drop_place.layout().ty;
560 let drop_instance = Instance::resolve_drop_in_place(fx.tcx, ty).polymorphize(fx.tcx);
562 if let ty::InstanceDef::DropGlue(_, None) = drop_instance.def {
563 // we don't actually need to drop anything
566 ty::Dynamic(_, _, ty::Dyn) => {
567 // IN THIS ARM, WE HAVE:
568 // ty = *mut (dyn Trait)
569 // which is: exists<T> ( *mut T, Vtable<T: Trait> )
572 // args = ( Data, Vtable )
// Split the fat pointer into data pointer and vtable, then load the
// drop function (slot 0) out of the vtable.
579 let (ptr, vtable) = drop_place.to_ptr_maybe_unsized();
580 let ptr = ptr.get_addr(fx);
581 let drop_fn = crate::vtable::drop_fn_of_obj(fx, vtable.unwrap());
583 // FIXME(eddyb) perhaps move some of this logic into
584 // `Instance::resolve_drop_in_place`?
585 let virtual_drop = Instance {
586 def: ty::InstanceDef::Virtual(drop_instance.def_id(), 0),
587 substs: drop_instance.substs,
590 RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(virtual_drop, ty::List::empty());
// Indirect call: the drop fn is only known at runtime via the vtable.
592 let sig = clif_sig_from_fn_abi(fx.tcx, fx.target_config.default_call_conv, &fn_abi);
593 let sig = fx.bcx.import_signature(sig);
594 fx.bcx.ins().call_indirect(sig, drop_fn, &[ptr]);
596 ty::Dynamic(_, _, ty::DynStar) => {
597 // IN THIS ARM, WE HAVE:
598 // ty = *mut (dyn* Trait)
599 // which is: *mut exists<T: sizeof(T) == sizeof(usize)> (T, Vtable<T: Trait>)
612 // WE CAN CONVERT THIS INTO THE ABOVE LOGIC BY DOING
614 // data = &(*args[0]).0 // gives a pointer to Data above (really the same pointer)
615 // vtable = (*args[0]).1 // loads the vtable out
616 // (data, vtable) // an equivalent Rust `*mut dyn Trait`
618 // SO THEN WE CAN USE THE ABOVE CODE.
619 let (data, vtable) = drop_place.to_cvalue(fx).dyn_star_force_data_on_stack(fx);
620 let drop_fn = crate::vtable::drop_fn_of_obj(fx, vtable);
622 let virtual_drop = Instance {
623 def: ty::InstanceDef::Virtual(drop_instance.def_id(), 0),
624 substs: drop_instance.substs,
627 RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(virtual_drop, ty::List::empty());
629 let sig = clif_sig_from_fn_abi(fx.tcx, fx.target_config.default_call_conv, &fn_abi);
630 let sig = fx.bcx.import_signature(sig);
631 fx.bcx.ins().call_indirect(sig, drop_fn, &[data]);
// Sized case: call the concrete drop glue directly.
634 assert!(!matches!(drop_instance.def, InstanceDef::Virtual(_, _)));
637 RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(drop_instance, ty::List::empty());
// `drop_in_place` takes `&mut T`; borrow the place accordingly.
639 let arg_value = drop_place.place_ref(
641 fx.layout_of(fx.tcx.mk_ref(
642 fx.tcx.lifetimes.re_erased,
643 TypeAndMut { ty, mutbl: crate::rustc_hir::Mutability::Mut },
646 let arg_value = adjust_arg_for_abi(fx, arg_value, &fn_abi.args[0], true);
648 let mut call_args: Vec<Value> = arg_value.into_iter().collect::<Vec<_>>();
650 if drop_instance.def.requires_caller_location(fx.tcx) {
651 // Pass the caller location for `#[track_caller]`.
652 let caller_location = fx.get_caller_location(source_info);
654 adjust_arg_for_abi(fx, caller_location, &fn_abi.args[1], false).into_iter(),
658 let func_ref = fx.get_function_ref(drop_instance);
659 fx.bcx.ins().call(func_ref, &call_args);