//! Handling of everything related to the calling convention. Also fills `fx.local_map`.

#[cfg(debug_assertions)]
mod comments;
mod pass_mode;
mod returning;

use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::ty::layout::FnAbiExt;
use rustc_target::abi::call::{Conv, FnAbi};
use rustc_target::spec::abi::Abi;

use cranelift_codegen::ir::AbiParam;
use smallvec::smallvec;

use self::pass_mode::*;
use crate::prelude::*;

pub(crate) use self::returning::{can_return_to_ssa_var, codegen_return};

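/// Construct a Cranelift [`Signature`] from a rustc [`FnAbi`].
///
/// A rough sketch of the mapping (illustrative only; the exact params are
/// determined per-argument by `pass_mode`):
///
/// ```text
/// fn foo(x: u64) -> u64
/// // on x86_64-unknown-linux-gnu becomes approximately:
/// Signature { params: [i64], returns: [i64], call_conv: SystemV }
/// ```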
fn clif_sig_from_fn_abi<'tcx>(
    tcx: TyCtxt<'tcx>,
    triple: &target_lexicon::Triple,
    fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
) -> Signature {
    let call_conv = match fn_abi.conv {
        Conv::Rust | Conv::C => CallConv::triple_default(triple),
        Conv::X86_64SysV => CallConv::SystemV,
        Conv::X86_64Win64 => CallConv::WindowsFastcall,
        Conv::ArmAapcs
        | Conv::Msp430Intr
        | Conv::PtxKernel
        | Conv::X86Fastcall
        | Conv::X86Intr
        | Conv::X86Stdcall
        | Conv::X86ThisCall
        | Conv::X86VectorCall
        | Conv::AmdGpuKernel
        | Conv::AvrInterrupt
        | Conv::AvrNonBlockingInterrupt => {
            todo!("{:?}", fn_abi.conv)
        }
    };
    let inputs = fn_abi
        .args
        .iter()
        .flat_map(|arg_abi| arg_abi.get_abi_param(tcx).into_iter());

    let (return_ptr, returns) = fn_abi.ret.get_abi_return(tcx);
    // Sometimes the first param is a pointer to the place where the return value needs to be stored.
    let params: Vec<_> = return_ptr.into_iter().chain(inputs).collect();

    Signature {
        params,
        returns,
        call_conv,
    }
}

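/// Compute the Cranelift signature for `inst`. The instance must be fully
/// monomorphic; the assert below guards against leftover inference variables.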
pub(crate) fn get_function_sig<'tcx>(
    tcx: TyCtxt<'tcx>,
    triple: &target_lexicon::Triple,
    inst: Instance<'tcx>,
) -> Signature {
    assert!(!inst.substs.needs_infer());
    clif_sig_from_fn_abi(
        tcx,
        triple,
        &FnAbi::of_instance(&RevealAllLayoutCx(tcx), inst, &[]),
    )
}

/// Instance must be monomorphized
pub(crate) fn import_function<'tcx>(
    tcx: TyCtxt<'tcx>,
    module: &mut impl Module,
    inst: Instance<'tcx>,
) -> FuncId {
    let name = tcx.symbol_name(inst).name.to_string();
    let sig = get_function_sig(tcx, module.isa().triple(), inst);
    module
        .declare_function(&name, Linkage::Import, &sig)
        .unwrap()
}

impl<'tcx, M: Module> FunctionCx<'_, 'tcx, M> {
    /// Instance must be monomorphized
    pub(crate) fn get_function_ref(&mut self, inst: Instance<'tcx>) -> FuncRef {
        let func_id = import_function(self.tcx, &mut self.cx.module, inst);
        let func_ref = self
            .cx
            .module
            .declare_func_in_func(func_id, &mut self.bcx.func);

        #[cfg(debug_assertions)]
        self.add_comment(func_ref, format!("{:?}", inst));

        func_ref
    }

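    /// Declare an imported function with the given signature and call it,
    /// using the target's default calling convention.
    ///
    /// A minimal usage sketch; the symbol name here is hypothetical:
    ///
    /// ```text
    /// // call `some_helper(i64) -> i64` provided by a support library
    /// let res = fx.lib_call(
    ///     "some_helper",
    ///     vec![AbiParam::new(types::I64)],
    ///     vec![AbiParam::new(types::I64)],
    ///     &[arg],
    /// )[0];
    /// ```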
    pub(crate) fn lib_call(
        &mut self,
        name: &str,
        params: Vec<AbiParam>,
        returns: Vec<AbiParam>,
        args: &[Value],
    ) -> &[Value] {
        let sig = Signature {
            params,
            returns,
            call_conv: CallConv::triple_default(self.triple()),
        };
        let func_id = self
            .cx
            .module
            .declare_function(&name, Linkage::Import, &sig)
            .unwrap();
        let func_ref = self
            .cx
            .module
            .declare_func_in_func(func_id, &mut self.bcx.func);
        let call_inst = self.bcx.ins().call(func_ref, args);
        #[cfg(debug_assertions)]
        {
            self.add_comment(call_inst, format!("easy_call {}", name));
        }
        let results = self.bcx.inst_results(call_inst);
        assert!(results.len() <= 2, "{}", results.len());
        results
    }

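    /// Wrapper around `lib_call` that works on `CValue`s: the ABI params are
    /// derived from the Rust types, and a tuple return type maps to one return
    /// value per element (at most two).
    ///
    /// Hypothetical call site, assuming an `fmodf`-style libcall is linked in:
    ///
    /// ```text
    /// let rem = fx.easy_call("fmodf", &[lhs, rhs], fx.tcx.types.f32);
    /// ```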
    pub(crate) fn easy_call(
        &mut self,
        name: &str,
        args: &[CValue<'tcx>],
        return_ty: Ty<'tcx>,
    ) -> CValue<'tcx> {
        let (input_tys, args): (Vec<_>, Vec<_>) = args
            .iter()
            .map(|arg| {
                (
                    AbiParam::new(self.clif_type(arg.layout().ty).unwrap()),
                    arg.load_scalar(self),
                )
            })
            .unzip();
        let return_layout = self.layout_of(return_ty);
        let return_tys = if let ty::Tuple(tup) = return_ty.kind() {
            tup.types()
                .map(|ty| AbiParam::new(self.clif_type(ty).unwrap()))
                .collect()
        } else {
            vec![AbiParam::new(self.clif_type(return_ty).unwrap())]
        };
        let ret_vals = self.lib_call(name, input_tys, return_tys, &args);
        match *ret_vals {
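            // No return values: produce a ZST `CValue` at an arbitrary aligned,
            // non-null address; it is never actually loaded from.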
            [] => CValue::by_ref(
                Pointer::const_addr(self, i64::from(self.pointer_type.bytes())),
                return_layout,
            ),
            [val] => CValue::by_val(val, return_layout),
            [val, extra] => CValue::by_val_pair(val, extra, return_layout),
            _ => unreachable!(),
        }
    }
}

/// Make a [`CPlace`] capable of holding a value of the specified type.
fn make_local_place<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Module>,
    local: Local,
    layout: TyAndLayout<'tcx>,
    is_ssa: bool,
) -> CPlace<'tcx> {
    let place = if is_ssa {
        if let rustc_target::abi::Abi::ScalarPair(_, _) = layout.abi {
            CPlace::new_var_pair(fx, local, layout)
        } else {
            CPlace::new_var(fx, local, layout)
        }
    } else {
        CPlace::new_stack_slot(fx, layout)
    };

    #[cfg(debug_assertions)]
    self::comments::add_local_place_comments(fx, place, local);

    place
}

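/// Generate the function prologue: create block params for the incoming
/// arguments, set up the return place, bind or copy the arguments into
/// locals, and allocate all vars and temps.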
pub(crate) fn codegen_fn_prelude<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Module>,
    start_block: Block,
) {
    fx.bcx.append_block_params_for_function_params(start_block);

    fx.bcx.switch_to_block(start_block);
    fx.bcx.ins().nop();

    let ssa_analyzed = crate::analyze::analyze(fx);

    #[cfg(debug_assertions)]
    self::comments::add_args_header_comment(fx);

    let mut block_params_iter = fx
        .bcx
        .func
        .dfg
        .block_params(start_block)
        .to_vec()
        .into_iter();
    let ret_place =
        self::returning::codegen_return_param(fx, &ssa_analyzed, &mut block_params_iter);
    assert_eq!(fx.local_map.push(ret_place), RETURN_PLACE);

    // None means pass_mode == NoPass
    enum ArgKind<'tcx> {
        Normal(Option<CValue<'tcx>>),
        Spread(Vec<Option<CValue<'tcx>>>),
    }

    let fn_abi = fx.fn_abi.take().unwrap();
    let mut arg_abis_iter = fn_abi.args.iter();

    let func_params = fx
        .mir
        .args_iter()
        .map(|local| {
            let arg_ty = fx.monomorphize(fx.mir.local_decls[local].ty);

            // Adapted from https://github.com/rust-lang/rust/blob/145155dc96757002c7b2e9de8489416e2fdbbd57/src/librustc_codegen_llvm/mir/mod.rs#L442-L482
            if Some(local) == fx.mir.spread_arg {
                // This argument (e.g. the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual function arguments.
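                //
                // For example (illustrative): for `extern "rust-call" fn(self, (u32, u32))`
                // the callee receives `self`, `args.0` and `args.1` as separate
                // ABI-level parameters and rebuilds the `(u32, u32)` tuple here.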

                let tupled_arg_tys = match arg_ty.kind() {
                    ty::Tuple(ref tys) => tys,
                    _ => bug!("spread argument isn't a tuple?! but {:?}", arg_ty),
                };

                let mut params = Vec::new();
                for (i, _arg_ty) in tupled_arg_tys.types().enumerate() {
                    let arg_abi = arg_abis_iter.next().unwrap();
                    let param =
                        cvalue_for_param(fx, Some(local), Some(i), arg_abi, &mut block_params_iter);
                    params.push(param);
                }

                (local, ArgKind::Spread(params), arg_ty)
            } else {
                let arg_abi = arg_abis_iter.next().unwrap();
                let param =
                    cvalue_for_param(fx, Some(local), None, arg_abi, &mut block_params_iter);
                (local, ArgKind::Normal(param), arg_ty)
            }
        })
        .collect::<Vec<(Local, ArgKind<'tcx>, Ty<'tcx>)>>();

    assert!(fx.caller_location.is_none());
    if fx.instance.def.requires_caller_location(fx.tcx) {
        // Store caller location for `#[track_caller]`.
        let arg_abi = arg_abis_iter.next().unwrap();
        fx.caller_location =
            Some(cvalue_for_param(fx, None, None, arg_abi, &mut block_params_iter).unwrap());
    }

    assert!(arg_abis_iter.next().is_none(), "ArgAbi left behind");
    fx.fn_abi = Some(fn_abi);
    assert!(block_params_iter.next().is_none(), "arg_value left behind");

    #[cfg(debug_assertions)]
    self::comments::add_locals_header_comment(fx);

    for (local, arg_kind, ty) in func_params {
        let layout = fx.layout_of(ty);

        let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;

        // While this is normally an optimization to prevent an unnecessary copy when an argument is
        // not mutated by the current function, it is necessary to support unsized arguments.
        if let ArgKind::Normal(Some(val)) = arg_kind {
            if let Some((addr, meta)) = val.try_to_ptr() {
                let local_decl = &fx.mir.local_decls[local];
                // The `!` is important: `internally_mutable` is the negation of `is_freeze`.
                let internally_mutable = !val.layout().ty.is_freeze(
                    fx.tcx.at(local_decl.source_info.span),
                    ParamEnv::reveal_all(),
                );
                if local_decl.mutability == mir::Mutability::Not && !internally_mutable {
                    // We won't mutate this argument, so it is fine to borrow the backing storage
                    // of this argument, to prevent a copy.

                    let place = if let Some(meta) = meta {
                        CPlace::for_ptr_with_extra(addr, meta, val.layout())
                    } else {
                        CPlace::for_ptr(addr, val.layout())
                    };

                    #[cfg(debug_assertions)]
                    self::comments::add_local_place_comments(fx, place, local);

                    assert_eq!(fx.local_map.push(place), local);
                    continue;
                }
            }
        }

        let place = make_local_place(fx, local, layout, is_ssa);
        assert_eq!(fx.local_map.push(place), local);

        match arg_kind {
            ArgKind::Normal(param) => {
                if let Some(param) = param {
                    place.write_cvalue(fx, param);
                }
            }
            ArgKind::Spread(params) => {
                for (i, param) in params.into_iter().enumerate() {
                    if let Some(param) = param {
                        place
                            .place_field(fx, mir::Field::new(i))
                            .write_cvalue(fx, param);
                    }
                }
            }
        }
    }

    for local in fx.mir.vars_and_temps_iter() {
        let ty = fx.monomorphize(fx.mir.local_decls[local].ty);
        let layout = fx.layout_of(ty);

        let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;

        let place = make_local_place(fx, local, layout, is_ssa);
        assert_eq!(fx.local_map.push(place), local);
    }

    fx.bcx
        .ins()
        .jump(*fx.block_map.get(START_BLOCK).unwrap(), &[]);
}

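/// Codegen a direct, indirect or virtual function call: resolves intrinsics
/// and empty drop glue specially, unpacks the argument tuple for the
/// "rust-call" ABI, and passes the caller location for `#[track_caller]`.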
pub(crate) fn codegen_terminator_call<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Module>,
    span: Span,
    current_block: Block,
    func: &Operand<'tcx>,
    args: &[Operand<'tcx>],
    destination: Option<(Place<'tcx>, BasicBlock)>,
) {
    let fn_ty = fx.monomorphize(func.ty(fx.mir, fx.tcx));
    let fn_sig = fx
        .tcx
        .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), fn_ty.fn_sig(fx.tcx));

    let destination = destination.map(|(place, bb)| (codegen_place(fx, place), bb));

    // Handle special calls like intrinsics and empty drop glue.
    let instance = if let ty::FnDef(def_id, substs) = *fn_ty.kind() {
        let instance = ty::Instance::resolve(fx.tcx, ty::ParamEnv::reveal_all(), def_id, substs)
            .unwrap()
            .unwrap()
            .polymorphize(fx.tcx);

        if fx.tcx.symbol_name(instance).name.starts_with("llvm.") {
            crate::intrinsics::codegen_llvm_intrinsic_call(
                fx,
                &fx.tcx.symbol_name(instance).name,
                substs,
                args,
                destination,
            );
            return;
        }

        match instance.def {
            InstanceDef::Intrinsic(_) => {
                crate::intrinsics::codegen_intrinsic_call(fx, instance, args, destination, span);
                return;
            }
            InstanceDef::DropGlue(_, None) => {
                // Empty drop glue is a nop; just jump to the destination block.
                let (_, dest) = destination.expect("Non terminating drop_in_place_real???");
                let ret_block = fx.get_block(dest);
                fx.bcx.ins().jump(ret_block, &[]);
                return;
            }
            _ => Some(instance),
        }
    } else {
        None
    };

    let extra_args = &args[fn_sig.inputs().len()..];
    let extra_args = extra_args
        .iter()
        .map(|op_arg| fx.monomorphize(op_arg.ty(fx.mir, fx.tcx)))
        .collect::<Vec<_>>();
    let fn_abi = if let Some(instance) = instance {
        FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), instance, &extra_args)
    } else {
        FnAbi::of_fn_ptr(
            &RevealAllLayoutCx(fx.tcx),
            fn_ty.fn_sig(fx.tcx),
            &extra_args,
        )
    };

    let is_cold = instance
        .map(|inst| {
            fx.tcx
                .codegen_fn_attrs(inst.def_id())
                .flags
                .contains(CodegenFnAttrFlags::COLD)
        })
        .unwrap_or(false);
    if is_cold {
        fx.cold_blocks.insert(current_block);
    }

    // Unpack the argument tuple for closures ("rust-call" ABI).
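    // E.g. (illustrative) a call `f(a, (b, c))` through the "rust-call" ABI
    // is lowered here to the flat argument list `a, b, c`.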
    let args = if fn_sig.abi == Abi::RustCall {
        assert_eq!(args.len(), 2, "rust-call abi requires two arguments");
        let self_arg = codegen_operand(fx, &args[0]);
        let pack_arg = codegen_operand(fx, &args[1]);

        let tupled_arguments = match pack_arg.layout().ty.kind() {
            ty::Tuple(ref tupled_arguments) => tupled_arguments,
            _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
        };

        let mut args = Vec::with_capacity(1 + tupled_arguments.len());
        args.push(self_arg);
        for i in 0..tupled_arguments.len() {
            args.push(pack_arg.value_field(fx, mir::Field::new(i)));
        }
        args
    } else {
        args.iter()
            .map(|arg| codegen_operand(fx, arg))
            .collect::<Vec<_>>()
    };

    //   | indirect call target
    //   |         | the first argument to be passed
    //   v         v
    let (func_ref, first_arg) = match instance {
        // Trait object call
        Some(Instance {
            def: InstanceDef::Virtual(_, idx),
            ..
        }) => {
            #[cfg(debug_assertions)]
            {
                let nop_inst = fx.bcx.ins().nop();
                fx.add_comment(
                    nop_inst,
                    format!("virtual call; self arg pass mode: {:?}", &fn_abi.args[0]),
                );
            }
            let (ptr, method) = crate::vtable::get_ptr_and_method_ref(fx, args[0], idx);
            (Some(method), smallvec![ptr])
        }

        // Normal call
        Some(_) => (
            None,
            args.get(0)
                .map(|arg| adjust_arg_for_abi(fx, *arg, &fn_abi.args[0]))
                .unwrap_or(smallvec![]),
        ),

        // Indirect call
        None => {
            #[cfg(debug_assertions)]
            {
                let nop_inst = fx.bcx.ins().nop();
                fx.add_comment(nop_inst, "indirect call");
            }
            let func = codegen_operand(fx, func).load_scalar(fx);
            (
                Some(func),
                args.get(0)
                    .map(|arg| adjust_arg_for_abi(fx, *arg, &fn_abi.args[0]))
                    .unwrap_or(smallvec![]),
            )
        }
    };

    let ret_place = destination.map(|(place, _)| place);
    let (call_inst, call_args) = self::returning::codegen_with_call_return_arg(
        fx,
        &fn_abi.ret,
        ret_place,
        |fx, return_ptr| {
            let regular_args_count = args.len();
            let mut call_args: Vec<Value> = return_ptr
                .into_iter()
                .chain(first_arg.into_iter())
                .chain(
                    args.into_iter()
                        .enumerate()
                        .skip(1)
                        .flat_map(|(i, arg)| {
                            adjust_arg_for_abi(fx, arg, &fn_abi.args[i]).into_iter()
                        }),
                )
                .collect::<Vec<_>>();

            if instance
                .map(|inst| inst.def.requires_caller_location(fx.tcx))
                .unwrap_or(false)
            {
                // Pass the caller location for `#[track_caller]`.
                let caller_location = fx.get_caller_location(span);
                call_args.extend(
                    adjust_arg_for_abi(fx, caller_location, &fn_abi.args[regular_args_count])
                        .into_iter(),
                );
                assert_eq!(fn_abi.args.len(), regular_args_count + 1);
            } else {
                assert_eq!(fn_abi.args.len(), regular_args_count);
            }

            let call_inst = if let Some(func_ref) = func_ref {
                let sig = clif_sig_from_fn_abi(fx.tcx, fx.triple(), &fn_abi);
                let sig = fx.bcx.import_signature(sig);
                fx.bcx.ins().call_indirect(sig, func_ref, &call_args)
            } else {
                let func_ref =
                    fx.get_function_ref(instance.expect("non-indirect call on non-FnDef type"));
                fx.bcx.ins().call(func_ref, &call_args)
            };

            (call_inst, call_args)
        },
    );

    // FIXME find a cleaner way to support varargs
    if fn_sig.c_variadic {
        if fn_sig.abi != Abi::C {
            fx.tcx.sess.span_fatal(
                span,
                &format!("Variadic call for non-C abi {:?}", fn_sig.abi),
            );
        }
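        // The signature of a variadic call must list the types that were
        // actually passed, so rebuild the params from the argument values.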
        let sig_ref = fx.bcx.func.dfg.call_signature(call_inst).unwrap();
        let abi_params = call_args
            .into_iter()
            .map(|arg| {
                let ty = fx.bcx.func.dfg.value_type(arg);
                if !ty.is_int() {
                    // FIXME set %al to upperbound on float args once floats are supported
                    fx.tcx
                        .sess
                        .span_fatal(span, &format!("Non int ty {:?} for variadic call", ty));
                }
                AbiParam::new(ty)
            })
            .collect::<Vec<AbiParam>>();
        fx.bcx.func.dfg.signatures[sig_ref].params = abi_params;
    }

    if let Some((_, dest)) = destination {
        let ret_block = fx.get_block(dest);
        fx.bcx.ins().jump(ret_block, &[]);
    } else {
        trap_unreachable(fx, "[corruption] Diverging function returned");
    }
}

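/// Codegen a call to `drop_in_place` for `drop_place`, dispatching through the
/// vtable when the type is a trait object.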
pub(crate) fn codegen_drop<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Module>,
    span: Span,
    drop_place: CPlace<'tcx>,
) {
    let ty = drop_place.layout().ty;
    let drop_instance = Instance::resolve_drop_in_place(fx.tcx, ty).polymorphize(fx.tcx);

    if let ty::InstanceDef::DropGlue(_, None) = drop_instance.def {
        // we don't actually need to drop anything
    } else {
        match ty.kind() {
            ty::Dynamic(..) => {
                let (ptr, vtable) = drop_place.to_ptr_maybe_unsized();
                let ptr = ptr.get_addr(fx);
                let drop_fn = crate::vtable::drop_fn_of_obj(fx, vtable.unwrap());

                // FIXME(eddyb) perhaps move some of this logic into
                // `Instance::resolve_drop_in_place`?
                let virtual_drop = Instance {
                    def: ty::InstanceDef::Virtual(drop_instance.def_id(), 0),
                    substs: drop_instance.substs,
                };
                let fn_abi = FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), virtual_drop, &[]);

                let sig = clif_sig_from_fn_abi(fx.tcx, fx.triple(), &fn_abi);
                let sig = fx.bcx.import_signature(sig);
                fx.bcx.ins().call_indirect(sig, drop_fn, &[ptr]);
            }
            _ => {
                assert!(!matches!(drop_instance.def, InstanceDef::Virtual(_, _)));

                let fn_abi = FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), drop_instance, &[]);

                let arg_value = drop_place.place_ref(
                    fx,
                    fx.layout_of(fx.tcx.mk_ref(
                        &ty::RegionKind::ReErased,
                        TypeAndMut {
                            ty,
                            mutbl: crate::rustc_hir::Mutability::Mut,
                        },
                    )),
                );
                let arg_value = adjust_arg_for_abi(fx, arg_value, &fn_abi.args[0]);

                let mut call_args: Vec<Value> = arg_value.into_iter().collect::<Vec<_>>();

                if drop_instance.def.requires_caller_location(fx.tcx) {
                    // Pass the caller location for `#[track_caller]`.
                    let caller_location = fx.get_caller_location(span);
                    call_args.extend(
                        adjust_arg_for_abi(fx, caller_location, &fn_abi.args[1]).into_iter(),
                    );
                }

                let func_ref = fx.get_function_ref(drop_instance);
                fx.bcx.ins().call(func_ref, &call_args);
            }
        }
    }
}