4 use rustc_target::spec::abi::Abi;
// Maps a PassMode to the Cranelift type of the corresponding function
// parameter. NOTE(review): this excerpt is line-sampled; the enclosing
// impl header, the `match` header and closing braces are not visible here.
16 fn get_param_ty(self, _fx: &FunctionCx) -> Type {
// NoPass (unit) values never materialize as parameters; reaching here is a bug.
18 PassMode::NoPass => unimplemented!("pass mode nopass"),
// ByVal scalars use their own Cranelift type directly.
19 PassMode::ByVal(cton_type) => cton_type,
// ByRef values are passed as a pointer; I64 apparently assumes a
// 64-bit target — TODO confirm against the target pointer width.
20 PassMode::ByRef => types::I64,
// Decides how a value of type `ty` is passed across a call boundary:
// NoPass (nothing emitted), ByVal (in a Cranelift register), or ByRef
// (behind a pointer). Several lines of this body are missing from this
// sampled excerpt (e.g. the full parameter list and the ByRef branch).
25 fn get_pass_mode<'a, 'tcx: 'a>(
26 tcx: TyCtxt<'a, 'tcx, 'tcx>,
// Unit `()` is compared structurally via `sty`; it produces NoPass.
31 if ty.sty == tcx.mk_nil().sty {
// Scalars that have a direct Cranelift type mapping are passed by value.
38 } else if let Some(ret_ty) = crate::common::cton_type_from_ty(tcx, ty) {
39 PassMode::ByVal(ret_ty)
// Aggregates for extern "C" are not implemented yet; this aborts codegen.
42 unimplemented!("Non scalars are not yet supported for \"C\" abi");
// Builds a Cranelift `Signature` from a monomorphized Rust fn type:
// picks a calling convention per ABI, flattens "rust-call"'s tupled
// arguments, and lowers each input/output through `get_pass_mode`.
// NOTE(review): sampled excerpt — several lines (parameters, match arms,
// the Signature construction) are missing between the lines shown.
48 pub fn cton_sig_from_fn_ty<'a, 'tcx: 'a>(
49 tcx: TyCtxt<'a, 'tcx, 'tcx>,
52 let sig = ty_fn_sig(tcx, fn_ty);
53 assert!(!sig.variadic, "Variadic function are not yet supported");
54 let (call_conv, inputs, output): (CallConv, Vec<Ty>, Ty) = match sig.abi {
// Rust-internal calls use Cranelift's Fast convention; "C" uses SystemV.
55 Abi::Rust => (CallConv::Fast, sig.inputs().to_vec(), sig.output()),
56 Abi::C => (CallConv::SystemV, sig.inputs().to_vec(), sig.output()),
59 "rust-call sig: {:?} inputs: {:?} output: {:?}",
// "rust-call" is assumed to be exactly (self, tupled_args) here;
// closures with no self would violate this assert — TODO confirm.
64 assert_eq!(sig.inputs().len(), 2);
65 let extra_args = match sig.inputs().last().unwrap().sty {
66 ty::TyTuple(ref tupled_arguments) => tupled_arguments,
67 _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
// Untuple: self followed by each element of the argument tuple.
69 let mut inputs: Vec<Ty> = vec![sig.inputs()[0]];
70 inputs.extend(extra_args.into_iter());
71 (CallConv::Fast, inputs, sig.output())
73 Abi::System => bug!("system abi should be selected elsewhere"),
74 Abi::RustIntrinsic => (CallConv::SystemV, sig.inputs().to_vec(), sig.output()),
75 _ => unimplemented!("unsupported abi {:?}", sig.abi),
// Lower each input type; ByRef inputs degrade to an I64 pointer.
80 .filter_map(|ty| match get_pass_mode(tcx, sig.abi, ty, false) {
81 PassMode::ByVal(cton_ty) => Some(cton_ty),
82 PassMode::NoPass => unimplemented!("pass mode nopass"),
83 PassMode::ByRef => Some(types::I64),
// The return determines whether an out-pointer is prepended to params.
86 let (params, returns) = match get_pass_mode(tcx, sig.abi, output, true) {
87 PassMode::NoPass => (inputs.map(AbiParam::new).collect(), vec![]),
88 PassMode::ByVal(ret_ty) => (
89 inputs.map(AbiParam::new).collect(),
90 vec![AbiParam::new(ret_ty)],
94 Some(types::I64).into_iter() // First param is place to put return val
107 argument_bytes: None,
// Extracts a normalized `FnSig` from a callable type. Mirrors upstream
// rustc logic: fn pointers use their sig directly; closures get their
// env prepended as the first input; generators are rewritten to
// `fn(&mut Self) -> GeneratorState<Yield, Return>` via the lang item.
// NOTE(review): sampled excerpt — TyFnDef arm and several mk_fn_sig
// arguments are missing between the visible lines.
111 fn ty_fn_sig<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> ty::FnSig<'tcx> {
112 let sig = match ty.sty {
114 // Shims currently have type TyFnPtr. Not sure this should remain.
115 ty::TyFnPtr(_) => ty.fn_sig(tcx),
116 ty::TyClosure(def_id, substs) => {
117 let sig = substs.closure_sig(def_id, tcx);
// The closure environment (self) becomes an explicit leading parameter.
119 let env_ty = tcx.closure_env_ty(def_id, substs).unwrap();
120 sig.map_bound(|sig| tcx.mk_fn_sig(
121 iter::once(*env_ty.skip_binder()).chain(sig.inputs().iter().cloned()),
128 ty::TyGenerator(def_id, substs, _) => {
129 let sig = substs.poly_sig(def_id, tcx);
// Generators take `&mut Self` in a fresh late-bound env region.
131 let env_region = ty::ReLateBound(ty::INNERMOST, ty::BrEnv);
132 let env_ty = tcx.mk_mut_ref(tcx.mk_region(env_region), ty);
134 sig.map_bound(|sig| {
// Return type is GeneratorState<yield_ty, return_ty> from the lang item.
135 let state_did = tcx.lang_items().gen_state().unwrap();
136 let state_adt_ref = tcx.adt_def(state_did);
137 let state_substs = tcx.intern_substs(&[
139 sig.return_ty.into(),
141 let ret_ty = tcx.mk_adt(state_adt_ref, state_substs);
143 tcx.mk_fn_sig(iter::once(env_ty),
146 hir::Unsafety::Normal,
151 _ => bug!("unexpected type {:?} to ty_fn_sig", ty)
// Erase late-bound regions and normalize in the reveal-all param env,
// so codegen never sees unresolved projections or region binders.
153 tcx.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), &sig)
// Computes the (symbol name, Cranelift signature) pair for a fully
// monomorphized instance. The name is produced by rustc's
// DefPathBasedNames printer; the sig by `cton_sig_from_fn_ty`.
156 fn get_function_name_and_sig<'a, 'tcx>(
157 tcx: TyCtxt<'a, 'tcx, 'tcx>,
158 inst: Instance<'tcx>,
159 ) -> (String, Signature) {
// Caller contract: the instance must be fully monomorphized.
160 assert!(!inst.substs.needs_infer() && !inst.substs.has_param_types());
161 let fn_ty = inst.ty(tcx);
162 let sig = cton_sig_from_fn_ty(tcx, fn_ty);
163 let def_path_based_names =
164 ::rustc_mir::monomorphize::item::DefPathBasedNames::new(tcx, false, false);
165 let mut name = String::new();
166 def_path_based_names.push_instance_as_string(inst, &mut name);
170 impl<'a, 'tcx: 'a> CodegenCx<'a, 'tcx, CurrentBackend> {
// Declares `inst` in the module with Export linkage and returns the
// module-level FuncId plus an empty Function shell to fill with IR.
// NOTE(review): sampled excerpt — the module accessor and return
// expression between these lines are not visible.
171 pub fn predefine_function(&mut self, inst: Instance<'tcx>) -> (FuncId, Function) {
172 let (name, sig) = crate::abi::get_function_name_and_sig(self.tcx, inst);
175 .declare_function(&name, Linkage::Export, &sig)
// Namespace 0 with the FuncId index as the user-defined external name.
178 Function::with_name_signature(ExternalName::user(0, func_id.index() as u32), sig);
183 impl<'a, 'tcx: 'a> FunctionCx<'a, 'tcx> {
184 /// Instance must be monomorphized
// Declares the callee with Import linkage (idempotent at module level)
// and materializes a FuncRef usable from the current function's IR.
185 pub fn get_function_ref(&mut self, inst: Instance<'tcx>) -> FuncRef {
186 let (name, sig) = get_function_name_and_sig(self.tcx, inst);
189 .declare_function(&name, Linkage::Import, &sig)
192 .declare_func_in_func(func_id, &mut self.bcx.func)
// Presumably the body of `lib_call` (the signature line itself is not in
// this sampled excerpt; the name is inferred from the call at easy_call
// below — verify against the full file). Declares an imported SystemV
// helper with the given raw Cranelift types, calls it, and returns its
// single result (or nothing when `output_ty` is None).
198 input_tys: Vec<types::Type>,
199 output_ty: Option<types::Type>,
202 let sig = Signature {
203 params: input_tys.iter().cloned().map(AbiParam::new).collect(),
// A None output is encoded as a VOID return slot in the signature.
204 returns: vec![AbiParam::new(output_ty.unwrap_or(types::VOID))],
205 call_conv: CallConv::SystemV,
206 argument_bytes: None,
210 .declare_function(&name, Linkage::Import, &sig)
214 .declare_func_in_func(func_id, &mut self.bcx.func);
215 let call_inst = self.bcx.ins().call(func_ref, args);
216 if output_ty.is_none() {
219 let results = self.bcx.inst_results(call_inst);
// Exactly one SSA result is expected for non-void helper calls.
220 assert_eq!(results.len(), 1);
// Presumably the body of `easy_call` (header line not in this sampled
// excerpt; name taken from the bug! message below — verify). Convenience
// wrapper over `lib_call`: lowers CValue args to (cton type, value)
// pairs, maps the Rust return type, and wraps the result back up.
227 args: &[CValue<'tcx>],
230 let (input_tys, args): (Vec<_>, Vec<_>) = args
234 self.cton_type(arg.layout().ty).unwrap(),
235 arg.load_value(self),
238 let return_layout = self.layout_of(return_ty);
// Only unit tuples are allowed as "no return"; other tuples are rejected.
239 let return_ty = if let TypeVariants::TyTuple(tup) = return_ty.sty {
241 bug!("easy_call( (...) -> <non empty tuple> ) is not allowed");
245 Some(self.cton_type(return_ty).unwrap())
247 if let Some(val) = self.lib_call(name, input_tys, return_ty, &args) {
248 CValue::ByVal(val, return_layout)
// NOTE(review): the void-return path hands back a null ByRef CValue;
// looks like a placeholder for "no value" — confirm callers never read it.
250 CValue::ByRef(self.bcx.ins().iconst(types::I64, 0), return_layout)
// The normalized signature of the function currently being compiled.
254 fn self_sig(&self) -> FnSig<'tcx> {
255 ty_fn_sig(self.tcx, self.instance.ty(self.tcx))
// Shorthand for the current function's return type.
258 fn return_type(&self) -> Ty<'tcx> {
259 self.self_sig().output()
// Function prologue: binds every MIR local (return place, arguments,
// vars/temps) to either an SSA Variable or a stack slot, appending EBB
// params for the incoming ABI arguments. NOTE(review): sampled excerpt —
// many lines (match closers, ssa_analyzed lookups) are missing.
263 pub fn codegen_fn_prelude<'a, 'tcx: 'a>(fx: &mut FunctionCx<'a, 'tcx>, start_ebb: Ebb) {
264 let ssa_analyzed = crate::analyze::analyze(fx);
// NOTE(review): dev-only diagnostic — dumps the whole SSA analysis as a
// compiler warning on every function; consider removing before shipping.
265 fx.tcx.sess.warn(&format!("ssa {:?}", ssa_analyzed));
// Only Rust-internal ABIs can be *defined* by this backend so far.
267 match fx.self_sig().abi {
268 Abi::Rust | Abi::RustCall => {}
269 _ => unimplemented!("declared function with non \"rust\" or \"rust-call\" abi"),
272 let ret_layout = fx.layout_of(fx.return_type());
273 let output_pass_mode = get_pass_mode(fx.tcx, fx.self_sig().abi, fx.return_type(), true);
// ByRef returns receive the out-pointer as the first EBB param (I64).
274 let ret_param = match output_pass_mode {
275 PassMode::NoPass => {
278 PassMode::ByVal(ret_ty) => {
282 Some(fx.bcx.append_ebb_param(start_ebb, types::I64))
// Collect one ArgKind per MIR argument, appending EBB params as we go.
291 let func_params = fx.mir.args_iter().map(|local| {
292 let arg_ty = fx.monomorphize(&fx.mir.local_decls[local].ty);
294 // Adapted from https://github.com/rust-lang/rust/blob/145155dc96757002c7b2e9de8489416e2fdbbd57/src/librustc_codegen_llvm/mir/mod.rs#L442-L482
295 if Some(local) == fx.mir.spread_arg {
296 // This argument (e.g. the last argument in the "rust-call" ABI)
297 // is a tuple that was spread at the ABI level and now we have
298 // to reconstruct it into a tuple local variable, from multiple
299 // individual function arguments.
301 let tupled_arg_tys = match arg_ty.sty {
302 ty::TyTuple(ref tys) => tys,
303 _ => bug!("spread argument isn't a tuple?! but {:?}", arg_ty),
// One EBB param per tuple element, typed via its pass mode.
306 let mut ebb_params = Vec::new();
307 for arg_ty in tupled_arg_tys.iter() {
308 let cton_type = get_pass_mode(fx.tcx, fx.self_sig().abi, arg_ty, false).get_param_ty(fx);
309 ebb_params.push(fx.bcx.append_ebb_param(start_ebb, cton_type));
312 (local, ArgKind::Spread(ebb_params), arg_ty)
314 let cton_type = get_pass_mode(fx.tcx, fx.self_sig().abi, arg_ty, false).get_param_ty(fx);
315 (local, ArgKind::Normal(fx.bcx.append_ebb_param(start_ebb, cton_type)), arg_ty)
317 }).collect::<Vec<(Local, ArgKind, Ty)>>();
// Bind RETURN_PLACE according to how the return value travels.
319 match output_pass_mode {
320 PassMode::NoPass => {
// Unit return: a null address stands in for the (never-written) place.
321 let null = fx.bcx.ins().iconst(types::I64, 0);
322 //unimplemented!("pass mode nopass");
323 fx.local_map.insert(RETURN_PLACE, CPlace::Addr(null, fx.layout_of(fx.return_type())));
325 PassMode::ByVal(ret_ty) => {
326 let var = Variable(RETURN_PLACE);
327 fx.bcx.declare_var(var, ret_ty);
329 .insert(RETURN_PLACE, CPlace::Var(var, ret_layout));
// ByRef: the place is the caller-provided out-pointer captured above.
333 .insert(RETURN_PLACE, CPlace::Addr(ret_param.unwrap(), ret_layout));
// Bind each argument: SSA variable when the analysis allows, else a
// stack slot that the incoming value(s) are spilled into.
337 for (local, arg_kind, ty) in func_params {
338 let layout = fx.layout_of(ty);
340 if let ArgKind::Normal(ebb_param) = arg_kind {
344 .contains(crate::analyze::Flags::NOT_SSA)
346 let var = Variable(local);
347 fx.bcx.declare_var(var, fx.cton_type(ty).unwrap());
348 match get_pass_mode(fx.tcx, fx.self_sig().abi, ty, false) {
349 PassMode::NoPass => unimplemented!("pass mode nopass"),
350 PassMode::ByVal(_) => fx.bcx.def_var(var, ebb_param),
// ByRef argument into an SSA var: dereference once here.
352 let val = CValue::ByRef(ebb_param, fx.layout_of(ty)).load_value(fx);
353 fx.bcx.def_var(var, val);
356 fx.local_map.insert(local, CPlace::Var(var, layout));
// Non-SSA path: allocate an explicit stack slot sized by the layout.
361 let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
362 kind: StackSlotKind::ExplicitSlot,
363 size: layout.size.bytes() as u32,
367 let place = CPlace::from_stack_slot(fx, stack_slot, ty);
370 ArgKind::Normal(ebb_param) => {
371 match get_pass_mode(fx.tcx, fx.self_sig().abi, ty, false) {
372 PassMode::NoPass => unimplemented!("pass mode nopass"),
373 PassMode::ByVal(_) => place.write_cvalue(fx, CValue::ByVal(ebb_param, place.layout())),
374 PassMode::ByRef => place.write_cvalue(fx, CValue::ByRef(ebb_param, place.layout())),
// Spread arguments are reassembled field-by-field into the tuple place.
377 ArgKind::Spread(ebb_params) => {
378 for (i, ebb_param) in ebb_params.into_iter().enumerate() {
379 let sub_place = place.place_field(fx, mir::Field::new(i));
380 match get_pass_mode(fx.tcx, fx.self_sig().abi, sub_place.layout().ty, false) {
381 PassMode::NoPass => unimplemented!("pass mode nopass"),
382 PassMode::ByVal(_) => sub_place.write_cvalue(fx, CValue::ByVal(ebb_param, sub_place.layout())),
383 PassMode::ByRef => sub_place.write_cvalue(fx, CValue::ByRef(ebb_param, sub_place.layout())),
388 fx.local_map.insert(local, place);
// Vars and temps get the same SSA-or-stack-slot treatment as args.
391 for local in fx.mir.vars_and_temps_iter() {
392 let ty = fx.mir.local_decls[local].ty;
393 let layout = fx.layout_of(ty);
395 let place = if ssa_analyzed
398 .contains(crate::analyze::Flags::NOT_SSA)
400 let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
401 kind: StackSlotKind::ExplicitSlot,
402 size: layout.size.bytes() as u32,
405 CPlace::from_stack_slot(fx, stack_slot, ty)
407 let var = Variable(local);
408 fx.bcx.declare_var(var, fx.cton_type(ty).unwrap());
409 CPlace::Var(var, layout)
412 fx.local_map.insert(local, place);
// Lowers a MIR Call terminator: evaluates the callee, untuples
// "rust-call" arguments, dispatches intrinsics, then emits a direct or
// indirect Cranelift call and stores/jumps per the destination.
// NOTE(review): sampled excerpt — some lines are missing in between.
416 pub fn codegen_call<'a, 'tcx: 'a>(
417 fx: &mut FunctionCx<'a, 'tcx>,
418 func: &Operand<'tcx>,
419 args: &[Operand<'tcx>],
420 destination: &Option<(Place<'tcx>, BasicBlock)>,
422 let func = trans_operand(fx, func);
423 let fn_ty = func.layout().ty;
424 let sig = ty_fn_sig(fx.tcx, fn_ty);
426 // Unpack arguments tuple for closures
427 let args = if sig.abi == Abi::RustCall {
428 assert_eq!(args.len(), 2, "rust-call abi requires two arguments");
429 let self_arg = trans_operand(fx, &args[0]);
430 let pack_arg = trans_operand(fx, &args[1]);
431 let mut args = Vec::new();
// Project each tuple field out of the packed argument.
433 match pack_arg.layout().ty.sty {
434 ty::TyTuple(ref tupled_arguments) => {
435 for (i, _) in tupled_arguments.iter().enumerate() {
436 args.push(pack_arg.value_field(fx, mir::Field::new(i)));
439 _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
443 pack_arg.layout().ty,
444 args.iter().map(|a| a.layout().ty).collect::<Vec<_>>()
449 .map(|arg| trans_operand(fx, arg))
453 let destination = destination.as_ref().map(|(place, bb)| {
454 (trans_place(fx, place), *bb)
// Intrinsics are fully handled (including the branch) by this helper.
457 if codegen_intrinsic_call(fx, fn_ty, sig, &args, destination) {
461 let ret_layout = fx.layout_of(sig.output());
463 let output_pass_mode = get_pass_mode(fx.tcx, sig.abi, sig.output(), true);
// (removed stray `println!` debug output of the pass mode — debug
// tracing in codegen should go through proper logging, not stdout)
// Second half of codegen_call: materialize the optional out-pointer,
// lower the argument list per pass mode, emit the call, write back the
// result, and branch to the destination block (or trap if diverging).
465 let return_ptr = match output_pass_mode {
466 PassMode::NoPass => None,
467 PassMode::ByRef => match destination {
468 Some((place, _)) => Some(place.expect_addr()),
// Diverging call with ByRef return: a null out-pointer placeholder.
469 None => Some(fx.bcx.ins().iconst(types::I64, 0)),
471 PassMode::ByVal(_) => None,
// Out-pointer (if any) goes first, then each argument lowered to a Value.
474 let call_args: Vec<Value> = return_ptr
478 .map(|arg| match get_pass_mode(fx.tcx, sig.abi, arg.layout().ty, false) {
479 PassMode::NoPass => unimplemented!("pass mode nopass"),
480 PassMode::ByVal(_) => arg.load_value(fx),
481 PassMode::ByRef => arg.force_stack(fx),
483 ).collect::<Vec<_>>();
// Direct call for known functions, otherwise an indirect call through
// a freshly imported signature.
485 let inst = match func {
486 CValue::Func(func, _) => fx.bcx.ins().call(func, &call_args),
488 let func = func.load_value(fx);
489 let sig = fx.bcx.import_signature(cton_sig_from_fn_ty(fx.tcx, fn_ty));
490 fx.bcx.ins().call_indirect(sig, func, &call_args)
494 match output_pass_mode {
495 PassMode::NoPass => {}
// ByVal result: copy the single SSA result into the destination place.
496 PassMode::ByVal(_) => {
497 if let Some((ret_place, _)) = destination {
498 let results = fx.bcx.inst_results(inst);
499 ret_place.write_cvalue(fx, CValue::ByVal(results[0], ret_layout));
// ByRef result was already written through the out-pointer by the callee.
502 PassMode::ByRef => {}
504 if let Some((_, dest)) = destination {
505 let ret_ebb = fx.get_ebb(dest);
506 fx.bcx.ins().jump(ret_ebb, &[]);
// No destination: the call diverges; trap code !0 marks "unreachable".
508 fx.bcx.ins().trap(TrapCode::User(!0));
// Emits the function epilogue. NoPass/ByRef returns carry no SSA value
// (the ByRef result was written through the out-pointer); ByVal loads
// the return place and returns it as the single Cranelift result.
512 pub fn codegen_return(fx: &mut FunctionCx) {
513 match get_pass_mode(fx.tcx, fx.self_sig().abi, fx.return_type(), true) {
514 PassMode::NoPass | PassMode::ByRef => {
515 fx.bcx.ins().return_(&[]);
517 PassMode::ByVal(_) => {
518 let place = fx.get_local_place(RETURN_PLACE);
519 let ret_val = place.to_cvalue(fx).load_value(fx);
520 fx.bcx.ins().return_(&[ret_val]);
// Handles RustIntrinsic calls inline; returns (per the caller's use at
// codegen_call) whether the call was consumed here. Dispatches on the
// intrinsic's item name. NOTE(review): sampled excerpt — match arm
// headers and closers are missing between many of the lines below.
525 fn codegen_intrinsic_call<'a, 'tcx: 'a>(
526 fx: &mut FunctionCx<'a, 'tcx>,
529 args: &[CValue<'tcx>],
530 destination: Option<(CPlace<'tcx>, BasicBlock)>,
532 if let TypeVariants::TyFnDef(def_id, substs) = fn_ty.sty {
533 if sig.abi == Abi::RustIntrinsic {
534 let intrinsic = fx.tcx.item_name(def_id).as_str();
535 let intrinsic = &intrinsic[..];
537 let ret = match destination {
538 Some((place, _)) => place,
541 "codegen_call(fx, _, {:?}, {:?})",
544 // Insert non returning intrinsics here
// Trap code !0 - 1 is apparently the "abort/panic" marker — TODO confirm.
547 fx.bcx.ins().trap(TrapCode::User(!0 - 1));
550 fx.bcx.ins().trap(TrapCode::User(!0 - 1));
// NOTE(review): "instrinsic" typo in the panic message (runtime string,
// left untouched here).
552 _ => unimplemented!("unsupported instrinsic {}", intrinsic),
558 let nil_ty = fx.tcx.mk_nil();
559 let u64_layout = fx.layout_of(fx.tcx.types.u64);
560 let usize_layout = fx.layout_of(fx.tcx.types.usize);
564 assert_eq!(args.len(), 1);
// Pointer arithmetic intrinsic: plain iadd of base and byte/elem offset.
567 assert_eq!(args.len(), 2);
568 let base = args[0].load_value(fx);
569 let offset = args[1].load_value(fx);
570 let res = fx.bcx.ins().iadd(base, offset);
571 let res = CValue::ByVal(res, ret.layout());
572 ret.write_cvalue(fx, res);
// Branch hints carry no codegen effect here: just forward the value.
574 "likely" | "unlikely" => {
575 assert_eq!(args.len(), 1);
576 ret.write_cvalue(fx, args[0]);
// NOTE(review): `copy` (overlap allowed, memmove semantics) and
// `copy_nonoverlapping` (memcpy) are lowered identically here —
// confirm the helper being called handles overlapping ranges.
578 "copy" | "copy_nonoverlapping" => {
579 let elem_ty = substs.type_at(0);
580 let elem_size: u64 = fx.layout_of(elem_ty).size.bytes();
581 let elem_size = fx.bcx.ins().iconst(types::I64, elem_size as i64);
582 assert_eq!(args.len(), 3);
585 let count = args[2].load_value(fx);
586 let byte_amount = fx.bcx.ins().imul(count, elem_size);
589 &[dst, src, CValue::ByVal(byte_amount, usize_layout)],
593 "discriminant_value" => {
594 assert_eq!(args.len(), 1);
595 let discr = crate::base::trans_get_discriminant(fx, args[0], ret.layout());
596 ret.write_cvalue(fx, discr);
// size_of / type_id / min_align_of style intrinsics: compile-time
// constants materialized as const values.
599 assert_eq!(args.len(), 0);
600 let size_of = fx.layout_of(substs.type_at(0)).size.bytes();
601 let size_of = CValue::const_val(fx, usize_layout.ty, size_of as i64);
602 ret.write_cvalue(fx, size_of);
605 assert_eq!(args.len(), 0);
606 let type_id = fx.tcx.type_id_hash(substs.type_at(0));
607 let type_id = CValue::const_val(fx, u64_layout.ty, type_id as i64);
608 ret.write_cvalue(fx, type_id);
611 assert_eq!(args.len(), 0);
612 let min_align = fx.layout_of(substs.type_at(0)).align.abi();
613 let min_align = CValue::const_val(fx, usize_layout.ty, min_align as i64);
614 ret.write_cvalue(fx, min_align);
// unchecked_* arithmetic: lowered as plain (non-checked) int binops.
616 _ if intrinsic.starts_with("unchecked_") => {
617 assert_eq!(args.len(), 2);
618 let bin_op = match intrinsic {
619 "unchecked_div" => BinOp::Div,
620 "unchecked_rem" => BinOp::Rem,
621 "unchecked_shl" => BinOp::Shl,
622 "unchecked_shr" => BinOp::Shr,
623 _ => unimplemented!("intrinsic {}", intrinsic),
625 let res = match ret.layout().ty.sty {
626 TypeVariants::TyUint(_) => crate::base::trans_int_binop(
634 TypeVariants::TyInt(_) => crate::base::trans_int_binop(
644 ret.write_cvalue(fx, res);
// *_with_overflow: checked binops returning (value, overflowed) pairs.
646 _ if intrinsic.ends_with("_with_overflow") => {
647 assert_eq!(args.len(), 2);
648 assert_eq!(args[0].layout().ty, args[1].layout().ty);
649 let bin_op = match intrinsic {
650 "add_with_overflow" => BinOp::Add,
651 "sub_with_overflow" => BinOp::Sub,
652 "mul_with_overflow" => BinOp::Mul,
653 _ => unimplemented!("intrinsic {}", intrinsic),
655 let res = match args[0].layout().ty.sty {
656 TypeVariants::TyUint(_) => crate::base::trans_checked_int_binop(
664 TypeVariants::TyInt(_) => crate::base::trans_checked_int_binop(
674 ret.write_cvalue(fx, res);
// overflowing_*: wrapping semantics via the unchecked binop lowering.
676 _ if intrinsic.starts_with("overflowing_") => {
677 assert_eq!(args.len(), 2);
678 assert_eq!(args[0].layout().ty, args[1].layout().ty);
679 let bin_op = match intrinsic {
680 "overflowing_add" => BinOp::Add,
681 "overflowing_sub" => BinOp::Sub,
682 "overflowing_mul" => BinOp::Mul,
683 _ => unimplemented!("intrinsic {}", intrinsic),
685 let res = match args[0].layout().ty.sty {
686 TypeVariants::TyUint(_) => crate::base::trans_int_binop(
694 TypeVariants::TyInt(_) => crate::base::trans_int_binop(
704 ret.write_cvalue(fx, res);
// offset-like intrinsic: result keeps the pointer layout of args[0].
707 assert_eq!(args.len(), 2);
708 let base = args[0].load_value(fx);
709 let offset = args[1].load_value(fx);
710 let res = fx.bcx.ins().iadd(base, offset);
711 ret.write_cvalue(fx, CValue::ByVal(res, args[0].layout()));
// transmute-like intrinsic: spill to stack and reread as the dst layout.
714 assert_eq!(args.len(), 1);
715 let src_ty = substs.type_at(0);
716 let dst_ty = substs.type_at(1);
717 assert_eq!(args[0].layout().ty, src_ty);
718 let addr = args[0].force_stack(fx);
719 let dst_layout = fx.layout_of(dst_ty);
720 ret.write_cvalue(fx, CValue::ByRef(addr, dst_layout))
// uninit-like intrinsic: a fresh (uninitialized) stack slot as the value.
723 assert_eq!(args.len(), 0);
724 let ty = substs.type_at(0);
725 let layout = fx.layout_of(ty);
726 let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
727 kind: StackSlotKind::ExplicitSlot,
728 size: layout.size.bytes() as u32,
732 let uninit_place = CPlace::from_stack_slot(fx, stack_slot, ty);
733 let uninit_val = uninit_place.to_cvalue(fx);
734 ret.write_cvalue(fx, uninit_val);
// Count leading zeros — correct use of clz here.
736 "ctlz" | "ctlz_nonzero" => {
737 assert_eq!(args.len(), 1);
738 let arg = args[0].load_value(fx);
739 let res = CValue::ByVal(fx.bcx.ins().clz(arg), args[0].layout());
740 ret.write_cvalue(fx, res);
// Count *trailing* zeros.
742 "cttz" | "cttz_nonzero" => {
743 assert_eq!(args.len(), 1);
744 let arg = args[0].load_value(fx);
// BUG FIX: this arm previously emitted `clz` (count *leading* zeros),
// an evident copy-paste from the ctlz arm above, so cttz returned the
// wrong result for every non-symmetric bit pattern. Cranelift's
// InstBuilder provides `ctz` for trailing-zero counts.
745 let res = CValue::ByVal(fx.bcx.ins().ctz(arg), args[0].layout());
746 ret.write_cvalue(fx, res);
// popcnt-style intrinsic (ctpop): count set bits of the operand.
749 assert_eq!(args.len(), 1);
750 let arg = args[0].load_value(fx);
751 let res = CValue::ByVal(fx.bcx.ins().popcnt(arg), args[0].layout());
752 ret.write_cvalue(fx, res);
754 _ => unimpl!("unsupported intrinsic {}", intrinsic),
// Intrinsic handled: jump to the continuation block, or trap (!0 marks
// an unreachable/diverging path) when the call has no destination.
757 if let Some((_, dest)) = destination {
758 let ret_ebb = fx.get_ebb(dest);
759 fx.bcx.ins().jump(ret_ebb, &[]);
761 fx.bcx.ins().trap(TrapCode::User(!0));