4 use rustc_target::spec::abi::Abi;
// Lower a monomorphized Rust function type to a Cranelift (Cretonne) `Signature`.
// The return value is passed indirectly: the first parameter is an I64 pointer
// to the place the result is written to. Argument types with no direct
// Cranelift scalar type fall back to I64 — presumably passed by reference;
// TODO confirm against the call-site lowering.
// NOTE(review): the embedded original line numbers are non-contiguous, so some
// statements of this function are not visible in this extract.
8 pub fn cton_sig_from_fn_ty<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, fn_ty: Ty<'tcx>) -> Signature {
9 let sig = ty_fn_sig(tcx, fn_ty);
// Variadic ("...") signatures are rejected up front — not supported yet.
10 assert!(!sig.variadic, "Variadic function are not yet supported");
// Select the calling convention and flatten the input list per ABI.
11 let (call_conv, inputs, _output): (CallConv, Vec<Ty>, Ty) = match sig.abi {
12 Abi::Rust => (CallConv::SystemV, sig.inputs().to_vec(), sig.output()),
// "rust-call" ABI (closure calls): exactly two inputs, the second of which is
// a tuple that must be untupled into individual ABI-level parameters.
14 println!("rust-call sig: {:?} inputs: {:?} output: {:?}", sig, sig.inputs(), sig.output());
15 assert_eq!(sig.inputs().len(), 2);
16 let extra_args = match sig.inputs().last().unwrap().sty {
17 ty::TyTuple(ref tupled_arguments) => tupled_arguments,
18 _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
// Rebuild the input list as: self/env argument followed by the tuple's fields.
20 let mut inputs: Vec<Ty> = vec![sig.inputs()[0]];
21 inputs.extend(extra_args.into_iter());
28 Abi::System => bug!("system abi should be selected elsewhere"),
29 Abi::RustIntrinsic => (CallConv::SystemV, sig.inputs().to_vec(), sig.output()),
30 _ => unimplemented!("unsupported abi {:?}", sig.abi),
// Prepend the hidden return-place pointer, then map each Rust type to its
// Cranelift type (I64 fallback for non-scalar types).
33 params: Some(types::I64).into_iter() // First param is place to put return val
34 .chain(inputs.into_iter().map(|ty| cton_type_from_ty(tcx, ty).unwrap_or(types::I64)))
35 .map(AbiParam::new).collect(),
// Compute the `ty::FnSig` for a callable type. Fn items/pointers use their own
// signature; closures and generators get a synthesized signature whose first
// input is the environment ("self") type. Late-bound regions are erased and
// the signature normalized before returning, so codegen sees concrete types.
// NOTE(review): the embedded original line numbers are non-contiguous — parts
// of this function are not visible in this extract.
42 fn ty_fn_sig<'a, 'tcx>(
43 tcx: TyCtxt<'a, 'tcx, 'tcx>,
45 ) -> ty::FnSig<'tcx> {
46 let sig = match ty.sty {
48 // Shims currently have type TyFnPtr. Not sure this should remain.
49 ty::TyFnPtr(_) => ty.fn_sig(tcx),
50 ty::TyClosure(def_id, substs) => {
51 let sig = substs.closure_sig(def_id, tcx);
// Prepend the closure environment type as the first (self) argument.
53 let env_ty = tcx.closure_env_ty(def_id, substs).unwrap();
54 sig.map_bound(|sig| tcx.mk_fn_sig(
55 iter::once(*env_ty.skip_binder()).chain(sig.inputs().iter().cloned()),
62 ty::TyGenerator(def_id, substs, _) => {
63 let sig = substs.poly_sig(def_id, tcx);
// Generators take `&mut self` (with a late-bound env region) and return
// the `GeneratorState` lang-item ADT instantiated with the yield/return
// types (the substs construction is partly elided here).
65 let env_region = ty::ReLateBound(ty::INNERMOST, ty::BrEnv);
66 let env_ty = tcx.mk_mut_ref(tcx.mk_region(env_region), ty);
69 let state_did = tcx.lang_items().gen_state().unwrap();
70 let state_adt_ref = tcx.adt_def(state_did);
71 let state_substs = tcx.intern_substs(&[
75 let ret_ty = tcx.mk_adt(state_adt_ref, state_substs);
77 tcx.mk_fn_sig(iter::once(env_ty),
80 hir::Unsafety::Normal,
85 _ => bug!("unexpected type {:?} to ty_fn_sig", ty)
// Erase the binder and normalize with all generics revealed.
87 tcx.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), &sig)
// Per-function codegen context: helpers for declaring/calling functions and
// for querying the current function's own signature.
// NOTE(review): this impl block is only partially visible in this extract
// (original line numbers are non-contiguous; its closing brace is elided).
90 impl<'a, 'tcx: 'a> FunctionCx<'a, 'tcx> {
91 /// Instance must be monomorphized
/// Declare the instance in the module (cached via `def_id_fn_id_map`) and
/// import it into the current function, returning a `FuncRef` usable by
/// `call` instructions.
92 pub fn get_function_ref(&mut self, inst: Instance<'tcx>) -> FuncRef {
// Monomorphization invariant: no inference vars or type params may remain.
93 assert!(!inst.substs.needs_infer() && !inst.substs.has_param_types());
95 let module = &mut self.module;
96 let func_id = *self.def_id_fn_id_map.entry(inst).or_insert_with(|| {
97 let fn_ty = inst.ty(tcx);
98 let sig = cton_sig_from_fn_ty(tcx, fn_ty);
// Derive a symbol name from the instance's def path.
99 let def_path_based_names = ::rustc_mir::monomorphize::item::DefPathBasedNames::new(tcx, false, false);
100 let mut name = String::new();
101 def_path_based_names.push_instance_as_string(inst, &mut name);
102 module.declare_function(&name, Linkage::Local, &sig).unwrap()
104 module.declare_func_in_func(func_id, &mut self.bcx.func)
// NOTE(review): the header of the following helper (presumably `lib_call`,
// judging by the call site in `easy_call` below) is not visible in this
// extract; the parameter lines below belong to it.
110 input_tys: Vec<types::Type>,
111 output_ty: Option<types::Type>,
// Build an import signature: plain SystemV C-style call, with one return
// slot (VOID when the callee returns nothing).
114 let sig = Signature {
115 params: input_tys.iter().cloned().map(AbiParam::new).collect(),
116 returns: vec![AbiParam::new(output_ty.unwrap_or(types::VOID))],
117 call_conv: CallConv::SystemV,
118 argument_bytes: None,
// Declare the external function by name and call it directly.
120 let func_id = self.module.declare_function(&name, Linkage::Import, &sig).unwrap();
121 let func_ref = self.module.declare_func_in_func(func_id, &mut self.bcx.func);
122 let call_inst = self.bcx.ins().call(func_ref, args);
123 if output_ty.is_none() {
// Exactly one SSA result is expected when the callee returns a value.
126 let results = self.bcx.inst_results(call_inst);
127 assert_eq!(results.len(), 1);
/// Convenience wrapper around the lib-call helper: loads each `CValue`
/// argument into an SSA value, maps `return_ty` to a Cranelift type, and
/// wraps the result back into a `CValue`. Only scalar-typed arguments are
/// supported (`cton_type(...).unwrap()`), and `(...) -> <non-empty tuple>`
/// is rejected.
131 pub fn easy_call(&mut self, name: &str, args: &[CValue<'tcx>], return_ty: Ty<'tcx>) -> CValue<'tcx> {
132 let (input_tys, args): (Vec<_>, Vec<_>) = args.into_iter().map(|arg| (self.cton_type(arg.layout().ty).unwrap(), arg.load_value(self))).unzip();
133 let return_layout = self.layout_of(return_ty);
134 let return_ty = if let TypeVariants::TyTuple(tup) = return_ty.sty {
136 bug!("easy_call( (...) -> <non empty tuple> ) is not allowed");
140 Some(self.cton_type(return_ty).unwrap())
142 if let Some(val) = self.lib_call(name, input_tys, return_ty, &args) {
143 CValue::ByVal(val, return_layout)
// No return value: hand back a dummy ByRef (null address) — presumably
// only reached for unit-like returns; TODO confirm callers never read it.
145 CValue::ByRef(self.bcx.ins().iconst(types::I64, 0), return_layout)
// Signature of the function currently being compiled.
149 fn self_sig(&self) -> FnSig<'tcx> {
150 ty_fn_sig(self.tcx, self.instance.ty(self.tcx))
// Return type of the function currently being compiled.
153 fn return_type(&self) -> Ty<'tcx> {
154 self.self_sig().output()
// Emit the function prelude: bind the hidden return-place pointer (first EBB
// param, I64), materialize each MIR argument into a stack slot (re-tupling
// "rust-call" spread arguments), and allocate stack slots for all locals.
// NOTE(review): the embedded original line numbers are non-contiguous — parts
// of this function (e.g. the ArgKind enum match arms' surroundings) are not
// visible in this extract.
158 pub fn codegen_fn_prelude<'a, 'tcx: 'a>(fx: &mut FunctionCx<'a, 'tcx>, start_ebb: Ebb) {
// First EBB parameter is the pointer to the return place (see
// `cton_sig_from_fn_ty`, which prepends an I64 return-place param).
159 let ret_param = fx.bcx.append_ebb_param(start_ebb, types::I64);
160 let _ = fx.bcx.create_stack_slot(StackSlotData {
161 kind: StackSlotKind::ExplicitSlot,
164 }); // Dummy stack slot for debugging
// Collect one EBB parameter per ABI-level argument, remembering how each MIR
// local maps onto them (Normal = one param, Spread = one param per tuple field).
171 let func_params = fx.mir.args_iter().map(|local| {
172 let arg_ty = fx.mir.local_decls[local].ty;
174 // Adapted from https://github.com/rust-lang/rust/blob/145155dc96757002c7b2e9de8489416e2fdbbd57/src/librustc_codegen_llvm/mir/mod.rs#L442-L482
175 if Some(local) == fx.mir.spread_arg {
176 // This argument (e.g. the last argument in the "rust-call" ABI)
177 // is a tuple that was spread at the ABI level and now we have
178 // to reconstruct it into a tuple local variable, from multiple
179 // individual function arguments.
181 let tupled_arg_tys = match arg_ty.sty {
182 ty::TyTuple(ref tys) => tys,
183 _ => bug!("spread argument isn't a tuple?!")
// One EBB parameter per tuple field; I64 fallback mirrors the signature
// lowering in `cton_sig_from_fn_ty`.
186 let mut ebb_params = Vec::new();
187 for arg_ty in tupled_arg_tys.iter() {
188 let cton_type = fx.cton_type(arg_ty).unwrap_or(types::I64);
189 ebb_params.push(fx.bcx.append_ebb_param(start_ebb, cton_type));
192 (local, ArgKind::Spread(ebb_params), arg_ty)
194 let cton_type = fx.cton_type(arg_ty).unwrap_or(types::I64);
195 (local, ArgKind::Normal(fx.bcx.append_ebb_param(start_ebb, cton_type)), arg_ty)
197 }).collect::<Vec<(Local, ArgKind, Ty)>>();
// RETURN_PLACE lives at the address the caller passed in.
199 let ret_layout = fx.layout_of(fx.return_type());
200 fx.local_map.insert(RETURN_PLACE, CPlace::Addr(ret_param, ret_layout));
// Spill every argument into its own stack slot so it can be treated as a
// place; scalar args arrive ByVal, everything else ByRef (the EBB param is
// then presumably the address — TODO confirm against the caller side).
202 for (local, arg_kind, ty) in func_params {
203 let layout = fx.layout_of(ty);
204 let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
205 kind: StackSlotKind::ExplicitSlot,
206 size: layout.size.bytes() as u32,
210 let place = CPlace::from_stack_slot(fx, stack_slot, ty);
213 ArgKind::Normal(ebb_param) => {
214 if fx.cton_type(ty).is_some() {
215 place.write_cvalue(fx, CValue::ByVal(ebb_param, place.layout()));
217 place.write_cvalue(fx, CValue::ByRef(ebb_param, place.layout()));
// Spread argument: write each EBB param into the corresponding tuple field.
220 ArgKind::Spread(ebb_params) => {
221 for (i, ebb_param) in ebb_params.into_iter().enumerate() {
222 let sub_place = place.place_field(fx, mir::Field::new(i));
223 if fx.cton_type(sub_place.layout().ty).is_some() {
224 sub_place.write_cvalue(fx, CValue::ByVal(ebb_param, sub_place.layout()));
226 sub_place.write_cvalue(fx, CValue::ByRef(ebb_param, sub_place.layout()));
231 fx.local_map.insert(local, place);
// Allocate a stack slot for every remaining var/temp local.
234 for local in fx.mir.vars_and_temps_iter() {
235 let ty = fx.mir.local_decls[local].ty;
236 let layout = fx.layout_of(ty);
237 let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
238 kind: StackSlotKind::ExplicitSlot,
239 size: layout.size.bytes() as u32,
242 let place = CPlace::from_stack_slot(fx, stack_slot, ty);
243 fx.local_map.insert(local, place);
// Codegen a MIR `Call` terminator: evaluates the callee and arguments,
// untuples "rust-call" arguments, special-cases RustIntrinsic callees inline,
// and otherwise emits a direct or indirect Cranelift call (return value passed
// via pointer as the first call argument). Jumps to the destination block, or
// traps when the call diverges.
// NOTE(review): the embedded original line numbers are non-contiguous and the
// function appears to continue past the last visible line — several statements
// (including some match arms' heads) are elided in this extract.
247 pub fn codegen_call<'a, 'tcx: 'a>(
248 fx: &mut FunctionCx<'a, 'tcx>,
249 func: &Operand<'tcx>,
250 args: &[Operand<'tcx>],
251 destination: &Option<(Place<'tcx>, BasicBlock)>,
253 let func = ::base::trans_operand(fx, func);
254 let fn_ty = func.layout().ty;
255 let sig = ty_fn_sig(fx.tcx, fn_ty);
// The place the return value is written to, when the call has a destination.
257 let return_place = if let Some((place, _)) = destination {
258 Some(::base::trans_place(fx, place))
263 // Unpack arguments tuple for closures
264 let args = if sig.abi == Abi::RustCall {
265 assert_eq!(args.len(), 2, "rust-call abi requires two arguments");
266 let self_arg = ::base::trans_operand(fx, &args[0]);
267 let pack_arg = ::base::trans_operand(fx, &args[1]);
268 let mut args = Vec::new();
// Flatten the packed tuple into one CValue per field (mirrors the
// signature untupling in `cton_sig_from_fn_ty`).
270 match pack_arg.layout().ty.sty {
271 ty::TyTuple(ref tupled_arguments) => {
272 for (i, _) in tupled_arguments.iter().enumerate() {
273 args.push(pack_arg.value_field(fx, mir::Field::new(i)));
276 _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
278 println!("{:?} {:?}", pack_arg.layout().ty, args.iter().map(|a|a.layout().ty).collect::<Vec<_>>());
284 ::base::trans_operand(fx, arg)
// Intrinsics are handled inline rather than called.
289 if let TypeVariants::TyFnDef(def_id, substs) = fn_ty.sty {
290 if sig.abi == Abi::RustIntrinsic {
291 let intrinsic = fx.tcx.item_name(def_id).as_str();
292 let intrinsic = &intrinsic[..];
294 let nil_ty = fx.tcx.mk_nil();
295 let usize_layout = fx.layout_of(fx.tcx.types.usize);
296 let ret = return_place.unwrap();
// Unmatched/unsupported path traps with a distinctive code.
299 fx.bcx.ins().trap(TrapCode::User(!0 - 1));
301 "copy" | "copy_nonoverlapping" => {
302 let elem_ty = substs.type_at(0);
303 let elem_size: u64 = fx.layout_of(elem_ty).size.bytes();
304 let elem_size = fx.bcx.ins().iconst(types::I64, elem_size as i64);
305 assert_eq!(args.len(), 3);
308 let count = args[2].load_value(fx);
309 let byte_amount = fx.bcx.ins().imul(count, elem_size);
310 fx.easy_call("memmove", &[dst, src, CValue::ByVal(byte_amount, usize_layout)], nil_ty);
// NOTE(review): this `unimplemented!` immediately after the memmove
// call makes copy/copy_nonoverlapping panic at codegen time despite
// the call above — looks like deliberate work-in-progress; confirm.
311 unimplemented!("copy");
313 "discriminant_value" => {
314 assert_eq!(args.len(), 1);
315 let discr = ::base::trans_get_discriminant(fx, args[0], ret.layout());
316 ret.write_cvalue(fx, discr);
// size_of: the monomorphized size as a usize constant.
319 assert_eq!(args.len(), 0);
320 let size_of = fx.layout_of(substs.type_at(0)).size.bytes();
321 let size_of = CValue::const_val(fx, usize_layout.ty, size_of as i64);
322 ret.write_cvalue(fx, size_of);
// type_id: hash of the monomorphized type (truncated to i64 via `as`).
325 assert_eq!(args.len(), 0);
326 let type_id = fx.tcx.type_id_hash(substs.type_at(0));
327 let type_id = CValue::const_val(fx, usize_layout.ty, type_id as i64);
328 ret.write_cvalue(fx, type_id);
// min_align_of: ABI alignment as a usize constant.
331 assert_eq!(args.len(), 0);
332 let min_align = fx.layout_of(substs.type_at(0)).align.abi();
333 let min_align = CValue::const_val(fx, usize_layout.ty, min_align as i64);
334 ret.write_cvalue(fx, min_align);
// unchecked_{div,rem,shl,shr}: plain binops with overflow checks off.
336 _ if intrinsic.starts_with("unchecked_") => {
337 assert_eq!(args.len(), 2);
338 let bin_op = match intrinsic {
339 "unchecked_div" => BinOp::Div,
340 "unchecked_rem" => BinOp::Rem,
341 "unchecked_shl" => BinOp::Shl,
342 "unchecked_shr" => BinOp::Shr,
343 _ => unimplemented!("intrinsic {}", intrinsic),
// Signedness is taken from the return type's int flavor.
345 let res = match ret.layout().ty.sty {
346 TypeVariants::TyUint(_) => {
347 ::base::trans_int_binop(fx, bin_op, args[0], args[1], ret.layout().ty, false, false)
349 TypeVariants::TyInt(_) => {
350 ::base::trans_int_binop(fx, bin_op, args[0], args[1], ret.layout().ty, true, false)
354 ret.write_cvalue(fx, res);
// offset (pointer arithmetic): plain integer add of base + offset.
357 assert_eq!(args.len(), 2);
358 let base = args[0].load_value(fx);
359 let offset = args[1].load_value(fx);
360 let res = fx.bcx.ins().iadd(base, offset);
// NOTE(review): result is tagged with args[0]'s layout, not ret's —
// presumably identical for `offset`, but worth confirming.
361 ret.write_cvalue(fx, CValue::ByVal(res, args[0].layout()));
// transmute: reinterpret the argument's stack address at the target layout.
364 assert_eq!(args.len(), 1);
365 let src_ty = substs.type_at(0);
366 let dst_ty = substs.type_at(1);
367 assert_eq!(args[0].layout().ty, src_ty);
368 let addr = args[0].force_stack(fx);
369 let dst_layout = fx.layout_of(dst_ty);
370 ret.write_cvalue(fx, CValue::ByRef(addr, dst_layout))
// uninit: return the contents of a freshly allocated (uninitialized) slot.
373 assert_eq!(args.len(), 0);
374 let ty = substs.type_at(0);
375 let layout = fx.layout_of(ty);
376 let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
377 kind: StackSlotKind::ExplicitSlot,
378 size: layout.size.bytes() as u32,
382 let uninit_place = CPlace::from_stack_slot(fx, stack_slot, ty);
383 let uninit_val = uninit_place.to_cvalue(fx);
384 ret.write_cvalue(fx, uninit_val);
386 _ => fx.tcx.sess.fatal(&format!("unsupported intrinsic {}", intrinsic)),
// After an inline intrinsic: branch to the destination, or trap if diverging.
388 if let Some((_, dest)) = *destination {
389 let ret_ebb = fx.get_ebb(dest);
390 fx.bcx.ins().jump(ret_ebb, &[]);
392 fx.bcx.ins().trap(TrapCode::User(!0));
// Normal call path: first argument is the return-place pointer (null I64 when
// there is no destination), matching the signature from `cton_sig_from_fn_ty`.
398 let return_ptr = match return_place {
399 Some(place) => place.expect_addr(),
400 None => fx.bcx.ins().iconst(types::I64, 0),
// Scalar args are loaded by value; the elided else-branch presumably passes
// non-scalars by reference — TODO confirm (lines missing from this extract).
403 let call_args = Some(return_ptr).into_iter().chain(args.into_iter().map(|arg| {
404 if fx.cton_type(arg.layout().ty).is_some() {
409 })).collect::<Vec<_>>();
// Direct call when the callee is a known function, otherwise load the fn
// pointer and emit an indirect call with an imported signature.
412 CValue::Func(func, _) => {
413 fx.bcx.ins().call(func, &call_args);
416 let func_ty = func.layout().ty;
417 let func = func.load_value(fx);
418 let sig = fx.bcx.import_signature(cton_sig_from_fn_ty(fx.tcx, func_ty));
419 fx.bcx.ins().call_indirect(sig, func, &call_args);
// Branch to the destination block, or trap for diverging calls.
422 if let Some((_, dest)) = *destination {
423 let ret_ebb = fx.get_ebb(dest);
424 fx.bcx.ins().jump(ret_ebb, &[]);
426 fx.bcx.ins().trap(TrapCode::User(!0));