use std::iter;

use rustc::hir;
use rustc_target::spec::abi::Abi;

use crate::prelude::*;

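/// How a value is passed to or returned from a function at the ABI level:
/// not passed at all (unit return), in a single Cranelift value, or behind a
/// pointer (represented as an `I64`).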
#[derive(Debug)]
enum PassMode {
    NoPass,
    ByVal(Type),
    ByRef,
}

impl PassMode {
    fn get_param_ty(self, _fx: &FunctionCx) -> Type {
        match self {
            PassMode::NoPass => unimplemented!("pass mode nopass"),
            PassMode::ByVal(cton_type) => cton_type,
            PassMode::ByRef => types::I64,
        }
    }
}

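/// Decide how `ty` is passed for the given ABI. Unit is not passed at all when
/// it is the return type; types with a scalar Cranelift equivalent are passed
/// by value; everything else is passed by reference (a pointer-sized `I64`).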
fn get_pass_mode<'a, 'tcx: 'a>(
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    abi: Abi,
    ty: Ty<'tcx>,
    is_return: bool,
) -> PassMode {
    if ty.sty == tcx.mk_nil().sty {
        if is_return {
            PassMode::NoPass
        } else {
            PassMode::ByRef
        }
    } else if let Some(ret_ty) = crate::common::cton_type_from_ty(tcx, ty) {
        PassMode::ByVal(ret_ty)
    } else {
        if abi == Abi::C {
            unimplemented!("Non-scalars are not yet supported for the \"C\" abi");
        }
        PassMode::ByRef
    }
}

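/// Build a Cranelift `Signature` from a Rust function type: pick the call
/// convention from the ABI, untuple the trailing argument for "rust-call",
/// and, for by-reference returns, prepend a pointer parameter that the callee
/// writes the return value into.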
pub fn cton_sig_from_fn_ty<'a, 'tcx: 'a>(
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    fn_ty: Ty<'tcx>,
) -> Signature {
    let sig = ty_fn_sig(tcx, fn_ty);
    assert!(!sig.variadic, "Variadic functions are not yet supported");
    let (call_conv, inputs, output): (CallConv, Vec<Ty>, Ty) = match sig.abi {
        Abi::Rust => (CallConv::Fast, sig.inputs().to_vec(), sig.output()),
        Abi::C => (CallConv::SystemV, sig.inputs().to_vec(), sig.output()),
        Abi::RustCall => {
            println!(
                "rust-call sig: {:?} inputs: {:?} output: {:?}",
                sig,
                sig.inputs(),
                sig.output()
            );
            assert_eq!(sig.inputs().len(), 2);
            let extra_args = match sig.inputs().last().unwrap().sty {
                ty::TyTuple(ref tupled_arguments) => tupled_arguments,
                _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
            };
            let mut inputs: Vec<Ty> = vec![sig.inputs()[0]];
            inputs.extend(extra_args.into_iter());
            (CallConv::Fast, inputs, sig.output())
        }
        Abi::System => bug!("system abi should be selected elsewhere"),
        Abi::RustIntrinsic => (CallConv::SystemV, sig.inputs().to_vec(), sig.output()),
        _ => unimplemented!("unsupported abi {:?}", sig.abi),
    };

    let inputs = inputs
        .into_iter()
        .filter_map(|ty| match get_pass_mode(tcx, sig.abi, ty, false) {
            PassMode::ByVal(cton_ty) => Some(cton_ty),
            PassMode::NoPass => unimplemented!("pass mode nopass"),
            PassMode::ByRef => Some(types::I64),
        });

    let (params, returns) = match get_pass_mode(tcx, sig.abi, output, true) {
        PassMode::NoPass => (inputs.map(AbiParam::new).collect(), vec![]),
        PassMode::ByVal(ret_ty) => (
            inputs.map(AbiParam::new).collect(),
            vec![AbiParam::new(ret_ty)],
        ),
        PassMode::ByRef => {
            (
                Some(types::I64).into_iter() // First param is place to put return val
                    .chain(inputs)
                    .map(AbiParam::new)
                    .collect(),
                vec![],
            )
        }
    };

    Signature {
        params,
        returns,
        call_conv,
        argument_bytes: None,
    }
}

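/// Get the `FnSig` for any callable type: plain fn defs and fn pointers use
/// their own signature, closures get their environment prepended as the first
/// argument, and generators are given their resume signature returning the
/// `GeneratorState` lang item.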
fn ty_fn_sig<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> ty::FnSig<'tcx> {
    let sig = match ty.sty {
        ty::TyFnDef(..) |
        // Shims currently have type TyFnPtr. Not sure this should remain.
        ty::TyFnPtr(_) => ty.fn_sig(tcx),
        ty::TyClosure(def_id, substs) => {
            let sig = substs.closure_sig(def_id, tcx);

            let env_ty = tcx.closure_env_ty(def_id, substs).unwrap();
            sig.map_bound(|sig| tcx.mk_fn_sig(
                iter::once(*env_ty.skip_binder()).chain(sig.inputs().iter().cloned()),
                sig.output(),
                sig.variadic,
                sig.unsafety,
                sig.abi
            ))
        }
        ty::TyGenerator(def_id, substs, _) => {
            let sig = substs.poly_sig(def_id, tcx);

            let env_region = ty::ReLateBound(ty::INNERMOST, ty::BrEnv);
            let env_ty = tcx.mk_mut_ref(tcx.mk_region(env_region), ty);

            sig.map_bound(|sig| {
                let state_did = tcx.lang_items().gen_state().unwrap();
                let state_adt_ref = tcx.adt_def(state_did);
                let state_substs = tcx.intern_substs(&[
                    sig.yield_ty.into(),
                    sig.return_ty.into(),
                ]);
                let ret_ty = tcx.mk_adt(state_adt_ref, state_substs);

                tcx.mk_fn_sig(iter::once(env_ty),
                    ret_ty,
                    false,
                    hir::Unsafety::Normal,
                    Abi::Rust
                )
            })
        }
        _ => bug!("unexpected type {:?} to ty_fn_sig", ty)
    };
    tcx.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), &sig)
}

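/// Compute the symbol name and Cranelift signature for a monomorphized instance.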
fn get_function_name_and_sig<'a, 'tcx>(
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    inst: Instance<'tcx>,
) -> (String, Signature) {
    assert!(!inst.substs.needs_infer() && !inst.substs.has_param_types());
    let fn_ty = inst.ty(tcx);
    let sig = cton_sig_from_fn_ty(tcx, fn_ty);
    let def_path_based_names =
        ::rustc_mir::monomorphize::item::DefPathBasedNames::new(tcx, false, false);
    let mut name = String::new();
    def_path_based_names.push_instance_as_string(inst, &mut name);
    (name, sig)
}

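// Declaring functions in the module: `predefine_function` exports the symbol
// for functions codegened in this crate, while `get_function_ref` below
// imports it for call sites.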
impl<'a, 'tcx: 'a> CodegenCx<'a, 'tcx, CurrentBackend> {
    pub fn predefine_function(&mut self, inst: Instance<'tcx>) -> (FuncId, Function) {
        let (name, sig) = crate::abi::get_function_name_and_sig(self.tcx, inst);
        let func_id = self
            .module
            .declare_function(&name, Linkage::Export, &sig)
            .unwrap();
        let func =
            Function::with_name_signature(ExternalName::user(0, func_id.index() as u32), sig);
        (func_id, func)
    }
}

impl<'a, 'tcx: 'a> FunctionCx<'a, 'tcx> {
    /// Instance must be monomorphized
    pub fn get_function_ref(&mut self, inst: Instance<'tcx>) -> FuncRef {
        let (name, sig) = get_function_name_and_sig(self.tcx, inst);
        let func_id = self
            .module
            .declare_function(&name, Linkage::Import, &sig)
            .unwrap();
        self.module
            .declare_func_in_func(func_id, &mut self.bcx.func)
    }

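    /// Call a libc-style helper by name: the function is declared with import
    /// linkage and the SystemV call convention, the call is emitted directly,
    /// and the single result is returned when `output_ty` is given.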
    fn lib_call(
        &mut self,
        name: &str,
        input_tys: Vec<types::Type>,
        output_ty: Option<types::Type>,
        args: &[Value],
    ) -> Option<Value> {
        let sig = Signature {
            params: input_tys.iter().cloned().map(AbiParam::new).collect(),
            returns: vec![AbiParam::new(output_ty.unwrap_or(types::VOID))],
            call_conv: CallConv::SystemV,
            argument_bytes: None,
        };
        let func_id = self
            .module
            .declare_function(&name, Linkage::Import, &sig)
            .unwrap();
        let func_ref = self
            .module
            .declare_func_in_func(func_id, &mut self.bcx.func);
        let call_inst = self.bcx.ins().call(func_ref, args);
        if output_ty.is_none() {
            return None;
        }
        let results = self.bcx.inst_results(call_inst);
        assert_eq!(results.len(), 1);
        Some(results[0])
    }

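    /// Convenience wrapper around `lib_call` that works on `CValue`s: argument
    /// types are derived from their layouts, and a unit return type maps to no
    /// return value.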
    pub fn easy_call(
        &mut self,
        name: &str,
        args: &[CValue<'tcx>],
        return_ty: Ty<'tcx>,
    ) -> CValue<'tcx> {
        let (input_tys, args): (Vec<_>, Vec<_>) = args
            .into_iter()
            .map(|arg| {
                (
                    self.cton_type(arg.layout().ty).unwrap(),
                    arg.load_value(self),
                )
            }).unzip();
        let return_layout = self.layout_of(return_ty);
        let return_ty = if let TypeVariants::TyTuple(tup) = return_ty.sty {
            if !tup.is_empty() {
                bug!("easy_call( (...) -> <non empty tuple> ) is not allowed");
            }
            None
        } else {
            Some(self.cton_type(return_ty).unwrap())
        };
        if let Some(val) = self.lib_call(name, input_tys, return_ty, &args) {
            CValue::ByVal(val, return_layout)
        } else {
            CValue::ByRef(self.bcx.ins().iconst(types::I64, 0), return_layout)
        }
    }

    fn self_sig(&self) -> FnSig<'tcx> {
        ty_fn_sig(self.tcx, self.instance.ty(self.tcx))
    }

    fn return_type(&self) -> Ty<'tcx> {
        self.self_sig().output()
    }
}

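/// Set up the entry block of a function: bind every MIR argument and local to
/// either an SSA `Variable` or a stack slot (depending on the SSA analysis),
/// and reserve the return pointer parameter when the return value is passed by
/// reference.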
pub fn codegen_fn_prelude<'a, 'tcx: 'a>(fx: &mut FunctionCx<'a, 'tcx>, start_ebb: Ebb) {
    let ssa_analyzed = crate::analyze::analyze(fx);
    fx.tcx.sess.warn(&format!("ssa {:?}", ssa_analyzed));

    match fx.self_sig().abi {
        Abi::Rust | Abi::RustCall => {}
        _ => unimplemented!("declared function with neither \"rust\" nor \"rust-call\" abi"),
    }

    let ret_layout = fx.layout_of(fx.return_type());
    let output_pass_mode = get_pass_mode(fx.tcx, fx.self_sig().abi, fx.return_type(), true);
    let ret_param = match output_pass_mode {
        PassMode::NoPass => None,
        PassMode::ByVal(_) => None,
        PassMode::ByRef => Some(fx.bcx.append_ebb_param(start_ebb, types::I64)),
    };

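    // How a single MIR argument arrives at the ABI level: as one EBB parameter,
    // or spread over several for the tupled "rust-call" argument.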
    enum ArgKind {
        Normal(Value),
        Spread(Vec<Value>),
    }

    let func_params = fx.mir.args_iter().map(|local| {
        let arg_ty = fx.monomorphize(&fx.mir.local_decls[local].ty);

        // Adapted from https://github.com/rust-lang/rust/blob/145155dc96757002c7b2e9de8489416e2fdbbd57/src/librustc_codegen_llvm/mir/mod.rs#L442-L482
        if Some(local) == fx.mir.spread_arg {
            // This argument (e.g. the last argument in the "rust-call" ABI)
            // is a tuple that was spread at the ABI level and now we have
            // to reconstruct it into a tuple local variable, from multiple
            // individual function arguments.

            let tupled_arg_tys = match arg_ty.sty {
                ty::TyTuple(ref tys) => tys,
                _ => bug!("spread argument isn't a tuple?! but {:?}", arg_ty),
            };

            let mut ebb_params = Vec::new();
            for arg_ty in tupled_arg_tys.iter() {
                let cton_type = get_pass_mode(fx.tcx, fx.self_sig().abi, arg_ty, false).get_param_ty(fx);
                ebb_params.push(fx.bcx.append_ebb_param(start_ebb, cton_type));
            }

            (local, ArgKind::Spread(ebb_params), arg_ty)
        } else {
            let cton_type = get_pass_mode(fx.tcx, fx.self_sig().abi, arg_ty, false).get_param_ty(fx);
            (local, ArgKind::Normal(fx.bcx.append_ebb_param(start_ebb, cton_type)), arg_ty)
        }
    }).collect::<Vec<(Local, ArgKind, Ty)>>();

    match output_pass_mode {
        PassMode::NoPass => {
            let null = fx.bcx.ins().iconst(types::I64, 0);
            fx.local_map.insert(RETURN_PLACE, CPlace::Addr(null, ret_layout));
        }
        PassMode::ByVal(ret_ty) => {
            let var = Variable(RETURN_PLACE);
            fx.bcx.declare_var(var, ret_ty);
            fx.local_map
                .insert(RETURN_PLACE, CPlace::Var(var, ret_layout));
        }
        PassMode::ByRef => {
            fx.local_map
                .insert(RETURN_PLACE, CPlace::Addr(ret_param.unwrap(), ret_layout));
        }
    }

    for (local, arg_kind, ty) in func_params {
        let layout = fx.layout_of(ty);

        if let ArgKind::Normal(ebb_param) = arg_kind {
            if !ssa_analyzed
                .get(&local)
                .unwrap()
                .contains(crate::analyze::Flags::NOT_SSA)
            {
                let var = Variable(local);
                fx.bcx.declare_var(var, fx.cton_type(ty).unwrap());
                match get_pass_mode(fx.tcx, fx.self_sig().abi, ty, false) {
                    PassMode::NoPass => unimplemented!("pass mode nopass"),
                    PassMode::ByVal(_) => fx.bcx.def_var(var, ebb_param),
                    PassMode::ByRef => {
                        let val = CValue::ByRef(ebb_param, fx.layout_of(ty)).load_value(fx);
                        fx.bcx.def_var(var, val);
                    }
                }
                fx.local_map.insert(local, CPlace::Var(var, layout));
                continue;
            }
        }

        let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
            kind: StackSlotKind::ExplicitSlot,
            size: layout.size.bytes() as u32,
            offset: None,
        });

        let place = CPlace::from_stack_slot(fx, stack_slot, ty);

        match arg_kind {
            ArgKind::Normal(ebb_param) => {
                match get_pass_mode(fx.tcx, fx.self_sig().abi, ty, false) {
                    PassMode::NoPass => unimplemented!("pass mode nopass"),
                    PassMode::ByVal(_) => place.write_cvalue(fx, CValue::ByVal(ebb_param, place.layout())),
                    PassMode::ByRef => place.write_cvalue(fx, CValue::ByRef(ebb_param, place.layout())),
                }
            }
            ArgKind::Spread(ebb_params) => {
                for (i, ebb_param) in ebb_params.into_iter().enumerate() {
                    let sub_place = place.place_field(fx, mir::Field::new(i));
                    match get_pass_mode(fx.tcx, fx.self_sig().abi, sub_place.layout().ty, false) {
                        PassMode::NoPass => unimplemented!("pass mode nopass"),
                        PassMode::ByVal(_) => sub_place.write_cvalue(fx, CValue::ByVal(ebb_param, sub_place.layout())),
                        PassMode::ByRef => sub_place.write_cvalue(fx, CValue::ByRef(ebb_param, sub_place.layout())),
                    }
                }
            }
        }
        fx.local_map.insert(local, place);
    }

    for local in fx.mir.vars_and_temps_iter() {
        let ty = fx.mir.local_decls[local].ty;
        let layout = fx.layout_of(ty);

        let place = if ssa_analyzed
            .get(&local)
            .unwrap()
            .contains(crate::analyze::Flags::NOT_SSA)
        {
            let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
                kind: StackSlotKind::ExplicitSlot,
                size: layout.size.bytes() as u32,
                offset: None,
            });
            CPlace::from_stack_slot(fx, stack_slot, ty)
        } else {
            let var = Variable(local);
            fx.bcx.declare_var(var, fx.cton_type(ty).unwrap());
            CPlace::Var(var, layout)
        };

        fx.local_map.insert(local, place);
    }
}

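/// Codegen a MIR `Call` terminator: translate the callee and arguments, let
/// intrinsics take a shortcut through `codegen_intrinsic_call`, pass a return
/// pointer for by-reference returns, and jump to the destination block (or
/// trap when there is none, i.e. the call diverges).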
pub fn codegen_call<'a, 'tcx: 'a>(
    fx: &mut FunctionCx<'a, 'tcx>,
    func: &Operand<'tcx>,
    args: &[Operand<'tcx>],
    destination: &Option<(Place<'tcx>, BasicBlock)>,
) {
    let func = trans_operand(fx, func);
    let fn_ty = func.layout().ty;
    let sig = ty_fn_sig(fx.tcx, fn_ty);

    // Unpack the argument tuple for closures
    let args = if sig.abi == Abi::RustCall {
        assert_eq!(args.len(), 2, "rust-call abi requires two arguments");
        let self_arg = trans_operand(fx, &args[0]);
        let pack_arg = trans_operand(fx, &args[1]);
        let mut args = Vec::new();
        args.push(self_arg);
        match pack_arg.layout().ty.sty {
            ty::TyTuple(ref tupled_arguments) => {
                for (i, _) in tupled_arguments.iter().enumerate() {
                    args.push(pack_arg.value_field(fx, mir::Field::new(i)));
                }
            }
            _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
        }
        println!(
            "{:?} {:?}",
            pack_arg.layout().ty,
            args.iter().map(|a| a.layout().ty).collect::<Vec<_>>()
        );
        args
    } else {
        args.into_iter()
            .map(|arg| trans_operand(fx, arg))
            .collect::<Vec<_>>()
    };

    let destination = destination.as_ref().map(|(place, bb)| {
        (trans_place(fx, place), *bb)
    });

    if codegen_intrinsic_call(fx, fn_ty, sig, &args, destination) {
        return;
    }

    let ret_layout = fx.layout_of(sig.output());

    let output_pass_mode = get_pass_mode(fx.tcx, sig.abi, sig.output(), true);
    println!("{:?}", output_pass_mode);
    let return_ptr = match output_pass_mode {
        PassMode::NoPass => None,
        PassMode::ByRef => match destination {
            Some((place, _)) => Some(place.expect_addr()),
            None => Some(fx.bcx.ins().iconst(types::I64, 0)),
        },
        PassMode::ByVal(_) => None,
    };

    let call_args: Vec<Value> = return_ptr
        .into_iter()
        .chain(
            args.into_iter()
                .map(|arg| match get_pass_mode(fx.tcx, sig.abi, arg.layout().ty, false) {
                    PassMode::NoPass => unimplemented!("pass mode nopass"),
                    PassMode::ByVal(_) => arg.load_value(fx),
                    PassMode::ByRef => arg.force_stack(fx),
                }),
        ).collect::<Vec<_>>();

    let inst = match func {
        CValue::Func(func, _) => fx.bcx.ins().call(func, &call_args),
        func => {
            let func = func.load_value(fx);
            let sig = fx.bcx.import_signature(cton_sig_from_fn_ty(fx.tcx, fn_ty));
            fx.bcx.ins().call_indirect(sig, func, &call_args)
        }
    };

    match output_pass_mode {
        PassMode::NoPass => {}
        PassMode::ByVal(_) => {
            if let Some((ret_place, _)) = destination {
                let results = fx.bcx.inst_results(inst);
                ret_place.write_cvalue(fx, CValue::ByVal(results[0], ret_layout));
            }
        }
        PassMode::ByRef => {}
    }
    if let Some((_, dest)) = destination {
        let ret_ebb = fx.get_ebb(dest);
        fx.bcx.ins().jump(ret_ebb, &[]);
    } else {
        fx.bcx.ins().trap(TrapCode::User(!0));
    }
}

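/// Codegen a MIR `Return` terminator. By-value returns load the value out of
/// the return place; unit and by-reference returns emit a bare `return`.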
pub fn codegen_return(fx: &mut FunctionCx) {
    match get_pass_mode(fx.tcx, fx.self_sig().abi, fx.return_type(), true) {
        PassMode::NoPass | PassMode::ByRef => {
            fx.bcx.ins().return_(&[]);
        }
        PassMode::ByVal(_) => {
            let place = fx.get_local_place(RETURN_PLACE);
            let ret_val = place.to_cvalue(fx).load_value(fx);
            fx.bcx.ins().return_(&[ret_val]);
        }
    }
}

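/// Handle calls with the "rust-intrinsic" ABI inline. Returns `true` when the
/// call was an intrinsic and has been fully codegened, `false` otherwise.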
fn codegen_intrinsic_call<'a, 'tcx: 'a>(
    fx: &mut FunctionCx<'a, 'tcx>,
    fn_ty: Ty<'tcx>,
    sig: FnSig<'tcx>,
    args: &[CValue<'tcx>],
    destination: Option<(CPlace<'tcx>, BasicBlock)>,
) -> bool {
    if let TypeVariants::TyFnDef(def_id, substs) = fn_ty.sty {
        if sig.abi == Abi::RustIntrinsic {
            let intrinsic = fx.tcx.item_name(def_id).as_str();
            let intrinsic = &intrinsic[..];

            let ret = match destination {
                Some((place, _)) => place,
                None => {
                    println!(
                        "codegen_call(fx, _, {:?}, {:?})",
                        args, destination
                    );
                    // Insert non-returning intrinsics here
                    match intrinsic {
                        "abort" => {
                            fx.bcx.ins().trap(TrapCode::User(!0 - 1));
                        }
                        "unreachable" => {
                            fx.bcx.ins().trap(TrapCode::User(!0 - 1));
                        }
                        _ => unimplemented!("unsupported intrinsic {}", intrinsic),
                    }
                    return true;
                }
            };

            let nil_ty = fx.tcx.mk_nil();
            let u64_layout = fx.layout_of(fx.tcx.types.u64);
            let usize_layout = fx.layout_of(fx.tcx.types.usize);

            match intrinsic {
                "assume" => {
                    assert_eq!(args.len(), 1);
                }
                "arith_offset" => {
                    assert_eq!(args.len(), 2);
                    let base = args[0].load_value(fx);
                    let offset = args[1].load_value(fx);
                    let res = fx.bcx.ins().iadd(base, offset);
                    let res = CValue::ByVal(res, ret.layout());
                    ret.write_cvalue(fx, res);
                }
                "likely" | "unlikely" => {
                    assert_eq!(args.len(), 1);
                    ret.write_cvalue(fx, args[0]);
                }
                "copy" | "copy_nonoverlapping" => {
                    let elem_ty = substs.type_at(0);
                    let elem_size: u64 = fx.layout_of(elem_ty).size.bytes();
                    let elem_size = fx.bcx.ins().iconst(types::I64, elem_size as i64);
                    assert_eq!(args.len(), 3);
                    let src = args[0];
                    let dst = args[1];
                    let count = args[2].load_value(fx);
                    let byte_amount = fx.bcx.ins().imul(count, elem_size);
                    fx.easy_call(
                        "memmove",
                        &[dst, src, CValue::ByVal(byte_amount, usize_layout)],
                        nil_ty,
                    );
                }
                "discriminant_value" => {
                    assert_eq!(args.len(), 1);
                    let discr = crate::base::trans_get_discriminant(fx, args[0], ret.layout());
                    ret.write_cvalue(fx, discr);
                }
                "size_of" => {
                    assert_eq!(args.len(), 0);
                    let size_of = fx.layout_of(substs.type_at(0)).size.bytes();
                    let size_of = CValue::const_val(fx, usize_layout.ty, size_of as i64);
                    ret.write_cvalue(fx, size_of);
                }
                "type_id" => {
                    assert_eq!(args.len(), 0);
                    let type_id = fx.tcx.type_id_hash(substs.type_at(0));
                    let type_id = CValue::const_val(fx, u64_layout.ty, type_id as i64);
                    ret.write_cvalue(fx, type_id);
                }
                "min_align_of" => {
                    assert_eq!(args.len(), 0);
                    let min_align = fx.layout_of(substs.type_at(0)).align.abi();
                    let min_align = CValue::const_val(fx, usize_layout.ty, min_align as i64);
                    ret.write_cvalue(fx, min_align);
                }
                _ if intrinsic.starts_with("unchecked_") => {
                    assert_eq!(args.len(), 2);
                    let bin_op = match intrinsic {
                        "unchecked_div" => BinOp::Div,
                        "unchecked_rem" => BinOp::Rem,
                        "unchecked_shl" => BinOp::Shl,
                        "unchecked_shr" => BinOp::Shr,
                        _ => unimplemented!("intrinsic {}", intrinsic),
                    };
                    let res = match ret.layout().ty.sty {
                        TypeVariants::TyUint(_) => crate::base::trans_int_binop(
                            fx,
                            bin_op,
                            args[0],
                            args[1],
                            ret.layout().ty,
                            false,
                        ),
                        TypeVariants::TyInt(_) => crate::base::trans_int_binop(
                            fx,
                            bin_op,
                            args[0],
                            args[1],
                            ret.layout().ty,
                            true,
                        ),
                        _ => panic!(),
                    };
                    ret.write_cvalue(fx, res);
                }
                _ if intrinsic.ends_with("_with_overflow") => {
                    assert_eq!(args.len(), 2);
                    assert_eq!(args[0].layout().ty, args[1].layout().ty);
                    let bin_op = match intrinsic {
                        "add_with_overflow" => BinOp::Add,
                        "sub_with_overflow" => BinOp::Sub,
                        "mul_with_overflow" => BinOp::Mul,
                        _ => unimplemented!("intrinsic {}", intrinsic),
                    };
                    let res = match args[0].layout().ty.sty {
                        TypeVariants::TyUint(_) => crate::base::trans_checked_int_binop(
                            fx,
                            bin_op,
                            args[0],
                            args[1],
                            ret.layout().ty,
                            false,
                        ),
                        TypeVariants::TyInt(_) => crate::base::trans_checked_int_binop(
                            fx,
                            bin_op,
                            args[0],
                            args[1],
                            ret.layout().ty,
                            true,
                        ),
                        _ => panic!(),
                    };
                    ret.write_cvalue(fx, res);
                }
                _ if intrinsic.starts_with("overflowing_") => {
                    assert_eq!(args.len(), 2);
                    assert_eq!(args[0].layout().ty, args[1].layout().ty);
                    let bin_op = match intrinsic {
                        "overflowing_add" => BinOp::Add,
                        "overflowing_sub" => BinOp::Sub,
                        "overflowing_mul" => BinOp::Mul,
                        _ => unimplemented!("intrinsic {}", intrinsic),
                    };
                    let res = match args[0].layout().ty.sty {
                        TypeVariants::TyUint(_) => crate::base::trans_int_binop(
                            fx,
                            bin_op,
                            args[0],
                            args[1],
                            ret.layout().ty,
                            false,
                        ),
                        TypeVariants::TyInt(_) => crate::base::trans_int_binop(
                            fx,
                            bin_op,
                            args[0],
                            args[1],
                            ret.layout().ty,
                            true,
                        ),
                        _ => panic!(),
                    };
                    ret.write_cvalue(fx, res);
                }
                "offset" => {
                    assert_eq!(args.len(), 2);
                    let base = args[0].load_value(fx);
                    let offset = args[1].load_value(fx);
                    let res = fx.bcx.ins().iadd(base, offset);
                    ret.write_cvalue(fx, CValue::ByVal(res, args[0].layout()));
                }
                "transmute" => {
                    assert_eq!(args.len(), 1);
                    let src_ty = substs.type_at(0);
                    let dst_ty = substs.type_at(1);
                    assert_eq!(args[0].layout().ty, src_ty);
                    let addr = args[0].force_stack(fx);
                    let dst_layout = fx.layout_of(dst_ty);
                    ret.write_cvalue(fx, CValue::ByRef(addr, dst_layout))
                }
                "uninit" => {
                    assert_eq!(args.len(), 0);
                    let ty = substs.type_at(0);
                    let layout = fx.layout_of(ty);
                    let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
                        kind: StackSlotKind::ExplicitSlot,
                        size: layout.size.bytes() as u32,
                        offset: None,
                    });

                    let uninit_place = CPlace::from_stack_slot(fx, stack_slot, ty);
                    let uninit_val = uninit_place.to_cvalue(fx);
                    ret.write_cvalue(fx, uninit_val);
                }
                "ctlz" | "ctlz_nonzero" => {
                    assert_eq!(args.len(), 1);
                    let arg = args[0].load_value(fx);
                    let res = CValue::ByVal(fx.bcx.ins().clz(arg), args[0].layout());
                    ret.write_cvalue(fx, res);
                }
                "cttz" | "cttz_nonzero" => {
                    assert_eq!(args.len(), 1);
                    let arg = args[0].load_value(fx);
                    let res = CValue::ByVal(fx.bcx.ins().ctz(arg), args[0].layout());
                    ret.write_cvalue(fx, res);
                }
                "ctpop" => {
                    assert_eq!(args.len(), 1);
                    let arg = args[0].load_value(fx);
                    let res = CValue::ByVal(fx.bcx.ins().popcnt(arg), args[0].layout());
                    ret.write_cvalue(fx, res);
                }
                _ => unimpl!("unsupported intrinsic {}", intrinsic),
            }

            if let Some((_, dest)) = destination {
                let ret_ebb = fx.get_ebb(dest);
                fx.bcx.ins().jump(ret_ebb, &[]);
            } else {
                fx.bcx.ins().trap(TrapCode::User(!0));
            }
            return true;
        }
    }

    false
}