]> git.lizzy.rs Git - rust.git/blob - src/abi.rs
Implement drop support (fixes #11)
[rust.git] / src / abi.rs
1 use std::iter;
2
3 use rustc::hir;
4 use rustc_target::spec::abi::Abi;
5
6 use crate::prelude::*;
7
8 #[derive(Debug)]
9 enum PassMode {
10     NoPass,
11     ByVal(Type),
12     ByRef,
13 }
14
15 impl PassMode {
16     fn get_param_ty(self, fx: &FunctionCx<impl Backend>) -> Type {
17         match self {
18             PassMode::NoPass => unimplemented!("pass mode nopass"),
19             PassMode::ByVal(cton_type) => cton_type,
20             PassMode::ByRef => fx.module.pointer_type(),
21         }
22     }
23 }
24
25 fn get_pass_mode<'a, 'tcx: 'a>(
26     tcx: TyCtxt<'a, 'tcx, 'tcx>,
27     abi: Abi,
28     ty: Ty<'tcx>,
29     is_return: bool,
30 ) -> PassMode {
31     assert!(
32         !tcx.layout_of(ParamEnv::reveal_all().and(ty))
33             .unwrap()
34             .is_unsized()
35     );
36     if ty.sty == tcx.mk_nil().sty {
37         if is_return {
38             //if false {
39             PassMode::NoPass
40         } else {
41             PassMode::ByRef
42         }
43     } else if let Some(ret_ty) = crate::common::cton_type_from_ty(tcx, ty) {
44         PassMode::ByVal(ret_ty)
45     } else {
46         if abi == Abi::C {
47             unimplemented!("Non scalars are not yet supported for \"C\" abi");
48         }
49         PassMode::ByRef
50     }
51 }
52
53 fn adjust_arg_for_abi<'a, 'tcx: 'a>(
54     fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
55     sig: FnSig<'tcx>,
56     arg: CValue<'tcx>,
57 ) -> Value {
58     match get_pass_mode(fx.tcx, sig.abi, arg.layout().ty, false) {
59         PassMode::NoPass => unimplemented!("pass mode nopass"),
60         PassMode::ByVal(_) => arg.load_value(fx),
61         PassMode::ByRef => arg.force_stack(fx),
62     }
63 }
64
65 pub fn cton_sig_from_fn_ty<'a, 'tcx: 'a>(
66     tcx: TyCtxt<'a, 'tcx, 'tcx>,
67     fn_ty: Ty<'tcx>,
68 ) -> Signature {
69     let sig = ty_fn_sig(tcx, fn_ty);
70     assert!(!sig.variadic, "Variadic function are not yet supported");
71     let (call_conv, inputs, output): (CallConv, Vec<Ty>, Ty) = match sig.abi {
72         Abi::Rust => (CallConv::Fast, sig.inputs().to_vec(), sig.output()),
73         Abi::C => (CallConv::SystemV, sig.inputs().to_vec(), sig.output()),
74         Abi::RustCall => {
75             assert_eq!(sig.inputs().len(), 2);
76             let extra_args = match sig.inputs().last().unwrap().sty {
77                 ty::Tuple(ref tupled_arguments) => tupled_arguments,
78                 _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
79             };
80             let mut inputs: Vec<Ty> = vec![sig.inputs()[0]];
81             inputs.extend(extra_args.into_iter());
82             (CallConv::Fast, inputs, sig.output())
83         }
84         Abi::System => bug!("system abi should be selected elsewhere"),
85         Abi::RustIntrinsic => (CallConv::SystemV, sig.inputs().to_vec(), sig.output()),
86         _ => unimplemented!("unsupported abi {:?}", sig.abi),
87     };
88
89     let inputs = inputs
90         .into_iter()
91         .filter_map(|ty| match get_pass_mode(tcx, sig.abi, ty, false) {
92             PassMode::ByVal(cton_ty) => Some(cton_ty),
93             PassMode::NoPass => unimplemented!("pass mode nopass"),
94             PassMode::ByRef => Some(pointer_ty(tcx)),
95         });
96
97     let (params, returns) = match get_pass_mode(tcx, sig.abi, output, true) {
98         PassMode::NoPass => (inputs.map(AbiParam::new).collect(), vec![]),
99         PassMode::ByVal(ret_ty) => (
100             inputs.map(AbiParam::new).collect(),
101             vec![AbiParam::new(ret_ty)],
102         ),
103         PassMode::ByRef => {
104             (
105                 Some(pointer_ty(tcx)) // First param is place to put return val
106                     .into_iter()
107                     .chain(inputs)
108                     .map(AbiParam::new)
109                     .collect(),
110                 vec![],
111             )
112         }
113     };
114
115     Signature {
116         params,
117         returns,
118         call_conv,
119     }
120 }
121
/// Compute the monomorphic `FnSig` for a callable type, with late-bound
/// regions erased.
///
/// Closures and generators do not have a plain `fn` signature, so their
/// environment ("self") argument is synthesized here to match how they are
/// invoked at call sites.
fn ty_fn_sig<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> ty::FnSig<'tcx> {
    let sig = match ty.sty {
        ty::FnDef(..) |
        // Shims currently have type TyFnPtr. Not sure this should remain.
        ty::FnPtr(_) => ty.fn_sig(tcx),
        ty::Closure(def_id, substs) => {
            let sig = substs.closure_sig(def_id, tcx);

            // Prepend the closure environment to the signature's inputs.
            let env_ty = tcx.closure_env_ty(def_id, substs).unwrap();
            sig.map_bound(|sig| tcx.mk_fn_sig(
                iter::once(*env_ty.skip_binder()).chain(sig.inputs().iter().cloned()),
                sig.output(),
                sig.variadic,
                sig.unsafety,
                sig.abi
            ))
        }
        ty::Generator(def_id, substs, _) => {
            let sig = substs.poly_sig(def_id, tcx);

            // Generators are resumed through `&mut self`.
            let env_region = ty::ReLateBound(ty::INNERMOST, ty::BrEnv);
            let env_ty = tcx.mk_mut_ref(tcx.mk_region(env_region), ty);

            sig.map_bound(|sig| {
                // The resume function returns GeneratorState<Yield, Return>,
                // built here from the generator's yield and return types.
                let state_did = tcx.lang_items().gen_state().unwrap();
                let state_adt_ref = tcx.adt_def(state_did);
                let state_substs = tcx.intern_substs(&[
                    sig.yield_ty.into(),
                    sig.return_ty.into(),
                ]);
                let ret_ty = tcx.mk_adt(state_adt_ref, state_substs);

                tcx.mk_fn_sig(iter::once(env_ty),
                    ret_ty,
                    false,
                    hir::Unsafety::Normal,
                    Abi::Rust
                )
            })
        }
        _ => bug!("unexpected type {:?} to ty_fn_sig", ty)
    };
    // Codegen only deals with fully monomorphic signatures.
    tcx.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), &sig)
}
166
167 pub fn get_function_name_and_sig<'a, 'tcx>(
168     tcx: TyCtxt<'a, 'tcx, 'tcx>,
169     inst: Instance<'tcx>,
170 ) -> (String, Signature) {
171     assert!(!inst.substs.needs_infer() && !inst.substs.has_param_types());
172     let fn_ty = inst.ty(tcx);
173     let sig = cton_sig_from_fn_ty(tcx, fn_ty);
174     (tcx.symbol_name(inst).as_str().to_string(), sig)
175 }
176
177 impl<'a, 'tcx: 'a, B: Backend + 'a> FunctionCx<'a, 'tcx, B> {
178     /// Instance must be monomorphized
179     pub fn get_function_id(&mut self, inst: Instance<'tcx>) -> FuncId {
180         let (name, sig) = get_function_name_and_sig(self.tcx, inst);
181         self.module
182             .declare_function(&name, Linkage::Import, &sig)
183             .unwrap()
184     }
185
186     /// Instance must be monomorphized
187     pub fn get_function_ref(&mut self, inst: Instance<'tcx>) -> FuncRef {
188         let func_id = self.get_function_id(inst);
189         self.module
190             .declare_func_in_func(func_id, &mut self.bcx.func)
191     }
192
193     fn lib_call(
194         &mut self,
195         name: &str,
196         input_tys: Vec<types::Type>,
197         output_ty: Option<types::Type>,
198         args: &[Value],
199     ) -> Option<Value> {
200         let sig = Signature {
201             params: input_tys.iter().cloned().map(AbiParam::new).collect(),
202             returns: output_ty
203                 .map(|output_ty| vec![AbiParam::new(output_ty)])
204                 .unwrap_or(Vec::new()),
205             call_conv: CallConv::SystemV,
206         };
207         let func_id = self
208             .module
209             .declare_function(&name, Linkage::Import, &sig)
210             .unwrap();
211         let func_ref = self
212             .module
213             .declare_func_in_func(func_id, &mut self.bcx.func);
214         let call_inst = self.bcx.ins().call(func_ref, args);
215         if output_ty.is_none() {
216             return None;
217         }
218         let results = self.bcx.inst_results(call_inst);
219         assert_eq!(results.len(), 1);
220         Some(results[0])
221     }
222
223     pub fn easy_call(
224         &mut self,
225         name: &str,
226         args: &[CValue<'tcx>],
227         return_ty: Ty<'tcx>,
228     ) -> CValue<'tcx> {
229         let (input_tys, args): (Vec<_>, Vec<_>) = args
230             .into_iter()
231             .map(|arg| {
232                 (
233                     self.cton_type(arg.layout().ty).unwrap(),
234                     arg.load_value(self),
235                 )
236             }).unzip();
237         let return_layout = self.layout_of(return_ty);
238         let return_ty = if let ty::Tuple(tup) = return_ty.sty {
239             if !tup.is_empty() {
240                 bug!("easy_call( (...) -> <non empty tuple> ) is not allowed");
241             }
242             None
243         } else {
244             Some(self.cton_type(return_ty).unwrap())
245         };
246         if let Some(val) = self.lib_call(name, input_tys, return_ty, &args) {
247             CValue::ByVal(val, return_layout)
248         } else {
249             CValue::ByRef(
250                 self.bcx.ins().iconst(self.module.pointer_type(), 0),
251                 return_layout,
252             )
253         }
254     }
255
256     fn self_sig(&self) -> FnSig<'tcx> {
257         ty_fn_sig(self.tcx, self.instance.ty(self.tcx))
258     }
259
260     fn return_type(&self) -> Ty<'tcx> {
261         self.self_sig().output()
262     }
263 }
264
/// Emit the function prologue: append EBB parameters for the incoming
/// arguments, wire up `RETURN_PLACE` according to the return pass mode, and
/// create a backing place (SSA variable or stack slot) for every MIR local.
pub fn codegen_fn_prelude<'a, 'tcx: 'a>(
    fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
    start_ebb: Ebb,
) {
    let ssa_analyzed = crate::analyze::analyze(fx);

    let ret_layout = fx.layout_of(fx.return_type());
    let output_pass_mode = get_pass_mode(fx.tcx, fx.self_sig().abi, fx.return_type(), true);
    // ByRef returns receive a pointer to the return slot as a hidden first
    // parameter (matching cton_sig_from_fn_ty).
    let ret_param = match output_pass_mode {
        PassMode::NoPass => None,
        PassMode::ByVal(_) => None,
        PassMode::ByRef => Some(fx.bcx.append_ebb_param(start_ebb, fx.module.pointer_type())),
    };

    // How a single MIR argument arrives: as one EBB param, or spread over
    // several (the "rust-call" tuple argument).
    enum ArgKind {
        Normal(Value),
        Spread(Vec<Value>),
    }

    let func_params = fx
        .mir
        .args_iter()
        .map(|local| {
            let arg_ty = fx.monomorphize(&fx.mir.local_decls[local].ty);

            // Adapted from https://github.com/rust-lang/rust/blob/145155dc96757002c7b2e9de8489416e2fdbbd57/src/librustc_codegen_llvm/mir/mod.rs#L442-L482
            if Some(local) == fx.mir.spread_arg {
                // This argument (e.g. the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual function arguments.

                let tupled_arg_tys = match arg_ty.sty {
                    ty::Tuple(ref tys) => tys,
                    _ => bug!("spread argument isn't a tuple?! but {:?}", arg_ty),
                };

                // One EBB parameter per tuple element.
                let mut ebb_params = Vec::new();
                for arg_ty in tupled_arg_tys.iter() {
                    let cton_type =
                        get_pass_mode(fx.tcx, fx.self_sig().abi, arg_ty, false).get_param_ty(fx);
                    ebb_params.push(fx.bcx.append_ebb_param(start_ebb, cton_type));
                }

                (local, ArgKind::Spread(ebb_params), arg_ty)
            } else {
                let cton_type =
                    get_pass_mode(fx.tcx, fx.self_sig().abi, arg_ty, false).get_param_ty(fx);
                (
                    local,
                    ArgKind::Normal(fx.bcx.append_ebb_param(start_ebb, cton_type)),
                    arg_ty,
                )
            }
        }).collect::<Vec<(Local, ArgKind, Ty)>>();

    fx.bcx.switch_to_block(start_ebb);

    // Placeholder nop that global comments get attached to.
    fx.top_nop = Some(fx.bcx.ins().nop());
    fx.add_global_comment(format!("ssa {:?}", ssa_analyzed));

    // Map RETURN_PLACE according to how the return value is passed back.
    match output_pass_mode {
        PassMode::NoPass => {
            // Unit return: a null address serves as placeholder for the
            // (zero-sized) return place.
            let null = fx.bcx.ins().iconst(fx.module.pointer_type(), 0);
            fx.local_map.insert(
                RETURN_PLACE,
                CPlace::Addr(null, None, fx.layout_of(fx.return_type())),
            );
        }
        PassMode::ByVal(ret_ty) => {
            fx.bcx.declare_var(mir_var(RETURN_PLACE), ret_ty);
            fx.local_map
                .insert(RETURN_PLACE, CPlace::Var(RETURN_PLACE, ret_layout));
        }
        PassMode::ByRef => {
            fx.local_map.insert(
                RETURN_PLACE,
                CPlace::Addr(ret_param.unwrap(), None, ret_layout),
            );
        }
    }

    for (local, arg_kind, ty) in func_params {
        let layout = fx.layout_of(ty);

        // Non-spread arguments that the analysis did not flag NOT_SSA become
        // Cranelift variables instead of stack slots.
        if let ArgKind::Normal(ebb_param) = arg_kind {
            if !ssa_analyzed
                .get(&local)
                .unwrap()
                .contains(crate::analyze::Flags::NOT_SSA)
            {
                fx.bcx
                    .declare_var(mir_var(local), fx.cton_type(ty).unwrap());
                match get_pass_mode(fx.tcx, fx.self_sig().abi, ty, false) {
                    PassMode::NoPass => unimplemented!("pass mode nopass"),
                    PassMode::ByVal(_) => fx.bcx.def_var(mir_var(local), ebb_param),
                    PassMode::ByRef => {
                        // Load the value out of the caller-provided slot.
                        let val = CValue::ByRef(ebb_param, fx.layout_of(ty)).load_value(fx);
                        fx.bcx.def_var(mir_var(local), val);
                    }
                }
                fx.local_map.insert(local, CPlace::Var(local, layout));
                continue;
            }
        }

        // Otherwise spill the argument into a dedicated stack slot.
        let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
            kind: StackSlotKind::ExplicitSlot,
            size: layout.size.bytes() as u32,
            offset: None,
        });

        let place = CPlace::from_stack_slot(fx, stack_slot, ty);

        match arg_kind {
            ArgKind::Normal(ebb_param) => match get_pass_mode(fx.tcx, fx.self_sig().abi, ty, false)
            {
                PassMode::NoPass => unimplemented!("pass mode nopass"),
                PassMode::ByVal(_) => {
                    place.write_cvalue(fx, CValue::ByVal(ebb_param, place.layout()))
                }
                PassMode::ByRef => place.write_cvalue(fx, CValue::ByRef(ebb_param, place.layout())),
            },
            ArgKind::Spread(ebb_params) => {
                // Reassemble the spread tuple: write each element into its
                // field of the stack slot.
                for (i, ebb_param) in ebb_params.into_iter().enumerate() {
                    let sub_place = place.place_field(fx, mir::Field::new(i));
                    match get_pass_mode(fx.tcx, fx.self_sig().abi, sub_place.layout().ty, false) {
                        PassMode::NoPass => unimplemented!("pass mode nopass"),
                        PassMode::ByVal(_) => {
                            sub_place.write_cvalue(fx, CValue::ByVal(ebb_param, sub_place.layout()))
                        }
                        PassMode::ByRef => {
                            sub_place.write_cvalue(fx, CValue::ByRef(ebb_param, sub_place.layout()))
                        }
                    }
                }
            }
        }
        fx.local_map.insert(local, place);
    }

    // Non-argument locals: SSA variable when possible, stack slot otherwise.
    for local in fx.mir.vars_and_temps_iter() {
        let ty = fx.mir.local_decls[local].ty;
        let layout = fx.layout_of(ty);

        let place = if ssa_analyzed
            .get(&local)
            .unwrap()
            .contains(crate::analyze::Flags::NOT_SSA)
        {
            let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
                kind: StackSlotKind::ExplicitSlot,
                size: layout.size.bytes() as u32,
                offset: None,
            });
            CPlace::from_stack_slot(fx, stack_slot, ty)
        } else {
            fx.bcx
                .declare_var(mir_var(local), fx.cton_type(ty).unwrap());
            CPlace::Var(local, layout)
        };

        fx.local_map.insert(local, place);
    }

    // Fall through into the first real MIR block.
    fx.bcx
        .ins()
        .jump(*fx.ebb_map.get(&START_BLOCK).unwrap(), &[]);
}
435
436 pub fn codegen_terminator_call<'a, 'tcx: 'a>(
437     fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
438     func: &Operand<'tcx>,
439     args: &[Operand<'tcx>],
440     destination: &Option<(Place<'tcx>, BasicBlock)>,
441 ) {
442     let fn_ty = fx.monomorphize(&func.ty(&fx.mir.local_decls, fx.tcx));
443     let sig = ty_fn_sig(fx.tcx, fn_ty);
444
445     // Unpack arguments tuple for closures
446     let args = if sig.abi == Abi::RustCall {
447         assert_eq!(args.len(), 2, "rust-call abi requires two arguments");
448         let self_arg = trans_operand(fx, &args[0]);
449         let pack_arg = trans_operand(fx, &args[1]);
450         let mut args = Vec::new();
451         args.push(self_arg);
452         match pack_arg.layout().ty.sty {
453             ty::Tuple(ref tupled_arguments) => {
454                 for (i, _) in tupled_arguments.iter().enumerate() {
455                     args.push(pack_arg.value_field(fx, mir::Field::new(i)));
456                 }
457             }
458             _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
459         }
460         args
461     } else {
462         args.into_iter()
463             .map(|arg| trans_operand(fx, arg))
464             .collect::<Vec<_>>()
465     };
466
467     let destination = destination
468         .as_ref()
469         .map(|&(ref place, bb)| (trans_place(fx, place), bb));
470
471     if !codegen_intrinsic_call(fx, fn_ty, &args, destination) {
472         codegen_call_inner(
473             fx,
474             Some(func),
475             fn_ty,
476             args,
477             destination.map(|(place, _)| place),
478         );
479
480         if let Some((_, dest)) = destination {
481             let ret_ebb = fx.get_ebb(dest);
482             fx.bcx.ins().jump(ret_ebb, &[]);
483         } else {
484             fx.bcx.ins().trap(TrapCode::User(!0));
485         }
486     }
487 }
488
/// Codegen a direct, indirect, or virtual (trait object) call, excluding
/// intrinsics.
///
/// `func` is the callee operand and is only consulted for indirect calls
/// (when `fn_ty` is not a `FnDef`). `ret_place`, if given, receives the
/// return value.
pub fn codegen_call_inner<'a, 'tcx: 'a>(
    fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
    func: Option<&Operand<'tcx>>,
    fn_ty: Ty<'tcx>,
    args: Vec<CValue<'tcx>>,
    ret_place: Option<CPlace<'tcx>>,
) {
    let sig = ty_fn_sig(fx.tcx, fn_ty);

    let ret_layout = fx.layout_of(sig.output());

    let output_pass_mode = get_pass_mode(fx.tcx, sig.abi, sig.output(), true);
    // ByRef returns are written through a pointer passed as hidden first
    // call argument; a null pointer is used when the result is discarded.
    let return_ptr = match output_pass_mode {
        PassMode::NoPass => None,
        PassMode::ByRef => match ret_place {
            Some(ret_place) => Some(ret_place.expect_addr()),
            None => Some(fx.bcx.ins().iconst(fx.module.pointer_type(), 0)),
        },
        PassMode::ByVal(_) => None,
    };

    // Resolve the concrete instance for direct calls.
    let instance = match fn_ty.sty {
        ty::FnDef(def_id, substs) => {
            Some(Instance::resolve(fx.tcx, ParamEnv::reveal_all(), def_id, substs).unwrap())
        }
        _ => None,
    };

    let func_ref: Option<Value>; // Indirect call target

    let first_arg = {
        if let Some(Instance {
            def: InstanceDef::Virtual(_, idx),
            ..
        }) = instance
        {
            // Trait object method call: split args[0] into the data pointer
            // (passed as `self`) and the function pointer from the vtable.
            let (ptr, method) = crate::vtable::get_ptr_and_method_ref(fx, args[0], idx);
            func_ref = Some(method);
            Some(ptr)
        } else {
            // Plain indirect call: load the function pointer itself.
            func_ref = if instance.is_none() {
                let func = trans_operand(fx, func.expect("indirect call without func Operand"));
                Some(func.load_value(fx))
            } else {
                None
            };

            args.get(0).map(|arg| adjust_arg_for_abi(fx, sig, *arg))
        }.into_iter()
    };

    // Hidden return pointer (if any), then the possibly-replaced first
    // argument, then the remaining arguments, each adjusted for the ABI.
    let call_args: Vec<Value> = return_ptr
        .into_iter()
        .chain(first_arg)
        .chain(
            args.into_iter()
                .skip(1)
                .map(|arg| adjust_arg_for_abi(fx, sig, arg)),
        ).collect::<Vec<_>>();

    let sig = fx.bcx.import_signature(cton_sig_from_fn_ty(fx.tcx, fn_ty));
    let call_inst = if let Some(func_ref) = func_ref {
        fx.bcx.ins().call_indirect(sig, func_ref, &call_args)
    } else {
        let func_ref = fx.get_function_ref(instance.expect("non-indirect call on non-FnDef type"));
        fx.bcx.ins().call(func_ref, &call_args)
    };

    match output_pass_mode {
        PassMode::NoPass => {}
        PassMode::ByVal(_) => {
            // Scalar return: copy the single result into the return place.
            if let Some(ret_place) = ret_place {
                let results = fx.bcx.inst_results(call_inst);
                ret_place.write_cvalue(fx, CValue::ByVal(results[0], ret_layout));
            }
        }
        // ByRef: the callee already wrote through the return pointer.
        PassMode::ByRef => {}
    }
}
568
569 pub fn codegen_return(fx: &mut FunctionCx<impl Backend>) {
570     match get_pass_mode(fx.tcx, fx.self_sig().abi, fx.return_type(), true) {
571         PassMode::NoPass | PassMode::ByRef => {
572             fx.bcx.ins().return_(&[]);
573         }
574         PassMode::ByVal(_) => {
575             let place = fx.get_local_place(RETURN_PLACE);
576             let ret_val = place.to_cvalue(fx).load_value(fx);
577             fx.bcx.ins().return_(&[ret_val]);
578         }
579     }
580 }
581
582 fn codegen_intrinsic_call<'a, 'tcx: 'a>(
583     fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
584     fn_ty: Ty<'tcx>,
585     args: &[CValue<'tcx>],
586     destination: Option<(CPlace<'tcx>, BasicBlock)>,
587 ) -> bool {
588     if let ty::FnDef(def_id, substs) = fn_ty.sty {
589         let sig = ty_fn_sig(fx.tcx, fn_ty);
590
591         if sig.abi == Abi::RustIntrinsic {
592             let intrinsic = fx.tcx.item_name(def_id).as_str();
593             let intrinsic = &intrinsic[..];
594
595             let ret = match destination {
596                 Some((place, _)) => place,
597                 None => {
598                     // Insert non returning intrinsics here
599                     match intrinsic {
600                         "abort" => {
601                             fx.bcx.ins().trap(TrapCode::User(!0 - 1));
602                         }
603                         "unreachable" => {
604                             fx.bcx.ins().trap(TrapCode::User(!0 - 1));
605                         }
606                         _ => unimplemented!("unsupported instrinsic {}", intrinsic),
607                     }
608                     return true;
609                 }
610             };
611
612             let nil_ty = fx.tcx.mk_nil();
613             let u64_layout = fx.layout_of(fx.tcx.types.u64);
614             let usize_layout = fx.layout_of(fx.tcx.types.usize);
615
616             match intrinsic {
617                 "assume" => {
618                     assert_eq!(args.len(), 1);
619                 }
620                 "arith_offset" => {
621                     assert_eq!(args.len(), 2);
622                     let base = args[0].load_value(fx);
623                     let offset = args[1].load_value(fx);
624                     let res = fx.bcx.ins().iadd(base, offset);
625                     let res = CValue::ByVal(res, ret.layout());
626                     ret.write_cvalue(fx, res);
627                 }
628                 "likely" | "unlikely" => {
629                     assert_eq!(args.len(), 1);
630                     ret.write_cvalue(fx, args[0]);
631                 }
632                 "copy" | "copy_nonoverlapping" => {
633                     let elem_ty = substs.type_at(0);
634                     let elem_size: u64 = fx.layout_of(elem_ty).size.bytes();
635                     let elem_size = fx
636                         .bcx
637                         .ins()
638                         .iconst(fx.module.pointer_type(), elem_size as i64);
639                     assert_eq!(args.len(), 3);
640                     let src = args[0];
641                     let dst = args[1];
642                     let count = args[2].load_value(fx);
643                     let byte_amount = fx.bcx.ins().imul(count, elem_size);
644                     fx.easy_call(
645                         "memmove",
646                         &[dst, src, CValue::ByVal(byte_amount, usize_layout)],
647                         nil_ty,
648                     );
649                 }
650                 "discriminant_value" => {
651                     assert_eq!(args.len(), 1);
652                     let discr = crate::base::trans_get_discriminant(fx, args[0], ret.layout());
653                     ret.write_cvalue(fx, discr);
654                 }
655                 "size_of" => {
656                     assert_eq!(args.len(), 0);
657                     let size_of = fx.layout_of(substs.type_at(0)).size.bytes();
658                     let size_of = CValue::const_val(fx, usize_layout.ty, size_of as i64);
659                     ret.write_cvalue(fx, size_of);
660                 }
661                 "size_of_val" => {
662                     assert_eq!(args.len(), 1);
663                     let size = match &substs.type_at(0).sty {
664                         ty::Slice(elem) => {
665                             let len = args[0].load_value_pair(fx).1;
666                             let elem_size = fx.layout_of(elem).size.bytes();
667                             fx.bcx.ins().imul_imm(len, elem_size as i64)
668                         }
669                         ty::Dynamic(..) => crate::vtable::size_of_obj(fx, args[0]),
670                         ty => unimplemented!("size_of_val for {:?}", ty),
671                     };
672                     ret.write_cvalue(fx, CValue::ByVal(size, usize_layout));
673                 }
674                 "type_id" => {
675                     assert_eq!(args.len(), 0);
676                     let type_id = fx.tcx.type_id_hash(substs.type_at(0));
677                     let type_id = CValue::const_val(fx, u64_layout.ty, type_id as i64);
678                     ret.write_cvalue(fx, type_id);
679                 }
680                 "min_align_of" => {
681                     assert_eq!(args.len(), 0);
682                     let min_align = fx.layout_of(substs.type_at(0)).align.abi();
683                     let min_align = CValue::const_val(fx, usize_layout.ty, min_align as i64);
684                     ret.write_cvalue(fx, min_align);
685                 }
686                 _ if intrinsic.starts_with("unchecked_") => {
687                     assert_eq!(args.len(), 2);
688                     let bin_op = match intrinsic {
689                         "unchecked_div" => BinOp::Div,
690                         "unchecked_rem" => BinOp::Rem,
691                         "unchecked_shl" => BinOp::Shl,
692                         "unchecked_shr" => BinOp::Shr,
693                         _ => unimplemented!("intrinsic {}", intrinsic),
694                     };
695                     let res = match ret.layout().ty.sty {
696                         ty::Uint(_) => crate::base::trans_int_binop(
697                             fx,
698                             bin_op,
699                             args[0],
700                             args[1],
701                             ret.layout().ty,
702                             false,
703                         ),
704                         ty::Int(_) => crate::base::trans_int_binop(
705                             fx,
706                             bin_op,
707                             args[0],
708                             args[1],
709                             ret.layout().ty,
710                             true,
711                         ),
712                         _ => panic!(),
713                     };
714                     ret.write_cvalue(fx, res);
715                 }
716                 _ if intrinsic.ends_with("_with_overflow") => {
717                     assert_eq!(args.len(), 2);
718                     assert_eq!(args[0].layout().ty, args[1].layout().ty);
719                     let bin_op = match intrinsic {
720                         "add_with_overflow" => BinOp::Add,
721                         "sub_with_overflow" => BinOp::Sub,
722                         "mul_with_overflow" => BinOp::Mul,
723                         _ => unimplemented!("intrinsic {}", intrinsic),
724                     };
725                     let res = match args[0].layout().ty.sty {
726                         ty::Uint(_) => crate::base::trans_checked_int_binop(
727                             fx,
728                             bin_op,
729                             args[0],
730                             args[1],
731                             ret.layout().ty,
732                             false,
733                         ),
734                         ty::Int(_) => crate::base::trans_checked_int_binop(
735                             fx,
736                             bin_op,
737                             args[0],
738                             args[1],
739                             ret.layout().ty,
740                             true,
741                         ),
742                         _ => panic!(),
743                     };
744                     ret.write_cvalue(fx, res);
745                 }
746                 _ if intrinsic.starts_with("overflowing_") => {
747                     assert_eq!(args.len(), 2);
748                     assert_eq!(args[0].layout().ty, args[1].layout().ty);
749                     let bin_op = match intrinsic {
750                         "overflowing_add" => BinOp::Add,
751                         "overflowing_sub" => BinOp::Sub,
752                         "overflowing_mul" => BinOp::Mul,
753                         _ => unimplemented!("intrinsic {}", intrinsic),
754                     };
755                     let res = match args[0].layout().ty.sty {
756                         ty::Uint(_) => crate::base::trans_int_binop(
757                             fx,
758                             bin_op,
759                             args[0],
760                             args[1],
761                             ret.layout().ty,
762                             false,
763                         ),
764                         ty::Int(_) => crate::base::trans_int_binop(
765                             fx,
766                             bin_op,
767                             args[0],
768                             args[1],
769                             ret.layout().ty,
770                             true,
771                         ),
772                         _ => panic!(),
773                     };
774                     ret.write_cvalue(fx, res);
775                 }
776                 "offset" => {
777                     assert_eq!(args.len(), 2);
778                     let base = args[0].load_value(fx);
779                     let offset = args[1].load_value(fx);
780                     let res = fx.bcx.ins().iadd(base, offset);
781                     ret.write_cvalue(fx, CValue::ByVal(res, args[0].layout()));
782                 }
783                 "transmute" => {
784                     assert_eq!(args.len(), 1);
785                     let src_ty = substs.type_at(0);
786                     let dst_ty = substs.type_at(1);
787                     assert_eq!(args[0].layout().ty, src_ty);
788                     let addr = args[0].force_stack(fx);
789                     let dst_layout = fx.layout_of(dst_ty);
790                     ret.write_cvalue(fx, CValue::ByRef(addr, dst_layout))
791                 }
792                 "uninit" => {
793                     assert_eq!(args.len(), 0);
794                     let ty = substs.type_at(0);
795                     let layout = fx.layout_of(ty);
796                     let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
797                         kind: StackSlotKind::ExplicitSlot,
798                         size: layout.size.bytes() as u32,
799                         offset: None,
800                     });
801
802                     let uninit_place = CPlace::from_stack_slot(fx, stack_slot, ty);
803                     let uninit_val = uninit_place.to_cvalue(fx);
804                     ret.write_cvalue(fx, uninit_val);
805                 }
806                 "ctlz" | "ctlz_nonzero" => {
807                     assert_eq!(args.len(), 1);
808                     let arg = args[0].load_value(fx);
809                     let res = CValue::ByVal(fx.bcx.ins().clz(arg), args[0].layout());
810                     ret.write_cvalue(fx, res);
811                 }
812                 "cttz" | "cttz_nonzero" => {
813                     assert_eq!(args.len(), 1);
814                     let arg = args[0].load_value(fx);
815                     let res = CValue::ByVal(fx.bcx.ins().clz(arg), args[0].layout());
816                     ret.write_cvalue(fx, res);
817                 }
818                 "ctpop" => {
819                     assert_eq!(args.len(), 1);
820                     let arg = args[0].load_value(fx);
821                     let res = CValue::ByVal(fx.bcx.ins().popcnt(arg), args[0].layout());
822                     ret.write_cvalue(fx, res);
823                 }
824                 _ => unimpl!("unsupported intrinsic {}", intrinsic),
825             }
826
827             if let Some((_, dest)) = destination {
828                 let ret_ebb = fx.get_ebb(dest);
829                 fx.bcx.ins().jump(ret_ebb, &[]);
830             } else {
831                 fx.bcx.ins().trap(TrapCode::User(!0));
832             }
833             return true;
834         }
835     }
836
837     false
838 }