use rustc_target::spec::abi::Abi;

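/// How a value is passed around: not at all (`NoPass`), as an immediate
/// Cranelift value (`ByVal`) or behind a pointer (`ByRef`).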
enum PassMode {
    NoPass,
    ByVal(Type),
    ByRef,
}

impl PassMode {
    /// The Cranelift `Type` with which a value is passed under this mode.
    fn get_param_ty(self, fx: &FunctionCx<impl Backend>) -> Type {
        match self {
            PassMode::NoPass => unimplemented!("pass mode nopass"),
            PassMode::ByVal(cton_type) => cton_type,
            PassMode::ByRef => fx.module.pointer_type(),
        }
    }
}

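/// Decides how a value of type `ty` is passed under `abi`: types that map
/// to a single Cranelift scalar are passed `ByVal`, everything else goes
/// `ByRef`, and a unit return value is not passed at all (`NoPass`).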
fn get_pass_mode<'a, 'tcx: 'a>(
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    abi: Abi,
    ty: Ty<'tcx>,
    is_return: bool,
) -> PassMode {
    assert!(
        !tcx.layout_of(ParamEnv::reveal_all().and(ty))
            .unwrap()
            .is_unsized()
    );
    if ty.sty == tcx.mk_nil().sty {
        if is_return {
            PassMode::NoPass
        } else {
            PassMode::ByRef
        }
    } else if let Some(ret_ty) = crate::common::cton_type_from_ty(tcx, ty) {
        PassMode::ByVal(ret_ty)
    } else {
        if abi == Abi::C {
            unimplemented!("Non-scalars are not yet supported for \"C\" abi");
        }
        PassMode::ByRef
    }
}

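/// Lowers a `CValue` argument to the raw `Value` that is actually passed:
/// the loaded scalar for `ByVal` arguments, or the address of a stack copy
/// for `ByRef` arguments.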
fn adjust_arg_for_abi<'a, 'tcx: 'a>(
    fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
    sig: FnSig<'tcx>,
    arg: CValue<'tcx>,
) -> Value {
    match get_pass_mode(fx.tcx, sig.abi, arg.layout().ty, false) {
        PassMode::NoPass => unimplemented!("pass mode nopass"),
        PassMode::ByVal(_) => arg.load_value(fx),
        PassMode::ByRef => arg.force_stack(fx),
    }
}

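/// Builds the Cranelift `Signature` for a function of type `fn_ty`: picks
/// the call convention based on the Rust-level ABI, untuples "rust-call"
/// arguments and, for `ByRef` returns, prepends a pointer parameter for the
/// return place.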
pub fn cton_sig_from_fn_ty<'a, 'tcx: 'a>(
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    fn_ty: Ty<'tcx>,
) -> Signature {
    let sig = ty_fn_sig(tcx, fn_ty);
    assert!(!sig.variadic, "Variadic functions are not yet supported");
    let (call_conv, inputs, output): (CallConv, Vec<Ty>, Ty) = match sig.abi {
        Abi::Rust => (CallConv::Fast, sig.inputs().to_vec(), sig.output()),
        Abi::C => (CallConv::SystemV, sig.inputs().to_vec(), sig.output()),
        Abi::RustCall => {
            assert_eq!(sig.inputs().len(), 2);
            let extra_args = match sig.inputs().last().unwrap().sty {
                ty::Tuple(ref tupled_arguments) => tupled_arguments,
                _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
            };
            let mut inputs: Vec<Ty> = vec![sig.inputs()[0]];
            inputs.extend(extra_args.into_iter());
            (CallConv::Fast, inputs, sig.output())
        }
        Abi::System => bug!("system abi should be selected elsewhere"),
        Abi::RustIntrinsic => (CallConv::SystemV, sig.inputs().to_vec(), sig.output()),
        _ => unimplemented!("unsupported abi {:?}", sig.abi),
    };

    let inputs = inputs
        .into_iter()
        .filter_map(|ty| match get_pass_mode(tcx, sig.abi, ty, false) {
            PassMode::ByVal(cton_ty) => Some(cton_ty),
            PassMode::NoPass => unimplemented!("pass mode nopass"),
            PassMode::ByRef => Some(pointer_ty(tcx)),
        });

    let (params, returns) = match get_pass_mode(tcx, sig.abi, output, true) {
        PassMode::NoPass => (inputs.map(AbiParam::new).collect(), vec![]),
        PassMode::ByVal(ret_ty) => (
            inputs.map(AbiParam::new).collect(),
            vec![AbiParam::new(ret_ty)],
        ),
        PassMode::ByRef => (
            Some(pointer_ty(tcx)) // First param is place to put return val
                .into_iter()
                .chain(inputs)
                .map(AbiParam::new)
                .collect(),
            vec![],
        ),
    };

    Signature {
        params,
        returns,
        call_conv,
    }
}

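/// Returns the normalized `FnSig` of `ty`, synthesizing signatures for
/// closures (environment plus untupled arguments) and generators
/// (`&mut self` -> `GeneratorState<Y, R>`), mirroring the helper of the
/// same name in librustc_codegen_llvm.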
fn ty_fn_sig<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> ty::FnSig<'tcx> {
    let sig = match ty.sty {
        ty::FnDef(..) |
        // Shims currently have type TyFnPtr. Not sure this should remain.
        ty::FnPtr(_) => ty.fn_sig(tcx),
        ty::Closure(def_id, substs) => {
            let sig = substs.closure_sig(def_id, tcx);

            let env_ty = tcx.closure_env_ty(def_id, substs).unwrap();
            sig.map_bound(|sig| tcx.mk_fn_sig(
                iter::once(*env_ty.skip_binder()).chain(sig.inputs().iter().cloned()),
                sig.output(),
                sig.variadic,
                sig.unsafety,
                sig.abi
            ))
        }
        ty::Generator(def_id, substs, _) => {
            let sig = substs.poly_sig(def_id, tcx);

            let env_region = ty::ReLateBound(ty::INNERMOST, ty::BrEnv);
            let env_ty = tcx.mk_mut_ref(tcx.mk_region(env_region), ty);

            sig.map_bound(|sig| {
                let state_did = tcx.lang_items().gen_state().unwrap();
                let state_adt_ref = tcx.adt_def(state_did);
                let state_substs = tcx.intern_substs(&[
                    sig.yield_ty.into(),
                    sig.return_ty.into(),
                ]);
                let ret_ty = tcx.mk_adt(state_adt_ref, state_substs);

                tcx.mk_fn_sig(iter::once(env_ty),
                    ret_ty,
                    false,
                    hir::Unsafety::Normal,
                    Abi::Rust
                )
            })
        }
        _ => bug!("unexpected type {:?} to ty_fn_sig", ty)
    };
    tcx.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), &sig)
}

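/// Returns the symbol name and Cranelift signature for a monomorphized
/// `Instance`.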
pub fn get_function_name_and_sig<'a, 'tcx>(
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    inst: Instance<'tcx>,
) -> (String, Signature) {
    assert!(!inst.substs.needs_infer() && !inst.substs.has_param_types());
    let fn_ty = inst.ty(tcx);
    let sig = cton_sig_from_fn_ty(tcx, fn_ty);
    (tcx.symbol_name(inst).as_str().to_string(), sig)
}

impl<'a, 'tcx: 'a, B: Backend + 'a> FunctionCx<'a, 'tcx, B> {
    /// Instance must be monomorphized
    pub fn get_function_id(&mut self, inst: Instance<'tcx>) -> FuncId {
        let (name, sig) = get_function_name_and_sig(self.tcx, inst);
        self.module
            .declare_function(&name, Linkage::Import, &sig)
            .unwrap()
    }

    /// Instance must be monomorphized
    pub fn get_function_ref(&mut self, inst: Instance<'tcx>) -> FuncRef {
        let func_id = self.get_function_id(inst);
        self.module
            .declare_func_in_func(func_id, &mut self.bcx.func)
    }

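    /// Declares (with import linkage) and calls a function by name using
    /// raw Cranelift types and values, e.g. a libc function; returns the
    /// single result value if `output_ty` is `Some`.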
    fn lib_call(
        &mut self,
        name: &str,
        input_tys: Vec<types::Type>,
        output_ty: Option<types::Type>,
        args: &[Value],
    ) -> Option<Value> {
        let sig = Signature {
            params: input_tys.iter().cloned().map(AbiParam::new).collect(),
            returns: output_ty
                .map(|output_ty| vec![AbiParam::new(output_ty)])
                .unwrap_or(Vec::new()),
            call_conv: CallConv::SystemV,
        };
        let func_id = self
            .module
            .declare_function(&name, Linkage::Import, &sig)
            .unwrap();
        let func_ref = self
            .module
            .declare_func_in_func(func_id, &mut self.bcx.func);
        let call_inst = self.bcx.ins().call(func_ref, args);
        if output_ty.is_none() {
            return None;
        }
        let results = self.bcx.inst_results(call_inst);
        assert_eq!(results.len(), 1);
        Some(results[0])
    }

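    /// Convenience wrapper around `lib_call` that takes and returns
    /// `CValue`s and Rust types; a `()` return type means the callee
    /// returns nothing.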
    pub fn easy_call(
        &mut self,
        name: &str,
        args: &[CValue<'tcx>],
        return_ty: Ty<'tcx>,
    ) -> CValue<'tcx> {
        let (input_tys, args): (Vec<_>, Vec<_>) = args
            .into_iter()
            .map(|arg| {
                (
                    self.cton_type(arg.layout().ty).unwrap(),
                    arg.load_value(self),
                )
            })
            .unzip();
        let return_layout = self.layout_of(return_ty);
        let return_ty = if let ty::Tuple(tup) = return_ty.sty {
            if !tup.is_empty() {
                bug!("easy_call( (...) -> <non empty tuple> ) is not allowed");
            }
            None
        } else {
            Some(self.cton_type(return_ty).unwrap())
        };
        if let Some(val) = self.lib_call(name, input_tys, return_ty, &args) {
            CValue::ByVal(val, return_layout)
        } else {
            CValue::ByRef(
                self.bcx.ins().iconst(self.module.pointer_type(), 0),
                return_layout,
            )
        }
    }

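    /// The monomorphized `FnSig` of the function currently being compiled.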
    fn self_sig(&self) -> FnSig<'tcx> {
        ty_fn_sig(self.tcx, self.instance.ty(self.tcx))
    }

    fn return_type(&self) -> Ty<'tcx> {
        self.self_sig().output()
    }
}

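/// Generates the function prologue: appends EBB parameters for all
/// arguments (and, for `ByRef` returns, a return place pointer), then binds
/// every MIR local either to a Cranelift SSA variable or, when the analysis
/// flagged it `NOT_SSA`, to a dedicated stack slot, and finally jumps to the
/// first MIR basic block.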
pub fn codegen_fn_prelude<'a, 'tcx: 'a>(
    fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
    start_ebb: Ebb,
) {
    let ssa_analyzed = crate::analyze::analyze(fx);

    let ret_layout = fx.layout_of(fx.return_type());
    let output_pass_mode = get_pass_mode(fx.tcx, fx.self_sig().abi, fx.return_type(), true);
    let ret_param = match output_pass_mode {
        PassMode::NoPass => None,
        PassMode::ByVal(_) => None,
        PassMode::ByRef => Some(fx.bcx.append_ebb_param(start_ebb, fx.module.pointer_type())),
    };

    enum ArgKind {
        Normal(Value),
        Spread(Vec<Value>),
    }

    let func_params = fx
        .mir
        .args_iter()
        .map(|local| {
            let arg_ty = fx.monomorphize(&fx.mir.local_decls[local].ty);

            // Adapted from https://github.com/rust-lang/rust/blob/145155dc96757002c7b2e9de8489416e2fdbbd57/src/librustc_codegen_llvm/mir/mod.rs#L442-L482
            if Some(local) == fx.mir.spread_arg {
                // This argument (e.g. the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual function arguments.

                let tupled_arg_tys = match arg_ty.sty {
                    ty::Tuple(ref tys) => tys,
                    _ => bug!("spread argument isn't a tuple?! but {:?}", arg_ty),
                };

                let mut ebb_params = Vec::new();
                for arg_ty in tupled_arg_tys.iter() {
                    let cton_type =
                        get_pass_mode(fx.tcx, fx.self_sig().abi, arg_ty, false).get_param_ty(fx);
                    ebb_params.push(fx.bcx.append_ebb_param(start_ebb, cton_type));
                }

                (local, ArgKind::Spread(ebb_params), arg_ty)
            } else {
                let cton_type =
                    get_pass_mode(fx.tcx, fx.self_sig().abi, arg_ty, false).get_param_ty(fx);
                (
                    local,
                    ArgKind::Normal(fx.bcx.append_ebb_param(start_ebb, cton_type)),
                    arg_ty,
                )
            }
        }).collect::<Vec<(Local, ArgKind, Ty)>>();

    fx.bcx.switch_to_block(start_ebb);

    fx.top_nop = Some(fx.bcx.ins().nop());
    fx.add_global_comment(format!("ssa {:?}", ssa_analyzed));

    match output_pass_mode {
        PassMode::NoPass => {
            let null = fx.bcx.ins().iconst(fx.module.pointer_type(), 0);
            //unimplemented!("pass mode nopass");
            fx.local_map.insert(
                RETURN_PLACE,
                CPlace::Addr(null, None, fx.layout_of(fx.return_type())),
            );
        }
        PassMode::ByVal(ret_ty) => {
            fx.bcx.declare_var(mir_var(RETURN_PLACE), ret_ty);
            fx.local_map
                .insert(RETURN_PLACE, CPlace::Var(RETURN_PLACE, ret_layout));
        }
        PassMode::ByRef => {
            fx.local_map.insert(
                RETURN_PLACE,
                CPlace::Addr(ret_param.unwrap(), None, ret_layout),
            );
        }
    }

    for (local, arg_kind, ty) in func_params {
        let layout = fx.layout_of(ty);

        // SSA-eligible scalar arguments become Cranelift variables directly.
        if let ArgKind::Normal(ebb_param) = arg_kind {
            if !ssa_analyzed
                .get(&local)
                .unwrap()
                .contains(crate::analyze::Flags::NOT_SSA)
            {
                fx.bcx
                    .declare_var(mir_var(local), fx.cton_type(ty).unwrap());
                match get_pass_mode(fx.tcx, fx.self_sig().abi, ty, false) {
                    PassMode::NoPass => unimplemented!("pass mode nopass"),
                    PassMode::ByVal(_) => fx.bcx.def_var(mir_var(local), ebb_param),
                    PassMode::ByRef => {
                        let val = CValue::ByRef(ebb_param, fx.layout_of(ty)).load_value(fx);
                        fx.bcx.def_var(mir_var(local), val);
                    }
                }
                fx.local_map.insert(local, CPlace::Var(local, layout));
                continue;
            }
        }

        // Everything else is spilled to an explicit stack slot.
        let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
            kind: StackSlotKind::ExplicitSlot,
            size: layout.size.bytes() as u32,
            offset: None,
        });

        let place = CPlace::from_stack_slot(fx, stack_slot, ty);

        match arg_kind {
            ArgKind::Normal(ebb_param) => match get_pass_mode(fx.tcx, fx.self_sig().abi, ty, false)
            {
                PassMode::NoPass => unimplemented!("pass mode nopass"),
                PassMode::ByVal(_) => {
                    place.write_cvalue(fx, CValue::ByVal(ebb_param, place.layout()))
                }
                PassMode::ByRef => place.write_cvalue(fx, CValue::ByRef(ebb_param, place.layout())),
            },
            ArgKind::Spread(ebb_params) => {
                for (i, ebb_param) in ebb_params.into_iter().enumerate() {
                    let sub_place = place.place_field(fx, mir::Field::new(i));
                    match get_pass_mode(fx.tcx, fx.self_sig().abi, sub_place.layout().ty, false) {
                        PassMode::NoPass => unimplemented!("pass mode nopass"),
                        PassMode::ByVal(_) => {
                            sub_place.write_cvalue(fx, CValue::ByVal(ebb_param, sub_place.layout()))
                        }
                        PassMode::ByRef => {
                            sub_place.write_cvalue(fx, CValue::ByRef(ebb_param, sub_place.layout()))
                        }
                    }
                }
            }
        }
        fx.local_map.insert(local, place);
    }

    for local in fx.mir.vars_and_temps_iter() {
        let ty = fx.mir.local_decls[local].ty;
        let layout = fx.layout_of(ty);

        let place = if ssa_analyzed
            .get(&local)
            .unwrap()
            .contains(crate::analyze::Flags::NOT_SSA)
        {
            let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
                kind: StackSlotKind::ExplicitSlot,
                size: layout.size.bytes() as u32,
                offset: None,
            });
            CPlace::from_stack_slot(fx, stack_slot, ty)
        } else {
            fx.bcx
                .declare_var(mir_var(local), fx.cton_type(ty).unwrap());
            CPlace::Var(local, layout)
        };

        fx.local_map.insert(local, place);
    }

    fx.bcx
        .ins()
        .jump(*fx.ebb_map.get(&START_BLOCK).unwrap(), &[]);
}

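/// Generates code for a MIR call terminator: untuples "rust-call"
/// arguments, gives `codegen_intrinsic_call` the first try and otherwise
/// emits a regular call followed by a jump to the destination block (or a
/// trap for diverging calls).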
pub fn codegen_terminator_call<'a, 'tcx: 'a>(
    fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
    func: &Operand<'tcx>,
    args: &[Operand<'tcx>],
    destination: &Option<(Place<'tcx>, BasicBlock)>,
) {
    let fn_ty = fx.monomorphize(&func.ty(&fx.mir.local_decls, fx.tcx));
    let sig = ty_fn_sig(fx.tcx, fn_ty);

    // Unpack arguments tuple for closures
    let args = if sig.abi == Abi::RustCall {
        assert_eq!(args.len(), 2, "rust-call abi requires two arguments");
        let self_arg = trans_operand(fx, &args[0]);
        let pack_arg = trans_operand(fx, &args[1]);
        let mut args = Vec::new();
        args.push(self_arg);
        match pack_arg.layout().ty.sty {
            ty::Tuple(ref tupled_arguments) => {
                for (i, _) in tupled_arguments.iter().enumerate() {
                    args.push(pack_arg.value_field(fx, mir::Field::new(i)));
                }
            }
            _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
        }
        args
    } else {
        args.into_iter()
            .map(|arg| trans_operand(fx, arg))
            .collect::<Vec<_>>()
    };

    let destination = destination
        .as_ref()
        .map(|&(ref place, bb)| (trans_place(fx, place), bb));

    if !codegen_intrinsic_call(fx, fn_ty, &args, destination) {
        codegen_call_inner(
            fx,
            Some(func),
            fn_ty,
            args,
            destination.map(|(place, _)| place),
        );

        if let Some((_, dest)) = destination {
            let ret_ebb = fx.get_ebb(dest);
            fx.bcx.ins().jump(ret_ebb, &[]);
        } else {
            fx.bcx.ins().trap(TrapCode::User(!0));
        }
    }
}

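/// The common path for direct, indirect and virtual calls: computes the
/// ABI-adjusted argument list (optionally prefixed with a return place
/// pointer), resolves the call target and writes back a `ByVal` return
/// value into `ret_place`.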
pub fn codegen_call_inner<'a, 'tcx: 'a>(
    fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
    func: Option<&Operand<'tcx>>,
    fn_ty: Ty<'tcx>,
    args: Vec<CValue<'tcx>>,
    ret_place: Option<CPlace<'tcx>>,
) {
    let sig = ty_fn_sig(fx.tcx, fn_ty);

    let ret_layout = fx.layout_of(sig.output());

    let output_pass_mode = get_pass_mode(fx.tcx, sig.abi, sig.output(), true);
    let return_ptr = match output_pass_mode {
        PassMode::NoPass => None,
        PassMode::ByRef => match ret_place {
            Some(ret_place) => Some(ret_place.expect_addr()),
            None => Some(fx.bcx.ins().iconst(fx.module.pointer_type(), 0)),
        },
        PassMode::ByVal(_) => None,
    };

    let instance = match fn_ty.sty {
        ty::FnDef(def_id, substs) => {
            Some(Instance::resolve(fx.tcx, ParamEnv::reveal_all(), def_id, substs).unwrap())
        }
        _ => None,
    };

    let func_ref: Option<Value>; // Indirect call target
    let first_arg: Option<Value>;

    if let Some(Instance {
        def: InstanceDef::Virtual(_, idx),
        ..
    }) = instance
    {
        // Virtual call: split the fat pointer in `args[0]` into the `self`
        // data pointer and the method function pointer from the vtable.
        let (ptr, method) = crate::vtable::get_ptr_and_method_ref(fx, args[0], idx);
        func_ref = Some(method);
        first_arg = Some(ptr);
    } else {
        func_ref = if instance.is_none() {
            let func = trans_operand(fx, func.expect("indirect call without func Operand"));
            Some(func.load_value(fx))
        } else {
            None
        };
        first_arg = args.get(0).map(|arg| adjust_arg_for_abi(fx, sig, *arg));
    }

    let call_args: Vec<Value> = return_ptr
        .into_iter()
        .chain(first_arg.into_iter())
        .chain(
            args.into_iter()
                .skip(1)
                .map(|arg| adjust_arg_for_abi(fx, sig, arg)),
        ).collect::<Vec<_>>();

    let sig = fx.bcx.import_signature(cton_sig_from_fn_ty(fx.tcx, fn_ty));
    let call_inst = if let Some(func_ref) = func_ref {
        fx.bcx.ins().call_indirect(sig, func_ref, &call_args)
    } else {
        let func_ref = fx.get_function_ref(instance.expect("non-indirect call on non-FnDef type"));
        fx.bcx.ins().call(func_ref, &call_args)
    };

    match output_pass_mode {
        PassMode::NoPass => {}
        PassMode::ByVal(_) => {
            if let Some(ret_place) = ret_place {
                let results = fx.bcx.inst_results(call_inst);
                ret_place.write_cvalue(fx, CValue::ByVal(results[0], ret_layout));
            }
        }
        PassMode::ByRef => {}
    }
}

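/// Emits the return instruction; a value is attached only when the return
/// type is passed `ByVal` (`ByRef` returns have already been written
/// through the return place pointer).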
pub fn codegen_return(fx: &mut FunctionCx<impl Backend>) {
    match get_pass_mode(fx.tcx, fx.self_sig().abi, fx.return_type(), true) {
        PassMode::NoPass | PassMode::ByRef => {
            fx.bcx.ins().return_(&[]);
        }
        PassMode::ByVal(_) => {
            let place = fx.get_local_place(RETURN_PLACE);
            let ret_val = place.to_cvalue(fx).load_value(fx);
            fx.bcx.ins().return_(&[ret_val]);
        }
    }
}

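/// Generates inline code for `fn_ty` if it is a `RustIntrinsic` and returns
/// `true`; otherwise returns `false` so the caller emits a regular call.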
fn codegen_intrinsic_call<'a, 'tcx: 'a>(
    fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
    fn_ty: Ty<'tcx>,
    args: &[CValue<'tcx>],
    destination: Option<(CPlace<'tcx>, BasicBlock)>,
) -> bool {
    if let ty::FnDef(def_id, substs) = fn_ty.sty {
        let sig = ty_fn_sig(fx.tcx, fn_ty);

        if sig.abi == Abi::RustIntrinsic {
            let intrinsic = fx.tcx.item_name(def_id).as_str();
            let intrinsic = &intrinsic[..];

            let ret = match destination {
                Some((place, _)) => place,
                None => {
                    // Insert non returning intrinsics here
                    match intrinsic {
                        "abort" => {
                            fx.bcx.ins().trap(TrapCode::User(!0 - 1));
                        }
                        "unreachable" => {
                            fx.bcx.ins().trap(TrapCode::User(!0 - 1));
                        }
                        _ => unimplemented!("unsupported intrinsic {}", intrinsic),
                    }
                    return true;
                }
            };

            let nil_ty = fx.tcx.mk_nil();
            let u64_layout = fx.layout_of(fx.tcx.types.u64);
            let usize_layout = fx.layout_of(fx.tcx.types.usize);

            match intrinsic {
                "assume" => {
                    assert_eq!(args.len(), 1);
                }
                "arith_offset" => {
                    assert_eq!(args.len(), 2);
                    let base = args[0].load_value(fx);
                    let offset = args[1].load_value(fx);
                    let res = fx.bcx.ins().iadd(base, offset);
                    let res = CValue::ByVal(res, ret.layout());
                    ret.write_cvalue(fx, res);
                }
                "likely" | "unlikely" => {
                    assert_eq!(args.len(), 1);
                    ret.write_cvalue(fx, args[0]);
                }
632 "copy" | "copy_nonoverlapping" => {
633 let elem_ty = substs.type_at(0);
634 let elem_size: u64 = fx.layout_of(elem_ty).size.bytes();
638 .iconst(fx.module.pointer_type(), elem_size as i64);
639 assert_eq!(args.len(), 3);
642 let count = args[2].load_value(fx);
643 let byte_amount = fx.bcx.ins().imul(count, elem_size);
646 &[dst, src, CValue::ByVal(byte_amount, usize_layout)],
650 "discriminant_value" => {
651 assert_eq!(args.len(), 1);
652 let discr = crate::base::trans_get_discriminant(fx, args[0], ret.layout());
653 ret.write_cvalue(fx, discr);
656 assert_eq!(args.len(), 0);
657 let size_of = fx.layout_of(substs.type_at(0)).size.bytes();
658 let size_of = CValue::const_val(fx, usize_layout.ty, size_of as i64);
659 ret.write_cvalue(fx, size_of);
662 assert_eq!(args.len(), 1);
663 let size = match &substs.type_at(0).sty {
665 let len = args[0].load_value_pair(fx).1;
666 let elem_size = fx.layout_of(elem).size.bytes();
667 fx.bcx.ins().imul_imm(len, elem_size as i64)
669 ty::Dynamic(..) => crate::vtable::size_of_obj(fx, args[0]),
670 ty => unimplemented!("size_of_val for {:?}", ty),
672 ret.write_cvalue(fx, CValue::ByVal(size, usize_layout));
675 assert_eq!(args.len(), 0);
676 let type_id = fx.tcx.type_id_hash(substs.type_at(0));
677 let type_id = CValue::const_val(fx, u64_layout.ty, type_id as i64);
678 ret.write_cvalue(fx, type_id);
681 assert_eq!(args.len(), 0);
682 let min_align = fx.layout_of(substs.type_at(0)).align.abi();
683 let min_align = CValue::const_val(fx, usize_layout.ty, min_align as i64);
684 ret.write_cvalue(fx, min_align);
686 _ if intrinsic.starts_with("unchecked_") => {
687 assert_eq!(args.len(), 2);
688 let bin_op = match intrinsic {
689 "unchecked_div" => BinOp::Div,
690 "unchecked_rem" => BinOp::Rem,
691 "unchecked_shl" => BinOp::Shl,
692 "unchecked_shr" => BinOp::Shr,
693 _ => unimplemented!("intrinsic {}", intrinsic),
695 let res = match ret.layout().ty.sty {
696 ty::Uint(_) => crate::base::trans_int_binop(
704 ty::Int(_) => crate::base::trans_int_binop(
714 ret.write_cvalue(fx, res);
                _ if intrinsic.ends_with("_with_overflow") => {
                    assert_eq!(args.len(), 2);
                    assert_eq!(args[0].layout().ty, args[1].layout().ty);
                    let bin_op = match intrinsic {
                        "add_with_overflow" => BinOp::Add,
                        "sub_with_overflow" => BinOp::Sub,
                        "mul_with_overflow" => BinOp::Mul,
                        _ => unimplemented!("intrinsic {}", intrinsic),
                    };
                    let res = match args[0].layout().ty.sty {
                        ty::Uint(_) => crate::base::trans_checked_int_binop(
                            fx,
                            bin_op,
                            args[0],
                            args[1],
                            ret.layout().ty,
                            false,
                        ),
                        ty::Int(_) => crate::base::trans_checked_int_binop(
                            fx,
                            bin_op,
                            args[0],
                            args[1],
                            ret.layout().ty,
                            true,
                        ),
                        _ => panic!(),
                    };
                    ret.write_cvalue(fx, res);
                }
746 _ if intrinsic.starts_with("overflowing_") => {
747 assert_eq!(args.len(), 2);
748 assert_eq!(args[0].layout().ty, args[1].layout().ty);
749 let bin_op = match intrinsic {
750 "overflowing_add" => BinOp::Add,
751 "overflowing_sub" => BinOp::Sub,
752 "overflowing_mul" => BinOp::Mul,
753 _ => unimplemented!("intrinsic {}", intrinsic),
755 let res = match args[0].layout().ty.sty {
756 ty::Uint(_) => crate::base::trans_int_binop(
764 ty::Int(_) => crate::base::trans_int_binop(
774 ret.write_cvalue(fx, res);
                "offset" => {
                    assert_eq!(args.len(), 2);
                    let base = args[0].load_value(fx);
                    let offset = args[1].load_value(fx);
                    let res = fx.bcx.ins().iadd(base, offset);
                    ret.write_cvalue(fx, CValue::ByVal(res, args[0].layout()));
                }
                "transmute" => {
                    assert_eq!(args.len(), 1);
                    let src_ty = substs.type_at(0);
                    let dst_ty = substs.type_at(1);
                    assert_eq!(args[0].layout().ty, src_ty);
                    // Reinterpret the value at the same address with the
                    // destination layout.
                    let addr = args[0].force_stack(fx);
                    let dst_layout = fx.layout_of(dst_ty);
                    ret.write_cvalue(fx, CValue::ByRef(addr, dst_layout))
                }
                "uninit" => {
                    assert_eq!(args.len(), 0);
                    let ty = substs.type_at(0);
                    let layout = fx.layout_of(ty);
                    let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
                        kind: StackSlotKind::ExplicitSlot,
                        size: layout.size.bytes() as u32,
                        offset: None,
                    });

                    let uninit_place = CPlace::from_stack_slot(fx, stack_slot, ty);
                    let uninit_val = uninit_place.to_cvalue(fx);
                    ret.write_cvalue(fx, uninit_val);
                }
806 "ctlz" | "ctlz_nonzero" => {
807 assert_eq!(args.len(), 1);
808 let arg = args[0].load_value(fx);
809 let res = CValue::ByVal(fx.bcx.ins().clz(arg), args[0].layout());
810 ret.write_cvalue(fx, res);
812 "cttz" | "cttz_nonzero" => {
813 assert_eq!(args.len(), 1);
814 let arg = args[0].load_value(fx);
815 let res = CValue::ByVal(fx.bcx.ins().clz(arg), args[0].layout());
816 ret.write_cvalue(fx, res);
                "ctpop" => {
                    assert_eq!(args.len(), 1);
                    let arg = args[0].load_value(fx);
                    let res = CValue::ByVal(fx.bcx.ins().popcnt(arg), args[0].layout());
                    ret.write_cvalue(fx, res);
                }
                _ => unimpl!("unsupported intrinsic {}", intrinsic),
            }

            if let Some((_, dest)) = destination {
                let ret_ebb = fx.get_ebb(dest);
                fx.bcx.ins().jump(ret_ebb, &[]);
            } else {
                fx.bcx.ins().trap(TrapCode::User(!0));
            }

            return true;
        }
    }

    false
}