1 //! Codegen of a single function
3 use rustc_index::vec::IndexVec;
4 use rustc_middle::ty::adjustment::PointerCast;
/// Codegen a single monomorphized function `instance` into Cranelift IR and
/// define it in the module held by `cx`.
///
/// NOTE(review): this excerpt elides many intermediate lines (the embedded
/// original line numbers are non-contiguous), so the comments below describe
/// only what the visible lines establish.
8 pub(crate) fn trans_fn<'tcx>(
9 cx: &mut crate::CodegenCx<'tcx, impl Module>,
10 instance: Instance<'tcx>,
// Fetch the MIR body for this instance.
15 let mir = tcx.instance_mir(instance.def);
// Declare the function: mangled name plus a Cranelift signature derived from
// the target triple.
18 let (name, sig) = get_function_name_and_sig(tcx, cx.module.isa().triple(), instance, false);
19 let func_id = cx.module.declare_function(&name, linkage, &sig).unwrap();
21 cx.cached_context.clear();
23 // Make the FunctionBuilder
24 let mut func_ctx = FunctionBuilderContext::new();
// Reuse the cached Function allocation; the cached slot receives a fresh
// empty Function in exchange.
25 let mut func = std::mem::replace(&mut cx.cached_context.func, Function::new());
26 func.name = ExternalName::user(0, func_id.as_u32());
28 func.collect_debug_info();
30 let mut bcx = FunctionBuilder::new(&mut func, &mut func_ctx);
// Predeclare one Cranelift block per MIR basic block, plus a dedicated start
// block for the ABI prelude.
33 let start_block = bcx.create_block();
34 let block_map: IndexVec<BasicBlock, Block> = (0..mir.basic_blocks().len())
35 .map(|_| bcx.create_block())
39 let pointer_type = cx.module.target_config().pointer_type();
40 let clif_comments = crate::pretty_clif::CommentWriter::new(tcx, instance);
// Bundle all per-function codegen state.
42 let mut fx = FunctionCx {
52 local_map: IndexVec::with_capacity(mir.local_decls.len()),
53 caller_location: None, // set by `codegen_fn_prelude`
54 cold_blocks: EntitySet::new(),
57 source_info_set: indexmap::IndexSet::new(),
// If any argument type is uninhabited the function can never actually be
// called: emit a trap instead of generating a real body.
63 let arg_uninhabited = fx.mir.args_iter().any(|arg| {
64 fx.layout_of(fx.monomorphize(&fx.mir.local_decls[arg].ty))
71 .append_block_params_for_function_params(fx.block_map[START_BLOCK]);
72 fx.bcx.switch_to_block(fx.block_map[START_BLOCK]);
73 crate::trap::trap_unreachable(&mut fx, "function has uninhabited argument");
// Normal path: generate the ABI prelude, then the body.
75 tcx.sess.time("codegen clif ir", || {
76 tcx.sess.time("codegen prelude", || {
77 crate::abi::codegen_fn_prelude(&mut fx, start_block)
79 codegen_fn_content(&mut fx);
83 // Recover all necessary data from fx, before accessing func will prevent future access to it.
84 let instance = fx.instance;
85 let mut clif_comments = fx.clif_comments;
86 let source_info_set = fx.source_info_set;
87 let local_map = fx.local_map;
88 let cold_blocks = fx.cold_blocks;
90 // Store function in context
91 let context = &mut cx.cached_context;
// Dump unoptimized CLIF for debugging, then run the verifier over it.
94 crate::pretty_clif::write_clif_file(tcx, "unopt", None, instance, &context, &clif_comments);
97 verify_func(tcx, &clif_comments, &context.func);
99 // Perform rust specific optimizations
100 tcx.sess.time("optimize clif ir", || {
101 crate::optimize::optimize_function(
110 // If the return block is not reachable, then the SSA builder may have inserted an `iconst.i128`
111 // instruction, which doesn't have an encoding.
112 context.compute_cfg();
113 context.compute_domtree();
114 context.eliminate_unreachable_code(cx.module.isa()).unwrap();
115 context.dce(cx.module.isa()).unwrap();
118 let module = &mut cx.module;
119 tcx.sess.time("define function", || {
// NOTE(review): the call head is elided in this excerpt — presumably
// `module.define_function(...)`; traps are not expected, hence NullTrapSink.
124 &mut cranelift_codegen::binemit::NullTrapSink {},
129 // Write optimized function to file for debugging
130 crate::pretty_clif::write_clif_file(
133 Some(cx.module.isa()),
139 // Define debuginfo for function
140 let isa = cx.module.isa();
141 let debug_context = &mut cx.debug_context;
142 let unwind_context = &mut cx.unwind_context;
143 tcx.sess.time("generate debug info", || {
144 if let Some(debug_context) = debug_context {
145 debug_context.define_function(
// Unwind info is registered regardless of whether full debuginfo is enabled.
155 unwind_context.add_function(func_id, &context, isa);
158 // Clear context to make it usable for the next function
/// Run the Cranelift IR verifier over `func`, reporting failures as fatal
/// rustc session errors with a pretty-printed (commented) CLIF dump.
///
/// NOTE(review): intermediate lines are elided in this excerpt.
162 pub(crate) fn verify_func(
164 writer: &crate::pretty_clif::CommentWriter,
167 tcx.sess.time("verify clif ir", || {
// Default flags are sufficient for verification; codegen flags are not needed.
168 let flags = cranelift_codegen::settings::Flags::new(cranelift_codegen::settings::builder());
169 match cranelift_codegen::verify_function(&func, &flags) {
// On error: first report the raw error, then build a human-readable dump
// annotated via the comment writer and abort with it.
172 tcx.sess.err(&format!("{:?}", err));
173 let pretty_error = cranelift_codegen::print_errors::pretty_verifier_error(
176 Some(Box::new(writer)),
180 .fatal(&format!("cranelift verify error:\n{}", pretty_error));
/// Translate every MIR basic block of the current function into Cranelift IR:
/// statements via `trans_stmt`, then the block terminator.
///
/// NOTE(review): this excerpt elides intermediate lines; comments describe
/// only what the visible lines establish.
186 fn codegen_fn_content(fx: &mut FunctionCx<'_, '_, impl Module>) {
187 crate::constant::check_constants(fx);
189 for (bb, bb_data) in fx.mir.basic_blocks().iter_enumerated() {
190 let block = fx.get_block(bb);
191 fx.bcx.switch_to_block(block);
193 if bb_data.is_cleanup {
194 // Unwinding after panicking is not supported
197 // FIXME once unwinding is supported uncomment next lines
198 // // Unwinding is unlikely to happen, so mark cleanup block's as cold.
199 // fx.cold_blocks.insert(block);
// Lower each statement, keeping the debug source location current.
203 for stmt in &bb_data.statements {
204 fx.set_debug_loc(stmt.source_info);
205 trans_stmt(fx, block, stmt);
// Debug builds: attach the terminator's head as an inline CLIF comment.
208 #[cfg(debug_assertions)]
210 let mut terminator_head = "\n".to_string();
214 .fmt_head(&mut terminator_head)
216 let inst = fx.bcx.func.layout.last_inst(block).unwrap();
217 fx.add_comment(inst, terminator_head);
220 fx.set_debug_loc(bb_data.terminator().source_info);
222 match &bb_data.terminator().kind {
223 TerminatorKind::Goto { target } => {
// Peephole: a Goto straight into a Return-only block can emit the return
// here, unless intervening StorageDead statements prevent it.
224 if let TerminatorKind::Return = fx.mir[*target].terminator().kind {
225 let mut can_immediately_return = true;
226 for stmt in &fx.mir[*target].statements {
227 if let StatementKind::StorageDead(_) = stmt.kind {
229 // FIXME Can sometimes happen, see rust-lang/rust#70531
230 can_immediately_return = false;
235 if can_immediately_return {
236 crate::abi::codegen_return(fx);
241 let block = fx.get_block(*target);
242 fx.bcx.ins().jump(block, &[]);
244 TerminatorKind::Return => {
245 crate::abi::codegen_return(fx);
247 TerminatorKind::Assert {
// With overflow checks disabled, an OverflowNeg assert is a no-op:
// jump straight to the success target.
254 if !fx.tcx.sess.overflow_checks() {
255 if let mir::AssertKind::OverflowNeg(_) = *msg {
256 let target = fx.get_block(*target);
257 fx.bcx.ins().jump(target, &[]);
261 let cond = trans_operand(fx, cond).load_scalar(fx);
// Branch to a cold failure block when the assertion condition fails.
263 let target = fx.get_block(*target);
264 let failure = fx.bcx.create_block();
265 fx.cold_blocks.insert(failure);
// NOTE(review): the brz/brnz pair presumably corresponds to the
// `expected` true/false cases; the selecting condition is elided here.
268 fx.bcx.ins().brz(cond, failure, &[]);
270 fx.bcx.ins().brnz(cond, failure, &[]);
272 fx.bcx.ins().jump(target, &[]);
274 fx.bcx.switch_to_block(failure);
// Bounds-check failures call the dedicated panic lang item with
// index/len/caller-location; all other kinds use a plain message panic.
278 AssertKind::BoundsCheck { ref len, ref index } => {
279 let len = trans_operand(fx, len).load_scalar(fx);
280 let index = trans_operand(fx, index).load_scalar(fx);
282 .get_caller_location(bb_data.terminator().source_info.span)
287 rustc_hir::LangItem::PanicBoundsCheck,
288 &[index, len, location],
289 bb_data.terminator().source_info.span,
293 let msg_str = msg.description();
294 codegen_panic(fx, msg_str, bb_data.terminator().source_info.span);
299 TerminatorKind::SwitchInt {
304 let discr = trans_operand(fx, discr).load_scalar(fx);
// Boolean switch: exactly one (value, target) pair plus otherwise.
306 if switch_ty.kind() == fx.tcx.types.bool.kind() {
307 assert_eq!(targets.iter().count(), 1);
308 let (then_value, then_block) = targets.iter().next().unwrap();
309 let then_block = fx.get_block(then_block);
310 let else_block = fx.get_block(targets.otherwise());
311 let test_zero = match then_value {
314 _ => unreachable!("{:?}", targets),
// Peephole: strip bint/bnot wrappers so the raw condition is branched on,
// folding a NOT into an inverted test polarity.
317 let discr = crate::optimize::peephole::maybe_unwrap_bint(&mut fx.bcx, discr);
318 let (discr, is_inverted) =
319 crate::optimize::peephole::maybe_unwrap_bool_not(&mut fx.bcx, discr);
320 let test_zero = if is_inverted { !test_zero } else { test_zero };
321 let discr = crate::optimize::peephole::maybe_unwrap_bint(&mut fx.bcx, discr);
323 crate::optimize::peephole::make_branchable_value(&mut fx.bcx, discr);
325 fx.bcx.ins().brz(discr, then_block, &[]);
326 fx.bcx.ins().jump(else_block, &[]);
328 fx.bcx.ins().brnz(discr, then_block, &[]);
329 fx.bcx.ins().jump(else_block, &[]);
// General case: use cranelift-frontend's Switch helper.
332 let mut switch = ::cranelift_frontend::Switch::new();
333 for (value, block) in targets.iter() {
334 let block = fx.get_block(block);
335 switch.set_entry(value, block);
337 let otherwise_block = fx.get_block(targets.otherwise());
338 switch.emit(&mut fx.bcx, discr, otherwise_block);
341 TerminatorKind::Call {
349 fx.tcx.sess.time("codegen call", || {
350 crate::abi::codegen_terminator_call(
360 TerminatorKind::InlineAsm {
367 crate::inline_asm::codegen_inline_asm(
369 bb_data.terminator().source_info.span,
// Fall through to the destination block, or trap if the asm is noreturn.
376 Some(destination) => {
377 let destination_block = fx.get_block(destination);
378 fx.bcx.ins().jump(destination_block, &[]);
381 crate::trap::trap_unreachable(
383 "[corruption] Returned from noreturn inline asm",
// Unwind-related terminators cannot be reached (unwinding unsupported).
388 TerminatorKind::Resume | TerminatorKind::Abort => {
389 trap_unreachable(fx, "[corruption] Unwinding bb reached.");
391 TerminatorKind::Unreachable => {
392 trap_unreachable(fx, "[corruption] Hit unreachable code.");
// These terminators are removed by earlier MIR passes and must not
// survive to codegen.
394 TerminatorKind::Yield { .. }
395 | TerminatorKind::FalseEdge { .. }
396 | TerminatorKind::FalseUnwind { .. }
397 | TerminatorKind::DropAndReplace { .. }
398 | TerminatorKind::GeneratorDrop => {
399 bug!("shouldn't exist at trans {:?}", bb_data.terminator());
401 TerminatorKind::Drop {
406 let drop_place = trans_place(fx, *place);
407 crate::abi::codegen_drop(fx, bb_data.terminator().source_info.span, drop_place);
409 let target_block = fx.get_block(*target);
410 fx.bcx.ins().jump(target_block, &[]);
// All blocks are emitted; seal them so the SSA builder can finish.
415 fx.bcx.seal_all_blocks();
// NOTE(review): the `fn trans_stmt<'tcx>(` header line is elided in this
// excerpt; these are its parameters. Translates a single MIR statement into
// Cranelift IR. Comments below describe only what the visible lines establish.
420 fx: &mut FunctionCx<'_, 'tcx, impl Module>,
421 #[allow(unused_variables)] cur_block: Block,
422 stmt: &Statement<'tcx>,
// On panic during lowering, print which statement was being translated.
424 let _print_guard = crate::PrintOnPanic(|| format!("stmt {:?}", stmt));
426 fx.set_debug_loc(stmt.source_info);
// Deliberately-false cfg: this per-statement CLIF commenting is disabled
// (it is noisy/slow) but kept for easy re-enabling.
428 #[cfg(false_debug_assertions)]
430 StatementKind::StorageLive(..) | StatementKind::StorageDead(..) => {} // Those are not very useful
432 let inst = fx.bcx.func.layout.last_inst(cur_block).unwrap();
433 fx.add_comment(inst, format!("{:?}", stmt));
438 StatementKind::SetDiscriminant {
442 let place = trans_place(fx, **place);
443 crate::discriminant::codegen_set_discriminant(fx, place, *variant_index);
// The big case: lower an assignment `lval = rvalue` for every Rvalue kind.
445 StatementKind::Assign(to_place_and_rval) => {
446 let lval = trans_place(fx, to_place_and_rval.0);
447 let dest_layout = lval.layout();
448 match &to_place_and_rval.1 {
449 Rvalue::Use(operand) => {
450 let val = trans_operand(fx, operand);
451 lval.write_cvalue(fx, val);
// References and raw-pointer addr-of lower identically: take a place ref.
453 Rvalue::Ref(_, _, place) | Rvalue::AddressOf(_, place) => {
454 let place = trans_place(fx, *place);
455 let ref_ = place.place_ref(fx, lval.layout());
456 lval.write_cvalue(fx, ref_);
458 Rvalue::ThreadLocalRef(def_id) => {
459 let val = crate::constant::codegen_tls_ref(fx, *def_id, lval.layout());
460 lval.write_cvalue(fx, val);
462 Rvalue::BinaryOp(bin_op, lhs, rhs) => {
463 let lhs = trans_operand(fx, lhs);
464 let rhs = trans_operand(fx, rhs);
466 let res = crate::num::codegen_binop(fx, *bin_op, lhs, rhs);
467 lval.write_cvalue(fx, res);
// Checked binop: with overflow checks off, produce (value, false) so the
// pair layout callers expect is preserved without the overflow computation.
469 Rvalue::CheckedBinaryOp(bin_op, lhs, rhs) => {
470 let lhs = trans_operand(fx, lhs);
471 let rhs = trans_operand(fx, rhs);
473 let res = if !fx.tcx.sess.overflow_checks() {
475 crate::num::trans_int_binop(fx, *bin_op, lhs, rhs).load_scalar(fx);
476 let is_overflow = fx.bcx.ins().iconst(types::I8, 0);
477 CValue::by_val_pair(val, is_overflow, lval.layout())
479 crate::num::trans_checked_int_binop(fx, *bin_op, lhs, rhs)
482 lval.write_cvalue(fx, res);
484 Rvalue::UnaryOp(un_op, operand) => {
485 let operand = trans_operand(fx, operand);
486 let layout = operand.layout();
487 let val = operand.load_scalar(fx);
488 let res = match un_op {
// `!` on bool is compare-with-zero; on integers it is bitwise NOT.
489 UnOp::Not => match layout.ty.kind() {
491 let res = fx.bcx.ins().icmp_imm(IntCC::Equal, val, 0);
492 CValue::by_val(fx.bcx.ins().bint(types::I8, res), layout)
494 ty::Uint(_) | ty::Int(_) => {
495 CValue::by_val(fx.bcx.ins().bnot(val), layout)
497 _ => unreachable!("un op Not for {:?}", layout.ty),
499 UnOp::Neg => match layout.ty.kind() {
500 ty::Int(IntTy::I128) => {
501 // FIXME remove this case once ineg.i128 works
// Work around missing ineg.i128 by computing 0 - x.
502 let zero = CValue::const_val(fx, layout, 0);
503 crate::num::trans_int_binop(fx, BinOp::Sub, zero, operand)
505 ty::Int(_) => CValue::by_val(fx.bcx.ins().ineg(val), layout),
506 ty::Float(_) => CValue::by_val(fx.bcx.ins().fneg(val), layout),
507 _ => unreachable!("un op Neg for {:?}", layout.ty),
510 lval.write_cvalue(fx, res);
// Reify `fn item -> fn pointer`: resolve the instance and take its address.
512 Rvalue::Cast(CastKind::Pointer(PointerCast::ReifyFnPointer), operand, to_ty) => {
513 let from_ty = fx.monomorphize(&operand.ty(&fx.mir.local_decls, fx.tcx));
514 let to_layout = fx.layout_of(fx.monomorphize(to_ty));
515 match *from_ty.kind() {
516 ty::FnDef(def_id, substs) => {
517 let func_ref = fx.get_function_ref(
518 Instance::resolve_for_fn_ptr(
520 ParamEnv::reveal_all(),
525 .polymorphize(fx.tcx),
527 let func_addr = fx.bcx.ins().func_addr(fx.pointer_type, func_ref);
528 lval.write_cvalue(fx, CValue::by_val(func_addr, to_layout));
530 _ => bug!("Trying to ReifyFnPointer on non FnDef {:?}", from_ty),
// These pointer casts are pure reinterpretations of the pointer value.
533 Rvalue::Cast(CastKind::Pointer(PointerCast::UnsafeFnPointer), operand, to_ty)
534 | Rvalue::Cast(CastKind::Pointer(PointerCast::MutToConstPointer), operand, to_ty)
535 | Rvalue::Cast(CastKind::Pointer(PointerCast::ArrayToPointer), operand, to_ty) => {
536 let to_layout = fx.layout_of(fx.monomorphize(to_ty));
537 let operand = trans_operand(fx, operand);
538 lval.write_cvalue(fx, operand.cast_pointer_to(to_layout));
540 Rvalue::Cast(CastKind::Misc, operand, to_ty) => {
541 let operand = trans_operand(fx, operand);
542 let from_ty = operand.layout().ty;
543 let to_ty = fx.monomorphize(to_ty);
// Local helper (head elided): decides whether a pointer type is "fat"
// (carries metadata such as a slice length or vtable).
546 fx: &FunctionCx<'_, 'tcx, impl Module>,
549 ty.builtin_deref(true)
555 has_ptr_meta(fx.tcx, pointee_ty)
561 if is_fat_ptr(fx, from_ty) {
562 if is_fat_ptr(fx, to_ty) {
563 // fat-ptr -> fat-ptr
564 lval.write_cvalue(fx, operand.cast_pointer_to(dest_layout));
566 // fat-ptr -> thin-ptr
// Discard the metadata half of the scalar pair.
567 let (ptr, _extra) = operand.load_scalar_pair(fx);
568 lval.write_cvalue(fx, CValue::by_val(ptr, dest_layout))
570 } else if let ty::Adt(adt_def, _substs) = from_ty.kind() {
571 // enum -> discriminant value
572 assert!(adt_def.is_enum());
574 ty::Uint(_) | ty::Int(_) => {}
575 _ => unreachable!("cast adt {} -> {}", from_ty, to_ty),
578 use rustc_target::abi::{Int, TagEncoding, Variants};
580 match &operand.layout().variants {
// Single-variant enum: the discriminant is a compile-time constant.
581 Variants::Single { index } => {
585 .discriminant_for_variant(fx.tcx, *index)
587 let discr = if discr.ty.is_signed() {
588 rustc_middle::mir::interpret::sign_extend(
590 fx.layout_of(discr.ty).size,
596 let discr = CValue::const_val(fx, fx.layout_of(to_ty), discr);
597 lval.write_cvalue(fx, discr);
// Direct tag encoding: read the tag field and cast it to the target width.
602 tag_encoding: TagEncoding::Direct,
605 let cast_to = fx.clif_type(dest_layout.ty).unwrap();
607 // Read the tag/niche-encoded discriminant from memory.
609 operand.value_field(fx, mir::Field::new(*tag_field));
610 let encoded_discr = encoded_discr.load_scalar(fx);
612 // Decode the discriminant (specifically if it's niche-encoded).
613 let signed = match tag.value {
614 Int(_, signed) => signed,
617 let val = clif_intcast(fx, encoded_discr, cast_to, signed);
618 let val = CValue::by_val(val, dest_layout);
619 lval.write_cvalue(fx, val);
621 Variants::Multiple { .. } => unreachable!(),
// Plain numeric cast (int/float conversions).
624 let to_clif_ty = fx.clif_type(to_ty).unwrap();
625 let from = operand.load_scalar(fx);
627 let res = clif_int_or_float_cast(
634 lval.write_cvalue(fx, CValue::by_val(res, dest_layout));
// Non-capturing closure -> fn pointer: resolve its FnOnce shim address.
638 CastKind::Pointer(PointerCast::ClosureFnPointer(_)),
642 let operand = trans_operand(fx, operand);
643 match *operand.layout().ty.kind() {
644 ty::Closure(def_id, substs) => {
645 let instance = Instance::resolve_closure(
649 ty::ClosureKind::FnOnce,
651 .polymorphize(fx.tcx);
652 let func_ref = fx.get_function_ref(instance);
653 let func_addr = fx.bcx.ins().func_addr(fx.pointer_type, func_ref);
654 lval.write_cvalue(fx, CValue::by_val(func_addr, lval.layout()));
656 _ => bug!("{} cannot be cast to a fn ptr", operand.layout().ty),
659 Rvalue::Cast(CastKind::Pointer(PointerCast::Unsize), operand, _to_ty) => {
660 let operand = trans_operand(fx, operand);
661 operand.unsize_value(fx, lval);
663 Rvalue::Discriminant(place) => {
664 let place = trans_place(fx, *place);
665 let value = place.to_cvalue(fx);
667 crate::discriminant::codegen_get_discriminant(fx, value, dest_layout);
668 lval.write_cvalue(fx, discr);
// Array repeat `[elem; times]`: memset for byte-sized elements, otherwise
// an explicit counted store loop.
670 Rvalue::Repeat(operand, times) => {
671 let operand = trans_operand(fx, operand);
674 .eval(fx.tcx, ParamEnv::reveal_all())
676 .try_to_bits(fx.tcx.data_layout.pointer_size)
678 if fx.clif_type(operand.layout().ty) == Some(types::I8) {
679 let times = fx.bcx.ins().iconst(fx.pointer_type, times as i64);
680 // FIXME use emit_small_memset where possible
681 let addr = lval.to_ptr().get_addr(fx);
682 let val = operand.load_scalar(fx);
684 .call_memset(fx.cx.module.target_config(), addr, val, times);
// Loop structure: loop_block tests the index, loop_block2 does the store,
// done_block is the exit; the index is a block parameter of loop_block.
686 let loop_block = fx.bcx.create_block();
687 let loop_block2 = fx.bcx.create_block();
688 let done_block = fx.bcx.create_block();
689 let index = fx.bcx.append_block_param(loop_block, fx.pointer_type);
690 let zero = fx.bcx.ins().iconst(fx.pointer_type, 0);
691 fx.bcx.ins().jump(loop_block, &[zero]);
693 fx.bcx.switch_to_block(loop_block);
694 let done = fx.bcx.ins().icmp_imm(IntCC::Equal, index, times as i64);
695 fx.bcx.ins().brnz(done, done_block, &[]);
696 fx.bcx.ins().jump(loop_block2, &[]);
698 fx.bcx.switch_to_block(loop_block2);
699 let to = lval.place_index(fx, index);
700 to.write_cvalue(fx, operand);
701 let index = fx.bcx.ins().iadd_imm(index, 1);
702 fx.bcx.ins().jump(loop_block, &[index]);
704 fx.bcx.switch_to_block(done_block);
708 Rvalue::Len(place) => {
709 let place = trans_place(fx, *place);
710 let usize_layout = fx.layout_of(fx.tcx.types.usize);
711 let len = codegen_array_len(fx, place);
712 lval.write_cvalue(fx, CValue::by_val(len, usize_layout));
// `box expr` allocation: call the ExchangeMalloc lang item with size/align.
714 Rvalue::NullaryOp(NullOp::Box, content_ty) => {
715 let usize_type = fx.clif_type(fx.tcx.types.usize).unwrap();
716 let content_ty = fx.monomorphize(content_ty);
717 let layout = fx.layout_of(content_ty);
718 let llsize = fx.bcx.ins().iconst(usize_type, layout.size.bytes() as i64);
722 .iconst(usize_type, layout.align.abi.bytes() as i64);
723 let box_layout = fx.layout_of(fx.tcx.mk_box(content_ty));
726 let def_id = match fx
729 .require(rustc_hir::LangItem::ExchangeMalloc)
735 .fatal(&format!("allocation of `{}` {}", box_layout.ty, s));
738 let instance = ty::Instance::mono(fx.tcx, def_id).polymorphize(fx.tcx);
739 let func_ref = fx.get_function_ref(instance);
740 let call = fx.bcx.ins().call(func_ref, &[llsize, llalign]);
741 let ptr = fx.bcx.inst_results(call)[0];
742 lval.write_cvalue(fx, CValue::by_val(ptr, box_layout));
// `size_of::<T>()` as a compile-time constant (type must be Sized).
744 Rvalue::NullaryOp(NullOp::SizeOf, ty) => {
748 .is_sized(fx.tcx.at(stmt.source_info.span), ParamEnv::reveal_all()));
749 let ty_size = fx.layout_of(fx.monomorphize(ty)).size.bytes();
751 CValue::const_val(fx, fx.layout_of(fx.tcx.types.usize), ty_size.into());
752 lval.write_cvalue(fx, val);
// Aggregates other than arrays are lowered away before codegen.
754 Rvalue::Aggregate(kind, operands) => match **kind {
755 AggregateKind::Array(_ty) => {
756 for (i, operand) in operands.into_iter().enumerate() {
757 let operand = trans_operand(fx, operand);
758 let index = fx.bcx.ins().iconst(fx.pointer_type, i as i64);
759 let to = lval.place_index(fx, index);
760 to.write_cvalue(fx, operand);
763 _ => unreachable!("shouldn't exist at trans {:?}", to_place_and_rval.1),
// Statements with no runtime effect.
767 StatementKind::StorageLive(_)
768 | StatementKind::StorageDead(_)
770 | StatementKind::FakeRead(..)
771 | StatementKind::Retag { .. }
772 | StatementKind::AscribeUserType(..) => {}
// Legacy LLVM-style inline asm: only a small whitelist of known asm strings
// (cpuid, xgetbv, stack probes, alloca, Windows abort) is handled specially;
// anything else is a fatal error.
774 StatementKind::LlvmInlineAsm(asm) => {
775 use rustc_span::symbol::Symbol;
781 let rustc_hir::LlvmInlineAsmInner {
782 asm: asm_code, // Name
783 outputs: output_names, // Vec<LlvmInlineAsmOutput>
784 inputs: input_names, // Vec<Name>
785 clobbers, // Vec<Name>
791 match asm_code.as_str().trim() {
// cpuid pattern as emitted by std/core: verify the exact constraint set,
// then lower to the cranelift cpuid shim.
795 "mov %rbx, %rsi\n cpuid\n xchg %rbx, %rsi" => {
798 &[Symbol::intern("{eax}"), Symbol::intern("{ecx}")]
800 assert_eq!(output_names.len(), 4);
801 for (i, c) in (&["={eax}", "={esi}", "={ecx}", "={edx}"])
805 assert_eq!(&output_names[i].constraint.as_str(), c);
806 assert!(!output_names[i].is_rw);
807 assert!(!output_names[i].is_indirect);
810 assert_eq!(clobbers, &[]);
813 assert!(!alignstack);
815 assert_eq!(inputs.len(), 2);
816 let leaf = trans_operand(fx, &inputs[0].1).load_scalar(fx); // %eax
817 let subleaf = trans_operand(fx, &inputs[1].1).load_scalar(fx); // %ecx
819 let (eax, ebx, ecx, edx) =
820 crate::intrinsics::codegen_cpuid_call(fx, leaf, subleaf);
822 assert_eq!(outputs.len(), 4);
823 trans_place(fx, outputs[0])
824 .write_cvalue(fx, CValue::by_val(eax, fx.layout_of(fx.tcx.types.u32)));
825 trans_place(fx, outputs[1])
826 .write_cvalue(fx, CValue::by_val(ebx, fx.layout_of(fx.tcx.types.u32)));
827 trans_place(fx, outputs[2])
828 .write_cvalue(fx, CValue::by_val(ecx, fx.layout_of(fx.tcx.types.u32)));
829 trans_place(fx, outputs[3])
830 .write_cvalue(fx, CValue::by_val(edx, fx.layout_of(fx.tcx.types.u32)));
// xgetbv pattern: constraints are verified but the op itself is a trap.
833 assert_eq!(input_names, &[Symbol::intern("{ecx}")]);
835 assert_eq!(output_names.len(), 2);
836 for (i, c) in (&["={eax}", "={edx}"]).iter().enumerate() {
837 assert_eq!(&output_names[i].constraint.as_str(), c);
838 assert!(!output_names[i].is_rw);
839 assert!(!output_names[i].is_indirect);
842 assert_eq!(clobbers, &[]);
845 assert!(!alignstack);
847 crate::trap::trap_unimplemented(fx, "_xgetbv arch intrinsic is not supported");
849 // ___chkstk, ___chkstk_ms and __alloca are only used on Windows
852 .symbol_name(fx.instance)
854 .starts_with("___chkstk") =>
856 crate::trap::trap_unimplemented(fx, "Stack probes are not supported");
858 _ if fx.tcx.symbol_name(fx.instance).name == "__alloca" => {
859 crate::trap::trap_unimplemented(fx, "Alloca is not supported");
861 // Used in sys::windows::abort_internal
863 crate::trap::trap_unimplemented(fx, "Windows abort");
868 .span_fatal(stmt.source_info.span, "Inline assembly is not supported"),
871 StatementKind::Coverage { .. } => fx.tcx.sess.fatal("-Zcoverage is unimplemented"),
/// Produce the runtime length of an array or slice place as a pointer-sized
/// Cranelift value.
///
/// NOTE(review): intermediate lines are elided in this excerpt.
875 fn codegen_array_len<'tcx>(
876 fx: &mut FunctionCx<'_, 'tcx, impl Module>,
879 match *place.layout().ty.kind() {
// Arrays: the length is a type-level constant, materialized as an iconst.
880 ty::Array(_elem_ty, len) => {
883 .eval(fx.tcx, ParamEnv::reveal_all())
884 .eval_usize(fx.tcx, ParamEnv::reveal_all()) as i64;
885 fx.bcx.ins().iconst(fx.pointer_type, len)
// Slices: the length lives in the fat-pointer metadata of the place.
887 ty::Slice(_elem_ty) => place
888 .to_ptr_maybe_unsized()
890 .expect("Length metadata for slice place"),
891 _ => bug!("Rvalue::Len({:?})", place),
/// Lower a MIR `Place` to a `CPlace` by starting at the local and applying
/// each projection element in order.
///
/// NOTE(review): intermediate lines are elided in this excerpt.
895 pub(crate) fn trans_place<'tcx>(
896 fx: &mut FunctionCx<'_, 'tcx, impl Module>,
899 let mut cplace = fx.get_local_place(place.local);
901 for elem in place.projection {
903 PlaceElem::Deref => {
904 cplace = cplace.place_deref(fx);
906 PlaceElem::Field(field, _ty) => {
907 cplace = cplace.place_field(fx, field);
909 PlaceElem::Index(local) => {
// The index value is itself stored in a local; load it as a scalar.
910 let index = fx.get_local_place(local).to_cvalue(fx).load_scalar(fx);
911 cplace = cplace.place_index(fx, index);
913 PlaceElem::ConstantIndex {
918 let offset: u64 = offset;
// `from_end` counts backwards from the runtime length; otherwise the
// constant offset is used directly.
919 let index = if !from_end {
920 fx.bcx.ins().iconst(fx.pointer_type, offset as i64)
922 let len = codegen_array_len(fx, cplace);
923 fx.bcx.ins().iadd_imm(len, -(offset as i64))
925 cplace = cplace.place_index(fx, index);
927 PlaceElem::Subslice { from, to, from_end } => {
928 // These indices are generated by slice patterns.
929 // slice[from:-to] in Python terms.
931 let from: u64 = from;
934 match cplace.layout().ty.kind() {
// Array subslice: statically-sized, so re-type the place as a smaller array
// starting `from` elements in.
935 ty::Array(elem_ty, _len) => {
936 assert!(!from_end, "array subslices are never `from_end`");
937 let elem_layout = fx.layout_of(elem_ty);
938 let ptr = cplace.to_ptr();
939 cplace = CPlace::for_ptr(
940 ptr.offset_i64(fx, elem_layout.size.bytes() as i64 * (from as i64)),
941 fx.layout_of(fx.tcx.mk_array(elem_ty, u64::from(to) - u64::from(from))),
// Slice subslice: adjust both the data pointer and the length metadata.
944 ty::Slice(elem_ty) => {
945 assert!(from_end, "slice subslices should be `from_end`");
946 let elem_layout = fx.layout_of(elem_ty);
947 let (ptr, len) = cplace.to_ptr_maybe_unsized();
948 let len = len.unwrap();
949 cplace = CPlace::for_ptr_with_extra(
950 ptr.offset_i64(fx, elem_layout.size.bytes() as i64 * (from as i64)),
951 fx.bcx.ins().iadd_imm(len, -(from as i64 + to as i64)),
958 PlaceElem::Downcast(_adt_def, variant) => {
959 cplace = cplace.downcast_variant(fx, variant);
/// Lower a MIR `Operand` to a `CValue`: Move/Copy read through the place,
/// Constant goes through the constant evaluator.
///
/// NOTE(review): intermediate lines are elided in this excerpt.
967 pub(crate) fn trans_operand<'tcx>(
968 fx: &mut FunctionCx<'_, 'tcx, impl Module>,
969 operand: &Operand<'tcx>,
// Move and Copy are lowered identically here (no drop-flag bookkeeping).
972 Operand::Move(place) | Operand::Copy(place) => {
973 let cplace = trans_place(fx, *place);
976 Operand::Constant(const_) => crate::constant::trans_constant(fx, const_),
/// Emit a call to the `panic` lang item with a static message string,
/// its length, and the caller location for `span`.
///
/// NOTE(review): intermediate lines are elided in this excerpt.
980 pub(crate) fn codegen_panic<'tcx>(
981 fx: &mut FunctionCx<'_, 'tcx, impl Module>,
985 let location = fx.get_caller_location(span).load_scalar(fx);
// The message is interned as an anonymous read-only data object.
987 let msg_ptr = fx.anonymous_str("assert", msg_str);
991 .iconst(fx.pointer_type, i64::try_from(msg_str.len()).unwrap());
// Argument order matches the panic lang item: (msg_ptr, msg_len, location).
992 let args = [msg_ptr, msg_len, location];
994 codegen_panic_inner(fx, rustc_hir::LangItem::Panic, &args, span);
/// Shared tail for all panic paths: resolve `lang_item` to its symbol, call
/// it with `args`, and emit an unreachable trap after the call (panic
/// functions never return).
///
/// NOTE(review): intermediate lines (including the call emission itself and
/// the closing brace) are elided in this excerpt.
997 pub(crate) fn codegen_panic_inner<'tcx>(
998 fx: &mut FunctionCx<'_, 'tcx, impl Module>,
999 lang_item: rustc_hir::LangItem,
// A missing lang item is a fatal session error at the panic site's span.
1007 .unwrap_or_else(|s| fx.tcx.sess.span_fatal(span, &s));
1009 let instance = Instance::mono(fx.tcx, def_id).polymorphize(fx.tcx);
1010 let symbol_name = fx.tcx.symbol_name(instance).name;
// All three arguments are pointer-sized (msg ptr, msg len, &Location).
1014 vec![fx.pointer_type, fx.pointer_type, fx.pointer_type],
1019 crate::trap::trap_unreachable(fx, "panic lang item returned");