1 use rustc_index::vec::IndexVec;
2 use rustc_middle::ty::adjustment::PointerCast;
// Codegen a single monomorphized function `instance`: lower its MIR to
// Cranelift IR, run CLIF-level cleanup passes, define the function in the
// backend module, and emit debug/unwind info.
// NOTE(review): this listing is line-sampled (original line numbers are
// embedded at the start of each line); statements between the visible
// numbered lines are elided, so some locals used below (e.g. `tcx`,
// `linkage`) are bound on elided lines.
6 pub(crate) fn trans_fn<'tcx, B: Backend + 'static>(
7 cx: &mut crate::CodegenCx<'tcx, B>,
8 instance: Instance<'tcx>,
// Fetch the MIR body for this instance (may be a compiler-generated shim).
13 let mir = tcx.instance_mir(instance.def);
// Declare the function to the module up front so we have a `FuncId` to
// name the CLIF function with.
16 let (name, sig) = get_function_name_and_sig(tcx, cx.module.isa().triple(), instance, false);
17 let func_id = cx.module.declare_function(&name, linkage, &sig).unwrap();
// Reuse the cached codegen context across functions to avoid reallocation.
19 cx.cached_context.clear();
21 // Make the FunctionBuilder
22 let mut func_ctx = FunctionBuilderContext::new();
// Take the cached `Function` out of the context (leaving a fresh one behind)
// and key it by the declared func_id.
23 let mut func = std::mem::replace(&mut cx.cached_context.func, Function::new());
24 func.name = ExternalName::user(0, func_id.as_u32());
26 func.collect_debug_info();
28 let mut bcx = FunctionBuilder::new(&mut func, &mut func_ctx);
// One CLIF block for the ABI prelude plus one per MIR basic block, indexed
// by `BasicBlock` so terminators can jump via `block_map`.
31 let start_block = bcx.create_block();
32 let block_map: IndexVec<BasicBlock, Block> = (0..mir.basic_blocks().len())
33 .map(|_| bcx.create_block())
37 let pointer_type = cx.module.target_config().pointer_type();
38 let clif_comments = crate::pretty_clif::CommentWriter::new(tcx, instance);
// Bundle all per-function state into the FunctionCx passed to the
// statement/terminator lowering code.
40 let mut fx = FunctionCx {
50 local_map: FxHashMap::with_capacity_and_hasher(mir.local_decls.len(), Default::default()),
51 caller_location: None, // set by `codegen_fn_prelude`
52 cold_blocks: EntitySet::new(),
55 source_info_set: indexmap::IndexSet::new(),
// Fast path: if any argument type is uninhabited the function can never be
// called, so emit a trap instead of lowering the body.
61 let arg_uninhabited = fx.mir.args_iter().any(|arg| {
62 fx.layout_of(fx.monomorphize(&fx.mir.local_decls[arg].ty))
69 .append_block_params_for_function_params(fx.block_map[START_BLOCK]);
70 fx.bcx.switch_to_block(fx.block_map[START_BLOCK]);
71 crate::trap::trap_unreachable(&mut fx, "function has uninhabited argument");
// Normal path: ABI prelude (argument spilling etc.) then the MIR body.
73 tcx.sess.time("codegen clif ir", || {
74 tcx.sess.time("codegen prelude", || {
75 crate::abi::codegen_fn_prelude(&mut fx, start_block)
77 codegen_fn_content(&mut fx);
81 // Recover all necessary data from fx, before accessing func will prevent future access to it.
82 let instance = fx.instance;
83 let mut clif_comments = fx.clif_comments;
84 let source_info_set = fx.source_info_set;
85 let local_map = fx.local_map;
86 let cold_blocks = fx.cold_blocks;
88 // Store function in context
89 let context = &mut cx.cached_context;
// Dump the unoptimized CLIF for debugging, then verify it before running
// any optimizations so verifier errors point at the raw lowering.
92 crate::pretty_clif::write_clif_file(tcx, "unopt", None, instance, &context, &clif_comments);
95 verify_func(tcx, &clif_comments, &context.func);
97 // Perform rust specific optimizations
98 tcx.sess.time("optimize clif ir", || {
99 crate::optimize::optimize_function(
108 // If the return block is not reachable, then the SSA builder may have inserted a `iconst.i128`
109 // instruction, which doesn't have an encoding.
110 context.compute_cfg();
111 context.compute_domtree();
112 context.eliminate_unreachable_code(cx.module.isa()).unwrap();
113 context.dce(cx.module.isa()).unwrap();
// Hand the finished CLIF to the module for machine-code emission.
// NullTrapSink: trap locations are not recorded here.
116 let module = &mut cx.module;
117 tcx.sess.time("define function", || {
122 &mut cranelift_codegen::binemit::NullTrapSink {},
127 // Write optimized function to file for debugging
128 crate::pretty_clif::write_clif_file(
131 Some(cx.module.isa()),
137 // Define debuginfo for function
138 let isa = cx.module.isa();
139 let debug_context = &mut cx.debug_context;
140 let unwind_context = &mut cx.unwind_context;
141 tcx.sess.time("generate debug info", || {
// Debug info is optional (behind a flag); unwind info is always recorded.
142 if let Some(debug_context) = debug_context {
143 debug_context.define_function(
153 unwind_context.add_function(func_id, &context, isa);
156 // Clear context to make it usable for the next function
// Run the Cranelift IR verifier on `func`. On failure, first report the raw
// error, then abort compilation with a pretty-printed version that includes
// the per-instruction comments from `writer` for easier diagnosis.
160 pub(crate) fn verify_func(
162 writer: &crate::pretty_clif::CommentWriter,
165 tcx.sess.time("verify clif ir", || {
// Default settings are sufficient here; the verifier only needs flags,
// not the target ISA configuration.
166 let flags = cranelift_codegen::settings::Flags::new(cranelift_codegen::settings::builder());
167 match cranelift_codegen::verify_function(&func, &flags) {
170 tcx.sess.err(&format!("{:?}", err));
171 let pretty_error = cranelift_codegen::print_errors::pretty_verifier_error(
174 Some(Box::new(writer)),
// `fatal` never returns: a verifier error is an internal codegen bug.
178 .fatal(&format!("cranelift verify error:\n{}", pretty_error));
// Lower every MIR basic block of the current function to Cranelift IR:
// statements via `trans_stmt`, then one match arm per terminator kind.
// NOTE(review): line-sampled listing — some statements between the numbered
// lines are elided.
184 fn codegen_fn_content(fx: &mut FunctionCx<'_, '_, impl Backend>) {
// Evaluate all constants up front so errors surface before lowering.
185 crate::constant::check_constants(fx);
187 for (bb, bb_data) in fx.mir.basic_blocks().iter_enumerated() {
188 let block = fx.get_block(bb);
189 fx.bcx.switch_to_block(block);
191 if bb_data.is_cleanup {
192 // Unwinding after panicking is not supported
195 // FIXME once unwinding is supported uncomment next lines
196 // // Unwinding is unlikely to happen, so mark cleanup block's as cold.
197 // fx.cold_blocks.insert(block);
201 for stmt in &bb_data.statements {
202 fx.set_debug_loc(stmt.source_info);
203 trans_stmt(fx, block, stmt);
// Debug builds only: attach the pretty-printed terminator head as a CLIF
// comment on the block's last instruction for readable disassembly dumps.
206 #[cfg(debug_assertions)]
208 let mut terminator_head = "\n".to_string();
212 .fmt_head(&mut terminator_head)
214 let inst = fx.bcx.func.layout.last_inst(block).unwrap();
215 fx.add_comment(inst, terminator_head);
218 fx.set_debug_loc(bb_data.terminator().source_info);
220 match &bb_data.terminator().kind {
221 TerminatorKind::Goto { target } => {
// Optimization: a Goto to a block that only returns (possibly after
// StorageDead statements) can emit the return directly instead of a jump.
222 if let TerminatorKind::Return = fx.mir[*target].terminator().kind {
223 let mut can_immediately_return = true;
224 for stmt in &fx.mir[*target].statements {
225 if let StatementKind::StorageDead(_) = stmt.kind {
227 // FIXME Can sometimes happen, see rust-lang/rust#70531
228 can_immediately_return = false;
233 if can_immediately_return {
234 crate::abi::codegen_return(fx);
239 let block = fx.get_block(*target);
240 fx.bcx.ins().jump(block, &[]);
242 TerminatorKind::Return => {
243 crate::abi::codegen_return(fx);
245 TerminatorKind::Assert {
// With overflow checks disabled, negation-overflow asserts are skipped
// entirely: jump straight to the success target.
252 if !fx.tcx.sess.overflow_checks() {
253 if let mir::AssertKind::OverflowNeg(_) = *msg {
254 let target = fx.get_block(*target);
255 fx.bcx.ins().jump(target, &[]);
259 let cond = trans_operand(fx, cond).load_scalar(fx);
261 let target = fx.get_block(*target);
262 let failure = fx.bcx.create_block();
// The panic path is marked cold to keep it out of the hot code layout.
263 fx.cold_blocks.insert(failure);
// Branch to `failure` on the failing polarity of `cond` (brz when the
// assert expects true, brnz when it expects false — selector elided here),
// then fall through to the success target.
266 fx.bcx.ins().brz(cond, failure, &[]);
268 fx.bcx.ins().brnz(cond, failure, &[]);
270 fx.bcx.ins().jump(target, &[]);
272 fx.bcx.switch_to_block(failure);
275 .get_caller_location(bb_data.terminator().source_info.span)
// Pick the panic lang item and its arguments: bounds checks get
// (index, len, location); everything else gets (msg_ptr, msg_len, location).
279 let lang_item = match msg {
280 AssertKind::BoundsCheck { ref len, ref index } => {
281 let len = trans_operand(fx, len).load_scalar(fx);
282 let index = trans_operand(fx, index).load_scalar(fx);
283 args = [index, len, location];
284 rustc_hir::lang_items::PanicBoundsCheckFnLangItem
287 let msg_str = msg.description();
288 let msg_ptr = fx.anonymous_str("assert", msg_str);
292 .iconst(fx.pointer_type, i64::try_from(msg_str.len()).unwrap());
293 args = [msg_ptr, msg_len, location];
294 rustc_hir::lang_items::PanicFnLangItem
298 let def_id = fx.tcx.lang_items().require(lang_item).unwrap_or_else(|s| {
301 .span_fatal(bb_data.terminator().source_info.span, &s)
304 let instance = Instance::mono(fx.tcx, def_id).polymorphize(fx.tcx);
305 let symbol_name = fx.tcx.symbol_name(instance).name;
// All three panic arguments are pointer-sized.
309 vec![fx.pointer_type, fx.pointer_type, fx.pointer_type],
// The panic lang item diverges; trap if it somehow returns.
314 crate::trap::trap_unreachable(fx, "panic lang item returned");
317 TerminatorKind::SwitchInt {
323 let discr = trans_operand(fx, discr).load_scalar(fx);
// Special-case two-target bool switches as a conditional branch instead of
// a jump table.
325 if switch_ty.kind == fx.tcx.types.bool.kind {
326 assert_eq!(targets.len(), 2);
327 let then_block = fx.get_block(targets[0]);
328 let else_block = fx.get_block(targets[1]);
329 let test_zero = match **values {
332 _ => unreachable!("{:?}", values),
// Peephole: strip redundant bint/bnot wrappers around the discriminant,
// flipping the branch polarity when a `not` is removed.
335 let discr = crate::optimize::peephole::maybe_unwrap_bint(&mut fx.bcx, discr);
336 let (discr, is_inverted) =
337 crate::optimize::peephole::maybe_unwrap_bool_not(&mut fx.bcx, discr);
338 let test_zero = if is_inverted { !test_zero } else { test_zero };
339 let discr = crate::optimize::peephole::maybe_unwrap_bint(&mut fx.bcx, discr);
341 crate::optimize::peephole::make_branchable_value(&mut fx.bcx, discr);
// brz/brnz chosen by `test_zero` (selector elided in this listing).
343 fx.bcx.ins().brz(discr, then_block, &[]);
344 fx.bcx.ins().jump(else_block, &[]);
346 fx.bcx.ins().brnz(discr, then_block, &[]);
347 fx.bcx.ins().jump(else_block, &[]);
// General case: cranelift-frontend's Switch builds the dispatch; the last
// target is MIR's "otherwise" fallback.
350 let mut switch = ::cranelift_frontend::Switch::new();
351 for (i, value) in values.iter().enumerate() {
352 let block = fx.get_block(targets[i]);
353 switch.set_entry(*value, block);
355 let otherwise_block = fx.get_block(targets[targets.len() - 1]);
356 switch.emit(&mut fx.bcx, discr, otherwise_block);
359 TerminatorKind::Call {
367 fx.tcx.sess.time("codegen call", || {
368 crate::abi::codegen_terminator_call(
378 TerminatorKind::InlineAsm {
385 crate::inline_asm::codegen_inline_asm(
387 bb_data.terminator().source_info.span,
// After the asm: jump to the destination block, or trap for noreturn asm.
394 Some(destination) => {
395 let destination_block = fx.get_block(destination);
396 fx.bcx.ins().jump(destination_block, &[]);
399 crate::trap::trap_unreachable(
401 "[corruption] Returned from noreturn inline asm",
406 TerminatorKind::Resume | TerminatorKind::Abort => {
407 trap_unreachable(fx, "[corruption] Unwinding bb reached.");
409 TerminatorKind::Unreachable => {
410 trap_unreachable(fx, "[corruption] Hit unreachable code.");
// These terminator kinds are removed by earlier MIR passes and must not
// reach codegen.
412 TerminatorKind::Yield { .. }
413 | TerminatorKind::FalseEdge { .. }
414 | TerminatorKind::FalseUnwind { .. }
415 | TerminatorKind::DropAndReplace { .. }
416 | TerminatorKind::GeneratorDrop => {
417 bug!("shouldn't exist at trans {:?}", bb_data.terminator());
419 TerminatorKind::Drop {
424 let drop_place = trans_place(fx, *place);
425 crate::abi::codegen_drop(fx, bb_data.terminator().source_info.span, drop_place);
427 let target_block = fx.get_block(*target);
428 fx.bcx.ins().jump(target_block, &[]);
// All blocks were created up front, so they can all be sealed now.
433 fx.bcx.seal_all_blocks();
// Lower one MIR statement to Cranelift IR. (The `fn trans_stmt` signature
// line itself is elided from this sampled listing; these are its parameters
// and body.)
438 fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
439 #[allow(unused_variables)] cur_block: Block,
440 stmt: &Statement<'tcx>,
// Print the offending statement if codegen panics, to aid debugging ICEs.
442 let _print_guard = crate::PrintOnPanic(|| format!("stmt {:?}", stmt));
444 fx.set_debug_loc(stmt.source_info);
// Intentionally-disabled debug aid (`false_debug_assertions` never matches):
// would attach the statement text as a CLIF comment.
446 #[cfg(false_debug_assertions)]
448 StatementKind::StorageLive(..) | StatementKind::StorageDead(..) => {} // Those are not very useful
450 let inst = fx.bcx.func.layout.last_inst(cur_block).unwrap();
451 fx.add_comment(inst, format!("{:?}", stmt));
456 StatementKind::SetDiscriminant {
460 let place = trans_place(fx, **place);
461 crate::discriminant::codegen_set_discriminant(fx, place, *variant_index);
// Assign: lower the rvalue and write it into the destination place,
// dispatching on the rvalue kind.
463 StatementKind::Assign(to_place_and_rval) => {
464 let lval = trans_place(fx, to_place_and_rval.0);
465 let dest_layout = lval.layout();
466 match &to_place_and_rval.1 {
467 Rvalue::Use(operand) => {
468 let val = trans_operand(fx, operand);
469 lval.write_cvalue(fx, val);
471 Rvalue::Ref(_, _, place) | Rvalue::AddressOf(_, place) => {
472 let place = trans_place(fx, *place);
473 let ref_ = place.place_ref(fx, lval.layout());
474 lval.write_cvalue(fx, ref_);
476 Rvalue::ThreadLocalRef(def_id) => {
477 let val = crate::constant::codegen_tls_ref(fx, *def_id, lval.layout());
478 lval.write_cvalue(fx, val);
480 Rvalue::BinaryOp(bin_op, lhs, rhs) => {
481 let lhs = trans_operand(fx, lhs);
482 let rhs = trans_operand(fx, rhs);
484 let res = crate::num::codegen_binop(fx, *bin_op, lhs, rhs);
485 lval.write_cvalue(fx, res);
// CheckedBinaryOp yields (value, overflow_flag); with overflow checks off
// the flag is a constant false and the plain binop is used.
487 Rvalue::CheckedBinaryOp(bin_op, lhs, rhs) => {
488 let lhs = trans_operand(fx, lhs);
489 let rhs = trans_operand(fx, rhs);
491 let res = if !fx.tcx.sess.overflow_checks() {
493 crate::num::trans_int_binop(fx, *bin_op, lhs, rhs).load_scalar(fx);
494 let is_overflow = fx.bcx.ins().iconst(types::I8, 0);
495 CValue::by_val_pair(val, is_overflow, lval.layout())
497 crate::num::trans_checked_int_binop(fx, *bin_op, lhs, rhs)
500 lval.write_cvalue(fx, res);
502 Rvalue::UnaryOp(un_op, operand) => {
503 let operand = trans_operand(fx, operand);
504 let layout = operand.layout();
505 let val = operand.load_scalar(fx);
506 let res = match un_op {
// `!` on bool is compare-with-zero; on integers it is bitwise not.
507 UnOp::Not => match layout.ty.kind {
509 let res = fx.bcx.ins().icmp_imm(IntCC::Equal, val, 0);
510 CValue::by_val(fx.bcx.ins().bint(types::I8, res), layout)
512 ty::Uint(_) | ty::Int(_) => {
513 CValue::by_val(fx.bcx.ins().bnot(val), layout)
515 _ => unreachable!("un op Not for {:?}", layout.ty),
517 UnOp::Neg => match layout.ty.kind {
518 ty::Int(IntTy::I128) => {
519 // FIXME remove this case once ineg.i128 works
520 let zero = CValue::const_val(fx, layout, 0);
521 crate::num::trans_int_binop(fx, BinOp::Sub, zero, operand)
523 ty::Int(_) => CValue::by_val(fx.bcx.ins().ineg(val), layout),
524 ty::Float(_) => CValue::by_val(fx.bcx.ins().fneg(val), layout),
525 _ => unreachable!("un op Neg for {:?}", layout.ty),
528 lval.write_cvalue(fx, res);
// fn item -> fn pointer: resolve the instance and take its address.
530 Rvalue::Cast(CastKind::Pointer(PointerCast::ReifyFnPointer), operand, to_ty) => {
531 let from_ty = fx.monomorphize(&operand.ty(&fx.mir.local_decls, fx.tcx));
532 let to_layout = fx.layout_of(fx.monomorphize(to_ty));
534 ty::FnDef(def_id, substs) => {
535 let func_ref = fx.get_function_ref(
536 Instance::resolve_for_fn_ptr(
538 ParamEnv::reveal_all(),
543 .polymorphize(fx.tcx),
545 let func_addr = fx.bcx.ins().func_addr(fx.pointer_type, func_ref);
546 lval.write_cvalue(fx, CValue::by_val(func_addr, to_layout));
548 _ => bug!("Trying to ReifyFnPointer on non FnDef {:?}", from_ty),
// These pointer casts are representation-preserving: only the layout changes.
551 Rvalue::Cast(CastKind::Pointer(PointerCast::UnsafeFnPointer), operand, to_ty)
552 | Rvalue::Cast(CastKind::Pointer(PointerCast::MutToConstPointer), operand, to_ty)
553 | Rvalue::Cast(CastKind::Pointer(PointerCast::ArrayToPointer), operand, to_ty) => {
554 let to_layout = fx.layout_of(fx.monomorphize(to_ty));
555 let operand = trans_operand(fx, operand);
556 lval.write_cvalue(fx, operand.cast_pointer_to(to_layout));
// Misc casts: ptr<->ptr (fat/thin), enum -> discriminant, and numeric casts.
558 Rvalue::Cast(CastKind::Misc, operand, to_ty) => {
559 let operand = trans_operand(fx, operand);
560 let from_ty = operand.layout().ty;
561 let to_ty = fx.monomorphize(to_ty);
// Local helper: whether a pointer-like type carries metadata (fat pointer).
// (Its `fn is_fat_ptr` header line is elided from this listing.)
564 fx: &FunctionCx<'_, 'tcx, impl Backend>,
567 ty.builtin_deref(true)
573 has_ptr_meta(fx.tcx, pointee_ty)
579 if is_fat_ptr(fx, from_ty) {
580 if is_fat_ptr(fx, to_ty) {
581 // fat-ptr -> fat-ptr
582 lval.write_cvalue(fx, operand.cast_pointer_to(dest_layout));
584 // fat-ptr -> thin-ptr
585 let (ptr, _extra) = operand.load_scalar_pair(fx);
586 lval.write_cvalue(fx, CValue::by_val(ptr, dest_layout))
588 } else if let ty::Adt(adt_def, _substs) = from_ty.kind {
589 // enum -> discriminant value
590 assert!(adt_def.is_enum());
// Casting an enum only makes sense to an integer target type.
592 ty::Uint(_) | ty::Int(_) => {}
593 _ => unreachable!("cast adt {} -> {}", from_ty, to_ty),
596 use rustc_target::abi::{Int, TagEncoding, Variants};
598 match &operand.layout().variants {
// Single-variant enum: the discriminant is a compile-time constant.
599 Variants::Single { index } => {
603 .discriminant_for_variant(fx.tcx, *index)
605 let discr = if discr.ty.is_signed() {
606 rustc_middle::mir::interpret::sign_extend(
608 fx.layout_of(discr.ty).size,
614 let discr = CValue::const_val(fx, fx.layout_of(to_ty), discr);
615 lval.write_cvalue(fx, discr);
// Direct tag encoding: read the tag field and int-cast it to the target.
620 tag_encoding: TagEncoding::Direct,
623 let cast_to = fx.clif_type(dest_layout.ty).unwrap();
625 // Read the tag/niche-encoded discriminant from memory.
627 operand.value_field(fx, mir::Field::new(*tag_field));
628 let encoded_discr = encoded_discr.load_scalar(fx);
630 // Decode the discriminant (specifically if it's niche-encoded).
631 let signed = match tag.value {
632 Int(_, signed) => signed,
635 let val = clif_intcast(fx, encoded_discr, cast_to, signed);
636 let val = CValue::by_val(val, dest_layout);
637 lval.write_cvalue(fx, val);
// Niche-encoded multi-variant enums cannot be cast to an integer here.
639 Variants::Multiple { .. } => unreachable!(),
// Plain numeric cast (int<->int, int<->float, float<->float).
642 let to_clif_ty = fx.clif_type(to_ty).unwrap();
643 let from = operand.load_scalar(fx);
645 let res = clif_int_or_float_cast(
652 lval.write_cvalue(fx, CValue::by_val(res, dest_layout));
// Non-capturing closure -> fn pointer: resolve its FnOnce shim and take
// the address.
656 CastKind::Pointer(PointerCast::ClosureFnPointer(_)),
660 let operand = trans_operand(fx, operand);
661 match operand.layout().ty.kind {
662 ty::Closure(def_id, substs) => {
663 let instance = Instance::resolve_closure(
667 ty::ClosureKind::FnOnce,
669 .polymorphize(fx.tcx);
670 let func_ref = fx.get_function_ref(instance);
671 let func_addr = fx.bcx.ins().func_addr(fx.pointer_type, func_ref);
672 lval.write_cvalue(fx, CValue::by_val(func_addr, lval.layout()));
674 _ => bug!("{} cannot be cast to a fn ptr", operand.layout().ty),
677 Rvalue::Cast(CastKind::Pointer(PointerCast::Unsize), operand, _to_ty) => {
678 let operand = trans_operand(fx, operand);
679 operand.unsize_value(fx, lval);
681 Rvalue::Discriminant(place) => {
682 let place = trans_place(fx, *place);
683 let value = place.to_cvalue(fx);
685 crate::discriminant::codegen_get_discriminant(fx, value, dest_layout);
686 lval.write_cvalue(fx, discr);
// `[x; N]`: evaluate the repeat count to a constant and write the operand
// into each element slot.
688 Rvalue::Repeat(operand, times) => {
689 let operand = trans_operand(fx, operand);
692 .eval(fx.tcx, ParamEnv::reveal_all())
694 .try_to_bits(fx.tcx.data_layout.pointer_size)
697 let index = fx.bcx.ins().iconst(fx.pointer_type, i as i64);
698 let to = lval.place_index(fx, index);
699 to.write_cvalue(fx, operand);
702 Rvalue::Len(place) => {
703 let place = trans_place(fx, *place);
704 let usize_layout = fx.layout_of(fx.tcx.types.usize);
705 let len = codegen_array_len(fx, place);
706 lval.write_cvalue(fx, CValue::by_val(len, usize_layout));
// `box x` allocation: call the exchange_malloc lang item with the content's
// size and alignment.
708 Rvalue::NullaryOp(NullOp::Box, content_ty) => {
709 use rustc_hir::lang_items::ExchangeMallocFnLangItem;
711 let usize_type = fx.clif_type(fx.tcx.types.usize).unwrap();
712 let content_ty = fx.monomorphize(content_ty);
713 let layout = fx.layout_of(content_ty);
714 let llsize = fx.bcx.ins().iconst(usize_type, layout.size.bytes() as i64);
718 .iconst(usize_type, layout.align.abi.bytes() as i64);
719 let box_layout = fx.layout_of(fx.tcx.mk_box(content_ty));
722 let def_id = match fx.tcx.lang_items().require(ExchangeMallocFnLangItem) {
727 .fatal(&format!("allocation of `{}` {}", box_layout.ty, s));
730 let instance = ty::Instance::mono(fx.tcx, def_id).polymorphize(fx.tcx);
731 let func_ref = fx.get_function_ref(instance);
732 let call = fx.bcx.ins().call(func_ref, &[llsize, llalign]);
733 let ptr = fx.bcx.inst_results(call)[0];
734 lval.write_cvalue(fx, CValue::by_val(ptr, box_layout));
// size_of: only valid for sized types; the size is a layout-time constant.
736 Rvalue::NullaryOp(NullOp::SizeOf, ty) => {
740 .is_sized(fx.tcx.at(stmt.source_info.span), ParamEnv::reveal_all()));
741 let ty_size = fx.layout_of(fx.monomorphize(ty)).size.bytes();
743 CValue::const_val(fx, fx.layout_of(fx.tcx.types.usize), ty_size.into());
744 lval.write_cvalue(fx, val);
746 Rvalue::Aggregate(kind, operands) => match **kind {
747 AggregateKind::Array(_ty) => {
748 for (i, operand) in operands.into_iter().enumerate() {
749 let operand = trans_operand(fx, operand);
750 let index = fx.bcx.ins().iconst(fx.pointer_type, i as i64);
751 let to = lval.place_index(fx, index);
752 to.write_cvalue(fx, operand);
// Non-array aggregates are lowered away before codegen.
755 _ => unreachable!("shouldn't exist at trans {:?}", to_place_and_rval.1),
// Statements with no runtime effect at this stage.
759 StatementKind::StorageLive(_)
760 | StatementKind::StorageDead(_)
762 | StatementKind::FakeRead(..)
763 | StatementKind::Retag { .. }
764 | StatementKind::AscribeUserType(..) => {}
// Legacy `llvm_asm!`: general inline asm is unsupported, but a handful of
// known asm templates used by std are pattern-matched and emulated.
766 StatementKind::LlvmInlineAsm(asm) => {
767 use rustc_span::symbol::Symbol;
773 let rustc_hir::LlvmInlineAsmInner {
774 asm: asm_code, // Name
775 outputs: output_names, // Vec<LlvmInlineAsmOutput>
776 inputs: input_names, // Vec<Name>
777 clobbers, // Vec<Name>
// Dispatch on the exact (trimmed) asm template text.
783 match asm_code.as_str().trim() {
// The cpuid template used by std; validated below, then emulated via an
// intrinsic helper instead of emitting real asm.
787 "mov %rbx, %rsi\n cpuid\n xchg %rbx, %rsi" => {
790 &[Symbol::intern("{eax}"), Symbol::intern("{ecx}")]
792 assert_eq!(output_names.len(), 4);
793 for (i, c) in (&["={eax}", "={esi}", "={ecx}", "={edx}"])
797 assert_eq!(&output_names[i].constraint.as_str(), c);
798 assert!(!output_names[i].is_rw);
799 assert!(!output_names[i].is_indirect);
802 assert_eq!(clobbers, &[]);
805 assert!(!alignstack);
807 assert_eq!(inputs.len(), 2);
808 let leaf = trans_operand(fx, &inputs[0].1).load_scalar(fx); // %eax
809 let subleaf = trans_operand(fx, &inputs[1].1).load_scalar(fx); // %ecx
811 let (eax, ebx, ecx, edx) =
812 crate::intrinsics::codegen_cpuid_call(fx, leaf, subleaf);
// Write the four emulated register results into the output places.
814 assert_eq!(outputs.len(), 4);
815 trans_place(fx, outputs[0])
816 .write_cvalue(fx, CValue::by_val(eax, fx.layout_of(fx.tcx.types.u32)));
817 trans_place(fx, outputs[1])
818 .write_cvalue(fx, CValue::by_val(ebx, fx.layout_of(fx.tcx.types.u32)));
819 trans_place(fx, outputs[2])
820 .write_cvalue(fx, CValue::by_val(ecx, fx.layout_of(fx.tcx.types.u32)));
821 trans_place(fx, outputs[3])
822 .write_cvalue(fx, CValue::by_val(edx, fx.layout_of(fx.tcx.types.u32)));
// xgetbv: recognized but not implemented — trap at runtime if reached.
825 assert_eq!(input_names, &[Symbol::intern("{ecx}")]);
827 assert_eq!(output_names.len(), 2);
828 for (i, c) in (&["={eax}", "={edx}"]).iter().enumerate() {
829 assert_eq!(&output_names[i].constraint.as_str(), c);
830 assert!(!output_names[i].is_rw);
831 assert!(!output_names[i].is_indirect);
834 assert_eq!(clobbers, &[]);
837 assert!(!alignstack);
839 crate::trap::trap_unimplemented(fx, "_xgetbv arch intrinsic is not supported");
841 // ___chkstk, ___chkstk_ms and __alloca are only used on Windows
// Stack probes / alloca inside known Windows runtime symbols: trap.
844 .symbol_name(fx.instance)
846 .starts_with("___chkstk") =>
848 crate::trap::trap_unimplemented(fx, "Stack probes are not supported");
850 _ if fx.tcx.symbol_name(fx.instance).name == "__alloca" => {
851 crate::trap::trap_unimplemented(fx, "Alloca is not supported");
853 // Used in sys::windows::abort_internal
855 crate::trap::trap_unimplemented(fx, "Windows abort");
// Any other llvm_asm! template is a hard error at compile time.
860 .span_fatal(stmt.source_info.span, "Inline assembly is not supported"),
863 StatementKind::Coverage { .. } => fx.tcx.sess.fatal("-Zcoverage is unimplemented"),
// Produce the runtime length of an array or slice place as an SSA value:
// arrays get a constant (their length is a const evaluated here), slices
// read the length from the fat-pointer metadata.
867 fn codegen_array_len<'tcx>(
868 fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
871 match place.layout().ty.kind {
872 ty::Array(_elem_ty, len) => {
875 .eval(fx.tcx, ParamEnv::reveal_all())
876 .eval_usize(fx.tcx, ParamEnv::reveal_all()) as i64;
877 fx.bcx.ins().iconst(fx.pointer_type, len)
// Slices are unsized: the length lives in the place's extra metadata.
879 ty::Slice(_elem_ty) => place
880 .to_ptr_maybe_unsized()
882 .expect("Length metadata for slice place"),
883 _ => bug!("Rvalue::Len({:?})", place),
// Lower a MIR `Place` to a `CPlace`: start from the base local, then apply
// each projection element (deref, field, index, constant index, subslice,
// downcast) in order.
887 pub(crate) fn trans_place<'tcx>(
888 fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
891 let mut cplace = fx.get_local_place(place.local);
893 for elem in place.projection {
895 PlaceElem::Deref => {
896 cplace = cplace.place_deref(fx);
898 PlaceElem::Field(field, _ty) => {
899 cplace = cplace.place_field(fx, field);
901 PlaceElem::Index(local) => {
// The index is itself a local; load it as a scalar before indexing.
902 let index = fx.get_local_place(local).to_cvalue(fx).load_scalar(fx);
903 cplace = cplace.place_index(fx, index);
905 PlaceElem::ConstantIndex {
// `from_end` indices count backwards from the runtime length.
910 let index = if !from_end {
911 fx.bcx.ins().iconst(fx.pointer_type, i64::from(offset))
913 let len = codegen_array_len(fx, cplace);
914 fx.bcx.ins().iadd_imm(len, -i64::from(offset))
916 cplace = cplace.place_index(fx, index);
918 PlaceElem::Subslice { from, to, from_end } => {
919 // These indices are generated by slice patterns.
920 // slice[from:-to] in Python terms.
922 match cplace.layout().ty.kind {
// Array subslice: statically-known bounds, so the result is a smaller
// array type at a byte offset.
923 ty::Array(elem_ty, _len) => {
924 assert!(!from_end, "array subslices are never `from_end`");
925 let elem_layout = fx.layout_of(elem_ty);
926 let ptr = cplace.to_ptr();
927 cplace = CPlace::for_ptr(
928 ptr.offset_i64(fx, elem_layout.size.bytes() as i64 * i64::from(from)),
929 fx.layout_of(fx.tcx.mk_array(elem_ty, u64::from(to) - u64::from(from))),
// Slice subslice: adjust both the data pointer and the length metadata.
932 ty::Slice(elem_ty) => {
933 assert!(from_end, "slice subslices should be `from_end`");
934 let elem_layout = fx.layout_of(elem_ty);
935 let (ptr, len) = cplace.to_ptr_maybe_unsized();
936 let len = len.unwrap();
937 cplace = CPlace::for_ptr_with_extra(
938 ptr.offset_i64(fx, elem_layout.size.bytes() as i64 * i64::from(from)),
// New length = old length - from - to.
941 .iadd_imm(len, -(i64::from(from) + i64::from(to))),
948 PlaceElem::Downcast(_adt_def, variant) => {
949 cplace = cplace.downcast_variant(fx, variant);
// Lower a MIR `Operand` to a `CValue`: moves and copies read from the
// lowered place (treated identically at this stage), constants go through
// constant codegen.
957 pub(crate) fn trans_operand<'tcx>(
958 fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
959 operand: &Operand<'tcx>,
962 Operand::Move(place) | Operand::Copy(place) => {
963 let cplace = trans_place(fx, *place);
966 Operand::Constant(const_) => crate::constant::trans_constant(fx, const_),