1 use rustc_middle::ty::adjustment::PointerCast;
2 use rustc_index::vec::IndexVec;
// Compiles one monomorphized `Instance` to machine code: builds its Cranelift
// IR from the MIR body, verifies and optimizes the CLIF, defines the function
// in the backend module, and records debug/unwind information for it.
6 pub(crate) fn trans_fn<'tcx, B: Backend + 'static>(
7 cx: &mut crate::CodegenCx<'tcx, B>,
8 instance: Instance<'tcx>,
// Fetch the (possibly shim) MIR body for this instance.
13 let mir = tcx.instance_mir(instance.def);
// Declare the symbol up front so we have a `FuncId` to name the CLIF function.
16 let (name, sig) = get_function_name_and_sig(tcx, cx.module.isa().triple(), instance, false);
17 let func_id = cx.module.declare_function(&name, linkage, &sig).unwrap();
19 // Make the FunctionBuilder
20 let mut func_ctx = FunctionBuilderContext::new();
// Reuse the cached codegen context across functions; swap a fresh `Function`
// into it here and (presumably) swap it back before defining — the swap-back
// is elided from this view, so confirm against the full file.
21 cx.cached_context.clear();
23 let mut func = Function::new();
24 std::mem::swap(&mut cx.cached_context.func, &mut func);
25 func.name = ExternalName::user(0, func_id.as_u32());
27 func.collect_debug_info();
29 let mut bcx = FunctionBuilder::new(&mut func, &mut func_ctx);
// Pre-create a dedicated start block plus one CLIF block per MIR basic block.
32 let start_block = bcx.create_block();
33 let block_map: IndexVec<BasicBlock, Block> = (0..mir.basic_blocks().len()).map(|_| bcx.create_block()).collect();
36 let pointer_type = cx.module.target_config().pointer_type();
37 let clif_comments = crate::pretty_clif::CommentWriter::new(tcx, instance);
// Bundle all per-function codegen state into the `FunctionCx` used below.
39 let mut fx = FunctionCx {
49 local_map: FxHashMap::with_capacity_and_hasher(mir.local_decls.len(), Default::default()),
50 caller_location: None, // set by `codegen_fn_prelude`
51 cold_blocks: EntitySet::new(),
54 source_info_set: indexmap::IndexSet::new(),
// A function taking an uninhabited argument can never actually be called;
// emit an unreachable trap instead of codegenning the body.
60 let arg_uninhabited = fx.mir.args_iter().any(|arg| fx.layout_of(fx.monomorphize(&fx.mir.local_decls[arg].ty)).abi.is_uninhabited());
63 fx.bcx.append_block_params_for_function_params(fx.block_map[START_BLOCK]);
64 fx.bcx.switch_to_block(fx.block_map[START_BLOCK]);
65 crate::trap::trap_unreachable(&mut fx, "function has uninhabited argument");
// Normal path: codegen the ABI prelude, then translate the whole MIR body.
67 tcx.sess.time("codegen clif ir", || {
68 tcx.sess.time("codegen prelude", || crate::abi::codegen_fn_prelude(&mut fx, start_block));
69 codegen_fn_content(&mut fx);
73 // Recover all necessary data from fx, before accessing func will prevent future access to it.
74 let instance = fx.instance;
75 let mut clif_comments = fx.clif_comments;
76 let source_info_set = fx.source_info_set;
77 let local_map = fx.local_map;
78 let cold_blocks = fx.cold_blocks;
80 // Store function in context
81 let context = &mut cx.cached_context;
// Dump the unoptimized CLIF for debugging, then run the Cranelift verifier.
84 crate::pretty_clif::write_clif_file(
94 verify_func(tcx, &clif_comments, &context.func);
96 // Perform rust specific optimizations
97 tcx.sess.time("optimize clif ir", || {
98 crate::optimize::optimize_function(tcx, instance, context, &cold_blocks, &mut clif_comments);
101 // If the return block is not reachable, then the SSA builder may have inserted a `iconst.i128`
102 // instruction, which doesn't have an encoding.
103 context.compute_cfg();
104 context.compute_domtree();
105 context.eliminate_unreachable_code(cx.module.isa()).unwrap();
// Hand the finished function to the backend module for machine-code emission.
// Traps go to a `NullTrapSink` — trap metadata is discarded here.
108 let module = &mut cx.module;
111 || module.define_function(
114 &mut cranelift_codegen::binemit::NullTrapSink {},
118 // Write optimized function to file for debugging
119 crate::pretty_clif::write_clif_file(
122 Some(cx.module.isa()),
128 // Define debuginfo for function
129 let isa = cx.module.isa();
130 let debug_context = &mut cx.debug_context;
131 let unwind_context = &mut cx.unwind_context;
132 tcx.sess.time("generate debug info", || {
// Debug info is optional (only when a `debug_context` exists).
133 if let Some(debug_context) = debug_context {
134 debug_context.define_function(instance, func_id, &name, isa, context, &source_info_set, local_map);
// Register the function with the unwind-info writer as well.
136 unwind_context.add_function(func_id, &context, isa);
139 // Clear context to make it usable for the next function
// Runs the Cranelift IR verifier over `func`. On failure it first reports the
// raw verifier error via the session, then aborts compilation with a
// pretty-printed version annotated by the per-instruction comment `writer`.
143 pub(crate) fn verify_func(tcx: TyCtxt<'_>, writer: &crate::pretty_clif::CommentWriter, func: &Function) {
144 tcx.sess.time("verify clif ir", || {
// Verification uses default Cranelift settings, independent of user opts.
145 let flags = cranelift_codegen::settings::Flags::new(cranelift_codegen::settings::builder());
146 match cranelift_codegen::verify_function(&func, &flags) {
149 tcx.sess.err(&format!("{:?}", err));
150 let pretty_error = cranelift_codegen::print_errors::pretty_verifier_error(
153 Some(Box::new(writer)),
// `fatal` aborts compilation — a verifier error is never recoverable here.
157 .fatal(&format!("cranelift verify error:\n{}", pretty_error));
// Translates the body of the current function: iterates over every MIR basic
// block, codegens its statements via `trans_stmt`, then lowers its terminator
// into CLIF control flow. Finally seals all blocks for the SSA builder.
163 fn codegen_fn_content(fx: &mut FunctionCx<'_, '_, impl Backend>) {
164 crate::constant::check_constants(fx);
166 for (bb, bb_data) in fx.mir.basic_blocks().iter_enumerated() {
167 let block = fx.get_block(bb);
168 fx.bcx.switch_to_block(block);
170 if bb_data.is_cleanup {
171 // Unwinding after panicking is not supported
174 // FIXME once unwinding is supported uncomment next lines
175 // // Unwinding is unlikely to happen, so mark cleanup block's as cold.
176 // fx.cold_blocks.insert(block);
// Lower every statement of this block in order.
180 for stmt in &bb_data.statements {
181 fx.set_debug_loc(stmt.source_info);
182 trans_stmt(fx, block, stmt);
// Debug builds only: attach the pretty-printed terminator as a CLIF comment.
185 #[cfg(debug_assertions)]
187 let mut terminator_head = "\n".to_string();
191 .fmt_head(&mut terminator_head)
193 let inst = fx.bcx.func.layout.last_inst(block).unwrap();
194 fx.add_comment(inst, terminator_head);
197 fx.set_debug_loc(bb_data.terminator().source_info);
199 match &bb_data.terminator().kind {
200 TerminatorKind::Goto { target } => {
// Optimization: a goto whose target immediately returns (and only runs
// StorageDead statements) can emit the return directly, skipping the jump.
201 if let TerminatorKind::Return = fx.mir[*target].terminator().kind {
202 let mut can_immediately_return = true;
203 for stmt in &fx.mir[*target].statements {
204 if let StatementKind::StorageDead(_) = stmt.kind {
206 // FIXME Can sometimes happen, see rust-lang/rust#70531
207 can_immediately_return = false;
212 if can_immediately_return {
213 crate::abi::codegen_return(fx);
218 let block = fx.get_block(*target);
219 fx.bcx.ins().jump(block, &[]);
221 TerminatorKind::Return => {
222 crate::abi::codegen_return(fx);
224 TerminatorKind::Assert {
// With overflow checks disabled, OverflowNeg asserts are elided entirely —
// jump straight to the success target.
231 if !fx.tcx.sess.overflow_checks() {
232 if let mir::AssertKind::OverflowNeg(_) = *msg {
233 let target = fx.get_block(*target);
234 fx.bcx.ins().jump(target, &[]);
// Branch on the assert condition; the failure block is marked cold so the
// optimizer lays it out off the hot path. (Which of brz/brnz is emitted
// depends on the assert's `expected` polarity — selection logic is elided
// from this view.)
238 let cond = trans_operand(fx, cond).load_scalar(fx);
240 let target = fx.get_block(*target);
241 let failure = fx.bcx.create_block();
242 fx.cold_blocks.insert(failure);
245 fx.bcx.ins().brz(cond, failure, &[]);
247 fx.bcx.ins().brnz(cond, failure, &[]);
249 fx.bcx.ins().jump(target, &[]);
// Failure path: call the appropriate panic lang item with its arguments.
251 fx.bcx.switch_to_block(failure);
253 let location = fx.get_caller_location(bb_data.terminator().source_info.span).load_scalar(fx);
256 let lang_item = match msg {
257 AssertKind::BoundsCheck { ref len, ref index } => {
258 let len = trans_operand(fx, len).load_scalar(fx);
259 let index = trans_operand(fx, index).load_scalar(fx);
260 args = [index, len, location];
261 rustc_hir::lang_items::PanicBoundsCheckFnLangItem
// Other assert kinds: panic with a static message string + its length.
264 let msg_str = msg.description();
265 let msg_ptr = fx.anonymous_str("assert", msg_str);
266 let msg_len = fx.bcx.ins().iconst(fx.pointer_type, i64::try_from(msg_str.len()).unwrap());
267 args = [msg_ptr, msg_len, location];
268 rustc_hir::lang_items::PanicFnLangItem
272 let def_id = fx.tcx.lang_items().require(lang_item).unwrap_or_else(|s| {
273 fx.tcx.sess.span_fatal(bb_data.terminator().source_info.span, &s)
276 let instance = Instance::mono(fx.tcx, def_id).polymorphize(fx.tcx);
277 let symbol_name = fx.tcx.symbol_name(instance).name;
// Panic lang items take three pointer-sized arguments and never return.
279 fx.lib_call(&*symbol_name, vec![fx.pointer_type, fx.pointer_type, fx.pointer_type], vec![], &args);
281 crate::trap::trap_unreachable(fx, "panic lang item returned");
284 TerminatorKind::SwitchInt {
// Lower to a cranelift-frontend `Switch`; the last target is the
// `otherwise` fallthrough per MIR's SwitchInt convention.
290 let discr = trans_operand(fx, discr).load_scalar(fx);
291 let mut switch = ::cranelift_frontend::Switch::new();
292 for (i, value) in values.iter().enumerate() {
293 let block = fx.get_block(targets[i]);
294 switch.set_entry(*value, block);
296 let otherwise_block = fx.get_block(targets[targets.len() - 1]);
297 switch.emit(&mut fx.bcx, discr, otherwise_block);
299 TerminatorKind::Call {
307 fx.tcx.sess.time("codegen call", || crate::abi::codegen_terminator_call(
316 TerminatorKind::InlineAsm {
323 crate::inline_asm::codegen_inline_asm(
325 bb_data.terminator().source_info.span,
// After the asm: jump to its destination, or trap if it is noreturn.
332 Some(destination) => {
333 let destination_block = fx.get_block(destination);
334 fx.bcx.ins().jump(destination_block, &[]);
337 crate::trap::trap_unreachable(fx, "[corruption] Returned from noreturn inline asm");
// Unwinding is unsupported in this backend, so reaching these is a bug.
341 TerminatorKind::Resume | TerminatorKind::Abort => {
342 trap_unreachable(fx, "[corruption] Unwinding bb reached.");
344 TerminatorKind::Unreachable => {
345 trap_unreachable(fx, "[corruption] Hit unreachable code.");
// These terminator kinds are removed by earlier MIR passes.
347 TerminatorKind::Yield { .. }
348 | TerminatorKind::FalseEdge { .. }
349 | TerminatorKind::FalseUnwind { .. }
350 | TerminatorKind::DropAndReplace { .. }
351 | TerminatorKind::GeneratorDrop => {
352 bug!("shouldn't exist at trans {:?}", bb_data.terminator());
354 TerminatorKind::Drop {
359 let drop_place = trans_place(fx, *place);
360 crate::abi::codegen_drop(fx, bb_data.terminator().source_info.span, drop_place);
362 let target_block = fx.get_block(*target);
363 fx.bcx.ins().jump(target_block, &[]);
// All blocks and their predecessors are known now; seal for SSA construction.
368 fx.bcx.seal_all_blocks();
373 fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
374 #[allow(unused_variables)]
376 stmt: &Statement<'tcx>,
// Translate a single MIR statement into CLIF instructions. `_print_guard`
// dumps the offending statement if codegen panics mid-way.
378 let _print_guard = crate::PrintOnPanic(|| format!("stmt {:?}", stmt));
380 fx.set_debug_loc(stmt.source_info);
// Deliberately disabled (`false_debug_assertions` never matches): would
// attach the statement's Debug repr as a CLIF comment for debugging.
382 #[cfg(false_debug_assertions)]
384 StatementKind::StorageLive(..) | StatementKind::StorageDead(..) => {} // Those are not very useful
386 let inst = fx.bcx.func.layout.last_inst(cur_block).unwrap();
387 fx.add_comment(inst, format!("{:?}", stmt));
392 StatementKind::SetDiscriminant {
396 let place = trans_place(fx, **place);
397 crate::discriminant::codegen_set_discriminant(fx, place, *variant_index);
// The main case: evaluate an rvalue and store it into the destination place.
399 StatementKind::Assign(to_place_and_rval) => {
400 let lval = trans_place(fx, to_place_and_rval.0);
401 let dest_layout = lval.layout();
402 match &to_place_and_rval.1 {
403 Rvalue::Use(operand) => {
404 let val = trans_operand(fx, operand);
405 lval.write_cvalue(fx, val);
407 Rvalue::Ref(_, _, place) | Rvalue::AddressOf(_, place) => {
408 let place = trans_place(fx, *place);
409 place.write_place_ref(fx, lval);
411 Rvalue::ThreadLocalRef(def_id) => {
412 let val = crate::constant::codegen_tls_ref(fx, *def_id, lval.layout());
413 lval.write_cvalue(fx, val);
415 Rvalue::BinaryOp(bin_op, lhs, rhs) => {
416 let lhs = trans_operand(fx, lhs);
417 let rhs = trans_operand(fx, rhs);
419 let res = crate::num::codegen_binop(fx, *bin_op, lhs, rhs);
420 lval.write_cvalue(fx, res);
// Checked ops produce a (value, overflow-flag) pair. With overflow checks
// disabled, emit the unchecked op and a constant-false flag instead.
422 Rvalue::CheckedBinaryOp(bin_op, lhs, rhs) => {
423 let lhs = trans_operand(fx, lhs);
424 let rhs = trans_operand(fx, rhs);
426 let res = if !fx.tcx.sess.overflow_checks() {
428 crate::num::trans_int_binop(fx, *bin_op, lhs, rhs).load_scalar(fx);
429 let is_overflow = fx.bcx.ins().iconst(types::I8, 0);
430 CValue::by_val_pair(val, is_overflow, lval.layout())
432 crate::num::trans_checked_int_binop(fx, *bin_op, lhs, rhs)
435 lval.write_cvalue(fx, res);
437 Rvalue::UnaryOp(un_op, operand) => {
438 let operand = trans_operand(fx, operand);
439 let layout = operand.layout();
440 let val = operand.load_scalar(fx);
441 let res = match un_op {
// `Not` on bool is a compare-against-zero; on integers a bitwise not.
443 match layout.ty.kind {
445 let res = fx.bcx.ins().icmp_imm(IntCC::Equal, val, 0);
446 CValue::by_val(fx.bcx.ins().bint(types::I8, res), layout)
448 ty::Uint(_) | ty::Int(_) => {
449 CValue::by_val(fx.bcx.ins().bnot(val), layout)
451 _ => unreachable!("un op Not for {:?}", layout.ty),
454 UnOp::Neg => match layout.ty.kind {
455 ty::Int(IntTy::I128) => {
456 // FIXME remove this case once ineg.i128 works
// i128 negation is lowered as `0 - x` to sidestep the missing encoding.
457 let zero = CValue::const_val(fx, layout, 0);
458 crate::num::trans_int_binop(fx, BinOp::Sub, zero, operand)
461 CValue::by_val(fx.bcx.ins().ineg(val), layout)
463 CValue::by_val(fx.bcx.ins().fneg(val), layout)
// NOTE(review): the line above appears at original line 464 in this view;
// preserved verbatim.
464 CValue::by_val(fx.bcx.ins().fneg(val), layout)
466 _ => unreachable!("un op Neg for {:?}", layout.ty),
469 lval.write_cvalue(fx, res);
// Reify a FnDef zero-sized value into an actual function pointer.
471 Rvalue::Cast(CastKind::Pointer(PointerCast::ReifyFnPointer), operand, to_ty) => {
472 let from_ty = fx.monomorphize(&operand.ty(&fx.mir.local_decls, fx.tcx));
473 let to_layout = fx.layout_of(fx.monomorphize(to_ty));
475 ty::FnDef(def_id, substs) => {
476 let func_ref = fx.get_function_ref(
477 Instance::resolve_for_fn_ptr(fx.tcx, ParamEnv::reveal_all(), def_id, substs)
479 .polymorphize(fx.tcx),
481 let func_addr = fx.bcx.ins().func_addr(fx.pointer_type, func_ref);
482 lval.write_cvalue(fx, CValue::by_val(func_addr, to_layout));
484 _ => bug!("Trying to ReifyFnPointer on non FnDef {:?}", from_ty),
// These pointer casts change only the type, not the representation.
487 Rvalue::Cast(CastKind::Pointer(PointerCast::UnsafeFnPointer), operand, to_ty)
488 | Rvalue::Cast(CastKind::Pointer(PointerCast::MutToConstPointer), operand, to_ty)
489 | Rvalue::Cast(CastKind::Pointer(PointerCast::ArrayToPointer), operand, to_ty) => {
490 let to_layout = fx.layout_of(fx.monomorphize(to_ty));
491 let operand = trans_operand(fx, operand);
492 lval.write_cvalue(fx, operand.cast_pointer_to(to_layout));
// `Misc` casts: numeric casts, fat/thin pointer conversions, and
// enum-to-discriminant casts.
494 Rvalue::Cast(CastKind::Misc, operand, to_ty) => {
495 let operand = trans_operand(fx, operand);
496 let from_ty = operand.layout().ty;
497 let to_ty = fx.monomorphize(to_ty);
// Local helper: a pointer is "fat" when its pointee carries metadata.
500 fx: &FunctionCx<'_, 'tcx, impl Backend>,
503 ty.builtin_deref(true)
508 }| has_ptr_meta(fx.tcx, pointee_ty),
513 if is_fat_ptr(fx, from_ty) {
514 if is_fat_ptr(fx, to_ty) {
515 // fat-ptr -> fat-ptr
516 lval.write_cvalue(fx, operand.cast_pointer_to(dest_layout));
518 // fat-ptr -> thin-ptr
// Dropping the metadata half of the scalar pair gives the thin pointer.
519 let (ptr, _extra) = operand.load_scalar_pair(fx);
520 lval.write_cvalue(fx, CValue::by_val(ptr, dest_layout))
522 } else if let ty::Adt(adt_def, _substs) = from_ty.kind {
523 // enum -> discriminant value
524 assert!(adt_def.is_enum());
526 ty::Uint(_) | ty::Int(_) => {}
527 _ => unreachable!("cast adt {} -> {}", from_ty, to_ty),
530 use rustc_target::abi::{TagEncoding, Int, Variants};
532 match &operand.layout().variants {
// Single-variant enum: the discriminant is a compile-time constant,
// sign-extended when its type is signed.
533 Variants::Single { index } => {
534 let discr = operand.layout().ty.discriminant_for_variant(fx.tcx, *index).unwrap();
535 let discr = if discr.ty.is_signed() {
536 rustc_middle::mir::interpret::sign_extend(discr.val, fx.layout_of(discr.ty).size)
541 let discr = CValue::const_val(fx, fx.layout_of(to_ty), discr);
542 lval.write_cvalue(fx, discr);
// Directly tagged multi-variant enum: load the tag field and
// int-cast it to the destination type.
547 tag_encoding: TagEncoding::Direct,
550 let cast_to = fx.clif_type(dest_layout.ty).unwrap();
552 // Read the tag/niche-encoded discriminant from memory.
553 let encoded_discr = operand.value_field(fx, mir::Field::new(*tag_field));
554 let encoded_discr = encoded_discr.load_scalar(fx);
556 // Decode the discriminant (specifically if it's niche-encoded).
557 let signed = match tag.value {
558 Int(_, signed) => signed,
561 let val = clif_intcast(fx, encoded_discr, cast_to, signed);
562 let val = CValue::by_val(val, dest_layout);
563 lval.write_cvalue(fx, val);
565 Variants::Multiple { ..} => unreachable!(),
// Plain numeric cast (int/float conversions).
568 let to_clif_ty = fx.clif_type(to_ty).unwrap();
569 let from = operand.load_scalar(fx);
571 let res = clif_int_or_float_cast(
578 lval.write_cvalue(fx, CValue::by_val(res, dest_layout));
// Cast a non-capturing closure to a plain fn pointer by resolving its
// FnOnce shim instance.
581 Rvalue::Cast(CastKind::Pointer(PointerCast::ClosureFnPointer(_)), operand, _to_ty) => {
582 let operand = trans_operand(fx, operand);
583 match operand.layout().ty.kind {
584 ty::Closure(def_id, substs) => {
585 let instance = Instance::resolve_closure(
589 ty::ClosureKind::FnOnce,
590 ).polymorphize(fx.tcx);
591 let func_ref = fx.get_function_ref(instance);
592 let func_addr = fx.bcx.ins().func_addr(fx.pointer_type, func_ref);
593 lval.write_cvalue(fx, CValue::by_val(func_addr, lval.layout()));
595 _ => bug!("{} cannot be cast to a fn ptr", operand.layout().ty),
598 Rvalue::Cast(CastKind::Pointer(PointerCast::Unsize), operand, _to_ty) => {
599 let operand = trans_operand(fx, operand);
600 operand.unsize_value(fx, lval);
602 Rvalue::Discriminant(place) => {
603 let place = trans_place(fx, *place);
604 let value = place.to_cvalue(fx);
606 crate::discriminant::codegen_get_discriminant(fx, value, dest_layout);
607 lval.write_cvalue(fx, discr);
// `[x; N]`: evaluate the repeat count to a constant, then store the
// operand element-by-element. (Loop structure partially elided here.)
609 Rvalue::Repeat(operand, times) => {
610 let operand = trans_operand(fx, operand);
613 .eval(fx.tcx, ParamEnv::reveal_all())
615 .try_to_bits(fx.tcx.data_layout.pointer_size)
618 let index = fx.bcx.ins().iconst(fx.pointer_type, i as i64);
619 let to = lval.place_index(fx, index);
620 to.write_cvalue(fx, operand);
623 Rvalue::Len(place) => {
624 let place = trans_place(fx, *place);
625 let usize_layout = fx.layout_of(fx.tcx.types.usize);
626 let len = codegen_array_len(fx, place);
627 lval.write_cvalue(fx, CValue::by_val(len, usize_layout));
// `box expr`: call the exchange_malloc lang item with the content's size
// and alignment, then store the returned pointer.
629 Rvalue::NullaryOp(NullOp::Box, content_ty) => {
630 use rustc_hir::lang_items::ExchangeMallocFnLangItem;
632 let usize_type = fx.clif_type(fx.tcx.types.usize).unwrap();
633 let content_ty = fx.monomorphize(content_ty);
634 let layout = fx.layout_of(content_ty);
635 let llsize = fx.bcx.ins().iconst(usize_type, layout.size.bytes() as i64);
639 .iconst(usize_type, layout.align.abi.bytes() as i64);
640 let box_layout = fx.layout_of(fx.tcx.mk_box(content_ty));
643 let def_id = match fx.tcx.lang_items().require(ExchangeMallocFnLangItem) {
648 .fatal(&format!("allocation of `{}` {}", box_layout.ty, s));
651 let instance = ty::Instance::mono(fx.tcx, def_id).polymorphize(fx.tcx);
652 let func_ref = fx.get_function_ref(instance);
653 let call = fx.bcx.ins().call(func_ref, &[llsize, llalign]);
654 let ptr = fx.bcx.inst_results(call)[0];
655 lval.write_cvalue(fx, CValue::by_val(ptr, box_layout));
// `size_of`: the type must be Sized here; emit its size as a constant.
657 Rvalue::NullaryOp(NullOp::SizeOf, ty) => {
661 .is_sized(fx.tcx.at(stmt.source_info.span), ParamEnv::reveal_all()));
662 let ty_size = fx.layout_of(fx.monomorphize(ty)).size.bytes();
663 let val = CValue::const_val(fx, fx.layout_of(fx.tcx.types.usize), ty_size.into());
664 lval.write_cvalue(fx, val);
666 Rvalue::Aggregate(kind, operands) => match **kind {
667 AggregateKind::Array(_ty) => {
668 for (i, operand) in operands.into_iter().enumerate() {
669 let operand = trans_operand(fx, operand);
670 let index = fx.bcx.ins().iconst(fx.pointer_type, i as i64);
671 let to = lval.place_index(fx, index);
672 to.write_cvalue(fx, operand);
// Other aggregate kinds are flattened by earlier MIR passes.
675 _ => unreachable!("shouldn't exist at trans {:?}", to_place_and_rval.1),
// Statements with no codegen effect.
679 StatementKind::StorageLive(_)
680 | StatementKind::StorageDead(_)
682 | StatementKind::FakeRead(..)
683 | StatementKind::Retag { .. }
684 | StatementKind::AscribeUserType(..) => {}
// Legacy LLVM-style inline asm: only a handful of known asm templates used
// by the standard library are pattern-matched and emulated; everything
// else is a hard error.
686 StatementKind::LlvmInlineAsm(asm) => {
687 use rustc_span::symbol::Symbol;
693 let rustc_hir::LlvmInlineAsmInner {
694 asm: asm_code, // Name
695 outputs: output_names, // Vec<LlvmInlineAsmOutput>
696 inputs: input_names, // Vec<Name>
697 clobbers, // Vec<Name>
703 match asm_code.as_str().trim() {
// std's `cpuid` wrapper: validate the exact operand constraints, then
// emulate via the cpuid intrinsic helper.
707 "mov %rbx, %rsi\n cpuid\n xchg %rbx, %rsi" => {
708 assert_eq!(input_names, &[Symbol::intern("{eax}"), Symbol::intern("{ecx}")]);
709 assert_eq!(output_names.len(), 4);
710 for (i, c) in (&["={eax}", "={esi}", "={ecx}", "={edx}"]).iter().enumerate() {
711 assert_eq!(&output_names[i].constraint.as_str(), c);
712 assert!(!output_names[i].is_rw);
713 assert!(!output_names[i].is_indirect);
716 assert_eq!(clobbers, &[]);
719 assert!(!alignstack);
721 assert_eq!(inputs.len(), 2);
722 let leaf = trans_operand(fx, &inputs[0].1).load_scalar(fx); // %eax
723 let subleaf = trans_operand(fx, &inputs[1].1).load_scalar(fx); // %ecx
725 let (eax, ebx, ecx, edx) = crate::intrinsics::codegen_cpuid_call(fx, leaf, subleaf);
727 assert_eq!(outputs.len(), 4);
728 trans_place(fx, outputs[0]).write_cvalue(fx, CValue::by_val(eax, fx.layout_of(fx.tcx.types.u32)));
729 trans_place(fx, outputs[1]).write_cvalue(fx, CValue::by_val(ebx, fx.layout_of(fx.tcx.types.u32)));
730 trans_place(fx, outputs[2]).write_cvalue(fx, CValue::by_val(ecx, fx.layout_of(fx.tcx.types.u32)));
731 trans_place(fx, outputs[3]).write_cvalue(fx, CValue::by_val(edx, fx.layout_of(fx.tcx.types.u32)));
// `_xgetbv`-style asm: constraints are validated but the op itself traps
// as unimplemented.
734 assert_eq!(input_names, &[Symbol::intern("{ecx}")]);
736 assert_eq!(output_names.len(), 2);
737 for (i, c) in (&["={eax}", "={edx}"]).iter().enumerate() {
738 assert_eq!(&output_names[i].constraint.as_str(), c);
739 assert!(!output_names[i].is_rw);
740 assert!(!output_names[i].is_indirect);
743 assert_eq!(clobbers, &[]);
746 assert!(!alignstack);
748 crate::trap::trap_unimplemented(fx, "_xgetbv arch intrinsic is not supported");
750 // ___chkstk, ___chkstk_ms and __alloca are only used on Windows
751 _ if fx.tcx.symbol_name(fx.instance).name.starts_with("___chkstk") => {
752 crate::trap::trap_unimplemented(fx, "Stack probes are not supported");
754 _ if fx.tcx.symbol_name(fx.instance).name == "__alloca" => {
755 crate::trap::trap_unimplemented(fx, "Alloca is not supported");
757 // Used in sys::windows::abort_internal
759 crate::trap::trap_unimplemented(fx, "Windows abort");
761 _ => fx.tcx.sess.span_fatal(stmt.source_info.span, "Inline assembly is not supported"),
// Returns the length of `place` as a pointer-sized CLIF value: a constant for
// fixed-size arrays (evaluated from the const length), or the runtime length
// metadata for slices. Any other type is a compiler bug.
767 fn codegen_array_len<'tcx>(
768 fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
771 match place.layout().ty.kind {
772 ty::Array(_elem_ty, len) => {
// Array lengths are consts; evaluate to a usize and emit as an iconst.
773 let len = fx.monomorphize(&len)
774 .eval(fx.tcx, ParamEnv::reveal_all())
775 .eval_usize(fx.tcx, ParamEnv::reveal_all()) as i64;
776 fx.bcx.ins().iconst(fx.pointer_type, len)
// Slices carry their length as unsized-place metadata.
778 ty::Slice(_elem_ty) => place
779 .to_ptr_maybe_unsized()
781 .expect("Length metadata for slice place"),
782 _ => bug!("Rvalue::Len({:?})", place),
// Lowers a MIR `Place` to a `CPlace` by starting from the local's storage and
// applying each projection element (deref, field, indexing, subslicing,
// variant downcast) in order.
786 pub(crate) fn trans_place<'tcx>(
787 fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
790 let mut cplace = fx.get_local_place(place.local);
792 for elem in place.projection {
794 PlaceElem::Deref => {
795 cplace = cplace.place_deref(fx);
797 PlaceElem::Field(field, _ty) => {
798 cplace = cplace.place_field(fx, field);
// Runtime index: load the index value from its local, then index.
800 PlaceElem::Index(local) => {
801 let index = fx.get_local_place(local).to_cvalue(fx).load_scalar(fx);
802 cplace = cplace.place_index(fx, index);
// Constant index: either a fixed offset from the front, or (for
// `from_end`) `len - offset` computed at runtime.
804 PlaceElem::ConstantIndex {
809 let index = if !from_end {
810 fx.bcx.ins().iconst(fx.pointer_type, i64::from(offset))
812 let len = codegen_array_len(fx, cplace);
813 fx.bcx.ins().iadd_imm(len, -i64::from(offset))
815 cplace = cplace.place_index(fx, index);
817 PlaceElem::Subslice { from, to, from_end } => {
818 // These indices are generated by slice patterns.
819 // slice[from:-to] in Python terms.
821 match cplace.layout().ty.kind {
// Array subslice: a new fixed-length array place at an element offset.
822 ty::Array(elem_ty, _len) => {
823 assert!(!from_end, "array subslices are never `from_end`");
824 let elem_layout = fx.layout_of(elem_ty);
825 let ptr = cplace.to_ptr();
826 cplace = CPlace::for_ptr(
827 ptr.offset_i64(fx, elem_layout.size.bytes() as i64 * i64::from(from)),
828 fx.layout_of(fx.tcx.mk_array(elem_ty, u64::from(to) - u64::from(from))),
// Slice subslice: offset the pointer and shrink the runtime length
// metadata by `from + to`.
831 ty::Slice(elem_ty) => {
832 assert!(from_end, "slice subslices should be `from_end`");
833 let elem_layout = fx.layout_of(elem_ty);
834 let (ptr, len) = cplace.to_ptr_maybe_unsized();
835 let len = len.unwrap();
836 cplace = CPlace::for_ptr_with_extra(
837 ptr.offset_i64(fx, elem_layout.size.bytes() as i64 * i64::from(from)),
838 fx.bcx.ins().iadd_imm(len, -(i64::from(from) + i64::from(to))),
845 PlaceElem::Downcast(_adt_def, variant) => {
846 cplace = cplace.downcast_variant(fx, variant);
// Lowers a MIR `Operand` to a `CValue`: Move/Copy read the value out of the
// corresponding place; Constant goes through the constant-evaluation path.
854 pub(crate) fn trans_operand<'tcx>(
855 fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
856 operand: &Operand<'tcx>,
// Move and Copy are lowered identically here — the backend does not need to
// distinguish them once borrow checking has run.
859 Operand::Move(place) | Operand::Copy(place) => {
860 let cplace = trans_place(fx, *place);
863 Operand::Constant(const_) => crate::constant::trans_constant(fx, const_),