1 use rustc::ty::adjustment::PointerCast;
// Codegen a single monomorphized function instance into Cranelift IR,
// define it in the module, and emit optional per-function debuginfo.
// NOTE(review): lines are missing from this excerpt; comments describe
// only the visible fragments.
5 pub fn trans_fn<'a, 'clif, 'tcx: 'a, B: Backend + 'static>(
6 cx: &mut crate::CodegenCx<'clif, 'tcx, B>,
7 instance: Instance<'tcx>,
// Look up the optimized MIR for this instance.
12 let mir = tcx.instance_mir(instance.def);
// Declare the function: symbol name plus Cranelift signature.
15 let (name, sig) = get_function_name_and_sig(tcx, instance, false);
16 let func_id = cx.module.declare_function(&name, linkage, &sig).unwrap();
// Create a per-function debug context only when a module-level one
// exists (hence the `.map`).
17 let mut debug_context = cx
20 .map(|debug_context| FunctionDebugContext::new(tcx, debug_context, mir, &name, &sig));
22 // Make FunctionBuilder
23 let mut func = Function::with_name_signature(ExternalName::user(0, 0), sig);
24 let mut func_ctx = FunctionBuilderContext::new();
25 let mut bcx = FunctionBuilder::new(&mut func, &mut func_ctx);
// Pre-create one Cranelift EBB per MIR basic block so terminators can
// reference any target before it has been translated.
28 let start_ebb = bcx.create_ebb();
29 let mut ebb_map: HashMap<BasicBlock, Ebb> = HashMap::new();
30 for (bb, _bb_data) in mir.basic_blocks().iter_enumerated() {
31 ebb_map.insert(bb, bcx.create_ebb());
35 let pointer_type = cx.module.target_config().pointer_type();
36 let clif_comments = crate::pretty_clif::CommentWriter::new(tcx, instance);
// Bundle all per-function codegen state into a FunctionCx.
38 let mut fx = FunctionCx {
48 local_map: HashMap::new(),
51 constants: &mut cx.ccx,
52 caches: &mut cx.caches,
53 source_info_set: indexmap::IndexSet::new(),
// Translate the ABI prelude and then the body; unimplemented features
// are reported against the function's span.
56 with_unimpl_span(fx.mir.span, || {
57 crate::abi::codegen_fn_prelude(&mut fx, start_ebb);
58 codegen_fn_content(&mut fx);
61 // Recover all necessary data from fx, before accessing func will prevent future access to it.
62 let instance = fx.instance;
63 let clif_comments = fx.clif_comments;
64 let source_info_set = fx.source_info_set;
// Dump the unoptimized CLIF to a file (debug builds only).
66 #[cfg(debug_assertions)]
67 crate::pretty_clif::write_clif_file(cx.tcx, "unopt", instance, &func, &clif_comments, None);
// Run the Cranelift verifier before handing the function to the module.
70 verify_func(tcx, &clif_comments, &func);
73 let context = &mut cx.caches.context;
// Compile and define the function body in the module.
76 .define_function(func_id, context)
// Compute value-label live ranges, used below for debuginfo.
79 let value_ranges = context.build_value_labels_ranges(cx.module.isa()).expect("value location ranges");
81 // Write optimized function to file for debugging
82 #[cfg(debug_assertions)]
83 crate::pretty_clif::write_clif_file(cx.tcx, "opt", instance, &context.func, &clif_comments, Some(&value_ranges));
85 // Define debuginfo for function
86 let isa = cx.module.isa();
89 .map(|x| x.define(tcx, context, isa, &source_info_set));
91 // Clear context to make it usable for the next function
// Run the Cranelift IR verifier on `func`. On failure, report the raw
// error through the session, then abort compilation with a fatal error
// carrying a pretty-printed, source-annotated verifier diagnostic.
95 fn verify_func(tcx: TyCtxt, writer: &crate::pretty_clif::CommentWriter, func: &Function) {
96 let flags = settings::Flags::new(settings::builder());
97 match ::cranelift::codegen::verify_function(&func, &flags) {
100 tcx.sess.err(&format!("{:?}", err));
101 let pretty_error = ::cranelift::codegen::print_errors::pretty_verifier_error(
// Pass the comment writer so the dump includes source annotations.
104 Some(Box::new(writer)),
108 .fatal(&format!("cranelift verify error:\n{}", pretty_error));
// Translate every MIR basic block of the current function: all
// statements first, then the block terminator. Cleanup (unwind) blocks
// are not supported by this backend.
// NOTE(review): lines are missing from this excerpt; comments describe
// only the visible fragments.
113 fn codegen_fn_content<'a, 'tcx: 'a>(fx: &mut FunctionCx<'a, 'tcx, impl Backend>) {
114 for (bb, bb_data) in fx.mir.basic_blocks().iter_enumerated() {
115 if bb_data.is_cleanup {
116 // Unwinding after panicking is not supported
// Switch the builder to the EBB pre-created for this MIR block.
120 let ebb = fx.get_ebb(bb);
121 fx.bcx.switch_to_block(ebb);
// Translate each statement, keeping the debug location in sync.
124 for stmt in &bb_data.statements {
125 fx.set_debug_loc(stmt.source_info);
126 trans_stmt(fx, ebb, stmt);
// Debug builds: attach the terminator's textual head as a CLIF
// comment on the last instruction emitted so far.
129 #[cfg(debug_assertions)]
131 let mut terminator_head = "\n".to_string();
135 .fmt_head(&mut terminator_head)
137 let inst = fx.bcx.func.layout.last_inst(ebb).unwrap();
138 fx.add_comment(inst, terminator_head);
141 fx.set_debug_loc(bb_data.terminator().source_info);
143 match &bb_data.terminator().kind {
// Unconditional jump to the target block.
144 TerminatorKind::Goto { target } => {
145 let ebb = fx.get_ebb(*target);
146 fx.bcx.ins().jump(ebb, &[]);
148 TerminatorKind::Return => {
149 crate::abi::codegen_return(fx);
// Assert: branch to `target` when the condition holds, otherwise
// fall through into a panic trap.
151 TerminatorKind::Assert {
158 let cond = trans_operand(fx, cond).load_scalar(fx);
159 // TODO HACK brz/brnz for i8/i16 is not yet implemented
160 let cond = fx.bcx.ins().uextend(types::I32, cond);
161 let target = fx.get_ebb(*target);
163 fx.bcx.ins().brnz(cond, target, &[]);
165 fx.bcx.ins().brz(cond, target, &[]);
167 trap_panic(fx, format!("[panic] Assert {:?} at {:?} failed.", msg, bb_data.terminator().source_info.span));
// SwitchInt lowers to a cranelift-frontend Switch; the last entry
// of `targets` is the otherwise/default edge.
170 TerminatorKind::SwitchInt {
176 let discr = trans_operand(fx, discr).load_scalar(fx);
177 let mut switch = ::cranelift::frontend::Switch::new();
178 for (i, value) in values.iter().enumerate() {
179 let ebb = fx.get_ebb(targets[i]);
180 switch.set_entry(*value as u64, ebb);
182 let otherwise_ebb = fx.get_ebb(targets[targets.len() - 1]);
183 switch.emit(&mut fx.bcx, discr, otherwise_ebb);
185 TerminatorKind::Call {
192 crate::abi::codegen_terminator_call(fx, func, args, destination);
// Unwinding paths are compiled to unreachable traps.
194 TerminatorKind::Resume | TerminatorKind::Abort => {
195 trap_unreachable(fx, "[corruption] Unwinding bb reached.");
197 TerminatorKind::Unreachable => {
198 trap_unreachable(fx, "[corruption] Hit unreachable code.");
// These terminator kinds are removed by earlier MIR passes and must
// never reach codegen.
200 TerminatorKind::Yield { .. }
201 | TerminatorKind::FalseEdges { .. }
202 | TerminatorKind::FalseUnwind { .. }
203 | TerminatorKind::DropAndReplace { .. }
204 | TerminatorKind::GeneratorDrop => {
205 bug!("shouldn't exist at trans {:?}", bb_data.terminator());
207 TerminatorKind::Drop {
212 let drop_place = trans_place(fx, location);
213 crate::abi::codegen_drop(fx, drop_place);
// After the drop, continue at the target block.
215 let target_ebb = fx.get_ebb(*target);
216 fx.bcx.ins().jump(target_ebb, &[]);
// All blocks are filled; seal them so the SSA builder can resolve
// block parameters.
221 fx.bcx.seal_all_blocks();
// Translate a single MIR statement into Cranelift IR.
// NOTE(review): lines are missing from this excerpt; comments describe
// only the visible fragments.
225 fn trans_stmt<'a, 'tcx: 'a>(
226 fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
228 stmt: &Statement<'tcx>,
// If codegen panics, print the offending statement to aid debugging.
230 let _print_guard = PrintOnPanic(|| format!("stmt {:?}", stmt));
232 fx.set_debug_loc(stmt.source_info);
// Debug builds: annotate the last emitted instruction with the
// statement's text; storage markers are skipped as noise.
234 #[cfg(debug_assertions)]
236 StatementKind::StorageLive(..) | StatementKind::StorageDead(..) => {} // Those are not very useful
238 let inst = fx.bcx.func.layout.last_inst(cur_ebb).unwrap();
239 fx.add_comment(inst, format!("{:?}", stmt));
244 StatementKind::SetDiscriminant {
248 let place = trans_place(fx, place);
249 crate::discriminant::codegen_set_discriminant(fx, place, *variant_index);
// Assign: evaluate the rvalue and write it into the destination place.
251 StatementKind::Assign(to_place, rval) => {
252 let lval = trans_place(fx, to_place);
253 let dest_layout = lval.layout();
255 Rvalue::Use(operand) => {
256 let val = trans_operand(fx, operand);
257 lval.write_cvalue(fx, val);
// Taking a reference stores the place's address into the lvalue.
259 Rvalue::Ref(_, _, place) => {
260 let place = trans_place(fx, place);
261 place.write_place_ref(fx, lval);
// Binary ops dispatch on the monomorphized operand type.
263 Rvalue::BinaryOp(bin_op, lhs, rhs) => {
264 let ty = fx.monomorphize(&lhs.ty(fx.mir, fx.tcx));
265 let lhs = trans_operand(fx, lhs);
266 let rhs = trans_operand(fx, rhs);
268 let res = match ty.sty {
269 ty::Bool => crate::num::trans_bool_binop(fx, *bin_op, lhs, rhs),
// Trailing bool is the signedness flag (false = unsigned).
271 crate::num::trans_int_binop(fx, *bin_op, lhs, rhs, lval.layout().ty, false)
274 crate::num::trans_int_binop(fx, *bin_op, lhs, rhs, lval.layout().ty, true)
276 ty::Float(_) => crate::num::trans_float_binop(fx, *bin_op, lhs, rhs, lval.layout().ty),
277 ty::Char => crate::num::trans_char_binop(fx, *bin_op, lhs, rhs, lval.layout().ty),
278 ty::RawPtr(..) | ty::FnPtr(..) => {
279 crate::num::trans_ptr_binop(fx, *bin_op, lhs, rhs, lval.layout().ty)
281 _ => unimplemented!("binop {:?} for {:?}", bin_op, ty),
283 lval.write_cvalue(fx, res);
// Checked binops yield a (value, overflow-flag) pair. When overflow
// checks are off, the flag is a constant 0 and the plain op is used.
285 Rvalue::CheckedBinaryOp(bin_op, lhs, rhs) => {
286 let ty = fx.monomorphize(&lhs.ty(fx.mir, fx.tcx));
287 let lhs = trans_operand(fx, lhs);
288 let rhs = trans_operand(fx, rhs);
290 let signed = type_sign(ty);
292 let res = if !fx.tcx.sess.overflow_checks() {
293 let val = crate::num::trans_int_binop(fx, *bin_op, lhs, rhs, lhs.layout().ty, signed).load_scalar(fx);
294 let is_overflow = fx.bcx.ins().iconst(types::I8, 0);
295 CValue::by_val_pair(val, is_overflow, lval.layout())
297 crate::num::trans_checked_int_binop(fx, *bin_op, lhs, rhs, lval.layout().ty, signed)
300 lval.write_cvalue(fx, res);
302 Rvalue::UnaryOp(un_op, operand) => {
303 let operand = trans_operand(fx, operand);
304 let layout = operand.layout();
305 let val = operand.load_scalar(fx);
306 let res = match un_op {
308 match layout.ty.sty {
// Boolean Not: compare against 0 and re-materialize as an I8 bool.
310 let val = fx.bcx.ins().uextend(types::I32, val); // WORKAROUND for CraneStation/cranelift#466
311 let res = fx.bcx.ins().icmp_imm(IntCC::Equal, val, 0);
312 fx.bcx.ins().bint(types::I8, res)
// Integer Not is a plain bitwise negation.
314 ty::Uint(_) | ty::Int(_) => {
315 fx.bcx.ins().bnot(val)
317 _ => unimplemented!("un op Not for {:?}", layout.ty),
// Neg lowers to `0 - val`; i128 negation is not supported yet.
320 UnOp::Neg => match layout.ty.sty {
322 let clif_ty = fx.clif_type(layout.ty).unwrap();
323 if clif_ty == types::I128 {
324 crate::trap::trap_unreachable_ret_value(fx, layout, "i128 neg is not yet supported").load_scalar(fx)
326 let zero = fx.bcx.ins().iconst(clif_ty, 0);
327 fx.bcx.ins().isub(zero, val)
330 ty::Float(_) => fx.bcx.ins().fneg(val),
331 _ => unimplemented!("un op Neg for {:?}", layout.ty),
334 lval.write_cvalue(fx, CValue::by_val(res, layout));
// ReifyFnPointer: materialize the address of a concrete FnDef.
336 Rvalue::Cast(CastKind::Pointer(PointerCast::ReifyFnPointer), operand, ty) => {
337 let layout = fx.layout_of(ty);
339 .monomorphize(&operand.ty(&fx.mir.local_decls, fx.tcx))
342 ty::FnDef(def_id, substs) => {
343 let func_ref = fx.get_function_ref(
344 Instance::resolve(fx.tcx, ParamEnv::reveal_all(), def_id, substs)
347 let func_addr = fx.bcx.ins().func_addr(fx.pointer_type, func_ref);
348 lval.write_cvalue(fx, CValue::by_val(func_addr, layout));
350 _ => bug!("Trying to ReifyFnPointer on non FnDef {:?}", ty),
// These pointer casts only change the type, not the representation.
353 Rvalue::Cast(CastKind::Pointer(PointerCast::UnsafeFnPointer), operand, ty)
354 | Rvalue::Cast(CastKind::Pointer(PointerCast::MutToConstPointer), operand, ty) => {
355 let operand = trans_operand(fx, operand);
356 let layout = fx.layout_of(ty);
357 lval.write_cvalue(fx, operand.unchecked_cast_to(layout));
359 Rvalue::Cast(CastKind::Misc, operand, to_ty) => {
360 let operand = trans_operand(fx, operand);
361 let from_ty = operand.layout().ty;
// A pointer is "fat" when its pointee layout is unsized (the pointer
// carries extra metadata alongside the address).
363 fn is_fat_ptr<'a, 'tcx: 'a>(fx: &FunctionCx<'a, 'tcx, impl Backend>, ty: Ty<'tcx>) -> bool {
366 .map(|ty::TypeAndMut {ty: pointee_ty, mutbl: _ }| fx.layout_of(pointee_ty).is_unsized())
370 if is_fat_ptr(fx, from_ty) {
371 if is_fat_ptr(fx, to_ty) {
372 // fat-ptr -> fat-ptr
373 lval.write_cvalue(fx, operand.unchecked_cast_to(dest_layout));
375 // fat-ptr -> thin-ptr
376 let (ptr, _extra) = operand.load_scalar_pair(fx);
377 lval.write_cvalue(fx, CValue::by_val(ptr, dest_layout))
// Casting an enum reads its discriminant; only int targets are valid.
379 } else if let ty::Adt(adt_def, _substs) = from_ty.sty {
380 // enum -> discriminant value
381 assert!(adt_def.is_enum());
383 ty::Uint(_) | ty::Int(_) => {},
384 _ => unreachable!("cast adt {} -> {}", from_ty, to_ty),
387 // FIXME avoid forcing to stack
389 CPlace::for_addr(operand.force_stack(fx), operand.layout());
390 let discr = crate::discriminant::codegen_get_discriminant(fx, place, fx.layout_of(to_ty));
391 lval.write_cvalue(fx, discr);
// Plain scalar-to-scalar cast (int/float conversion).
393 let to_clif_ty = fx.clif_type(to_ty).unwrap();
394 let from = operand.load_scalar(fx);
396 let res = clif_int_or_float_cast(fx, from, type_sign(from_ty), to_clif_ty, type_sign(to_ty));
397 lval.write_cvalue(fx, CValue::by_val(res, dest_layout));
// ClosureFnPointer: resolve the closure as a FnOnce instance and take
// its function address.
400 Rvalue::Cast(CastKind::Pointer(PointerCast::ClosureFnPointer(_)), operand, _ty) => {
401 let operand = trans_operand(fx, operand);
402 match operand.layout().ty.sty {
403 ty::Closure(def_id, substs) => {
404 let instance = Instance::resolve_closure(
408 ty::ClosureKind::FnOnce,
410 let func_ref = fx.get_function_ref(instance);
411 let func_addr = fx.bcx.ins().func_addr(fx.pointer_type, func_ref);
412 lval.write_cvalue(fx, CValue::by_val(func_addr, lval.layout()));
415 bug!("{} cannot be cast to a fn ptr", operand.layout().ty)
// Unsize coercions (e.g. array ref -> slice ref) are delegated to the
// value's unsize helper.
419 Rvalue::Cast(CastKind::Pointer(PointerCast::Unsize), operand, _ty) => {
420 let operand = trans_operand(fx, operand);
421 operand.unsize_value(fx, lval);
423 Rvalue::Discriminant(place) => {
424 let place = trans_place(fx, place);
425 let discr = crate::discriminant::codegen_get_discriminant(fx, place, dest_layout);
426 lval.write_cvalue(fx, discr);
// Repeat: write the same operand into each element slot of the
// destination.
428 Rvalue::Repeat(operand, times) => {
429 let operand = trans_operand(fx, operand);
431 let index = fx.bcx.ins().iconst(fx.pointer_type, i as i64);
432 let to = lval.place_index(fx, index);
433 to.write_cvalue(fx, operand);
436 Rvalue::Len(place) => {
437 let place = trans_place(fx, place);
438 let usize_layout = fx.layout_of(fx.tcx.types.usize);
439 let len = codegen_array_len(fx, place);
440 lval.write_cvalue(fx, CValue::by_val(len, usize_layout));
// Box allocation: call the exchange_malloc lang item with the
// content's size and alignment, producing the box pointer.
442 Rvalue::NullaryOp(NullOp::Box, content_ty) => {
443 use rustc::middle::lang_items::ExchangeMallocFnLangItem;
445 let usize_type = fx.clif_type(fx.tcx.types.usize).unwrap();
446 let layout = fx.layout_of(content_ty);
447 let llsize = fx.bcx.ins().iconst(usize_type, layout.size.bytes() as i64);
451 .iconst(usize_type, layout.align.abi.bytes() as i64);
452 let box_layout = fx.layout_of(fx.tcx.mk_box(content_ty));
// A missing lang item is a fatal (not internal) error.
455 let def_id = match fx.tcx.lang_items().require(ExchangeMallocFnLangItem) {
460 .fatal(&format!("allocation of `{}` {}", box_layout.ty, s));
463 let instance = ty::Instance::mono(fx.tcx, def_id);
464 let func_ref = fx.get_function_ref(instance);
465 let call = fx.bcx.ins().call(func_ref, &[llsize, llalign]);
466 let ptr = fx.bcx.inst_results(call)[0];
467 lval.write_cvalue(fx, CValue::by_val(ptr, box_layout));
// SizeOf of a sized type is a compile-time constant.
469 Rvalue::NullaryOp(NullOp::SizeOf, ty) => {
473 .is_sized(fx.tcx.at(DUMMY_SP), ParamEnv::reveal_all()));
474 let ty_size = fx.layout_of(ty).size.bytes();
475 let val = CValue::const_val(fx, fx.tcx.types.usize, ty_size.into());
476 lval.write_cvalue(fx, val);
// Array aggregates are written element by element; other aggregate
// kinds are expected to have been lowered before codegen.
478 Rvalue::Aggregate(kind, operands) => match **kind {
479 AggregateKind::Array(_ty) => {
480 for (i, operand) in operands.into_iter().enumerate() {
481 let operand = trans_operand(fx, operand);
482 let index = fx.bcx.ins().iconst(fx.pointer_type, i as i64);
483 let to = lval.place_index(fx, index);
484 to.write_cvalue(fx, operand);
487 _ => unimpl!("shouldn't exist at trans {:?}", rval),
// Statement kinds with no runtime effect at codegen time.
491 StatementKind::StorageLive(_)
492 | StatementKind::StorageDead(_)
494 | StatementKind::FakeRead(..)
495 | StatementKind::Retag { .. }
496 | StatementKind::AscribeUserType(..) => {}
// Inline asm: only a handful of known stdarch templates are accepted;
// each is validated then lowered to an explicit unimplemented trap.
498 StatementKind::InlineAsm(asm) => {
499 use syntax::ast::Name;
500 let InlineAsm { asm, outputs: _, inputs: _ } = &**asm;
501 let rustc::hir::InlineAsm {
502 asm: asm_code, // Name
503 outputs, // Vec<Name>
505 clobbers, // Vec<Name>
508 dialect: _, // syntax::ast::AsmDialect
512 match &*asm_code.as_str() {
// cpuid template: verify exact operand constraints before trapping.
513 "cpuid" | "cpuid\n" => {
514 assert_eq!(inputs, &[Name::intern("{eax}"), Name::intern("{ecx}")]);
516 assert_eq!(outputs.len(), 4);
517 for (i, c) in (&["={eax}", "={ebx}", "={ecx}", "={edx}"]).iter().enumerate() {
518 assert_eq!(&outputs[i].constraint.as_str(), c);
519 assert!(!outputs[i].is_rw);
520 assert!(!outputs[i].is_indirect);
523 assert_eq!(clobbers, &[Name::intern("rbx")]);
526 assert!(!alignstack);
528 crate::trap::trap_unimplemented(fx, "__cpuid_count arch intrinsic is not supported");
// NOTE(review): the match-arm pattern for this case is not visible in
// this excerpt; the trap message indicates it handles _xgetbv.
531 assert_eq!(inputs, &[Name::intern("{ecx}")]);
533 assert_eq!(outputs.len(), 2);
534 for (i, c) in (&["={eax}", "={edx}"]).iter().enumerate() {
535 assert_eq!(&outputs[i].constraint.as_str(), c);
536 assert!(!outputs[i].is_rw);
537 assert!(!outputs[i].is_indirect);
540 assert_eq!(clobbers, &[]);
543 assert!(!alignstack);
545 crate::trap::trap_unimplemented(fx, "_xgetbv arch intrinsic is not supported");
// Special-case std's stack probe function by symbol name.
547 _ if fx.tcx.symbol_name(fx.instance).as_str() == "__rust_probestack" => {
548 crate::trap::trap_unimplemented(fx, "__rust_probestack is not supported");
550 _ => unimpl!("Inline assembly is not supported"),
// Compute the runtime length of an array or slice place.
// Arrays have a constant length baked into the type (emitted as an
// iconst of pointer width); slices carry their length as the unsized
// metadata stored alongside the address.
556 fn codegen_array_len<'a, 'tcx: 'a>(
557 fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
560 match place.layout().ty.sty {
561 ty::Array(_elem_ty, len) => {
// Force-evaluate the const length and materialize it as an iconst.
562 let len = crate::constant::force_eval_const(fx, len)
563 .eval_usize(fx.tcx, ParamEnv::reveal_all()) as i64;
564 fx.bcx.ins().iconst(fx.pointer_type, len)
// Slices: the length lives in the place's unsized metadata.
566 ty::Slice(_elem_ty) => place
567 .to_addr_maybe_unsized(fx)
569 .expect("Length metadata for slice place"),
570 _ => bug!("Rvalue::Len({:?})", place),
// Translate a MIR place to a CPlace: resolve the base (a local, a
// static, or a promoted constant) and then apply the place's
// projection chain on top of it.
574 pub fn trans_place<'a, 'tcx: 'a>(
575 fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
578 let base = match &place.base {
579 PlaceBase::Local(local) => fx.get_local_place(*local),
580 PlaceBase::Static(static_) => match static_.kind {
581 StaticKind::Static(def_id) => {
582 crate::constant::codegen_static_ref(fx, def_id, static_.ty)
584 StaticKind::Promoted(promoted) => {
585 crate::constant::trans_promoted(fx, promoted, static_.ty)
// Apply the (possibly empty) projection chain to the base place.
590 trans_place_projection(fx, base, &place.projection)
// Recursively apply a MIR projection chain on top of a base CPlace.
// `projection` is optional: `None` means the chain is empty.
// NOTE(review): lines are missing from this excerpt; comments describe
// only the visible fragments.
593 pub fn trans_place_projection<'a, 'tcx: 'a>(
594 fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
596 projection: &Option<Box<Projection<'tcx>>>,
598 let projection = if let Some(projection) = projection {
// Translate the rest of the chain first, then apply this element.
604 let base = trans_place_projection(fx, base, &projection.base);
606 match projection.elem {
607 ProjectionElem::Deref => base.place_deref(fx),
608 ProjectionElem::Field(field, _ty) => base.place_field(fx, field),
// Index: load the index value from the given local and index into base.
609 ProjectionElem::Index(local) => {
610 let index = fx.get_local_place(local).to_cvalue(fx).load_scalar(fx);
611 base.place_index(fx, index)
// ConstantIndex: a fixed offset, optionally counted from the end
// (in which case it is `len - offset`).
613 ProjectionElem::ConstantIndex {
618 let index = if !from_end {
619 fx.bcx.ins().iconst(fx.pointer_type, offset as i64)
621 let len = codegen_array_len(fx, base);
622 fx.bcx.ins().iadd_imm(len, -(offset as i64))
624 base.place_index(fx, index)
626 ProjectionElem::Subslice { from, to } => {
627 // These indices are generated by slice patterns.
628 // slice[from:-to] in Python terms.
630 match base.layout().ty.sty {
// Arrays: offset the pointer by `from` elements and shrink the
// array type by `from + to` elements.
631 ty::Array(elem_ty, len) => {
632 let elem_layout = fx.layout_of(elem_ty);
633 let ptr = base.to_addr(fx);
634 let len = crate::constant::force_eval_const(fx, len)
635 .eval_usize(fx.tcx, ParamEnv::reveal_all());
637 fx.bcx.ins().iadd_imm(ptr, elem_layout.size.bytes() as i64 * from as i64),
638 fx.layout_of(fx.tcx.mk_array(elem_ty, len - from as u64 - to as u64)),
// Slices: offset the pointer and adjust the runtime length metadata.
641 ty::Slice(elem_ty) => {
642 let elem_layout = fx.layout_of(elem_ty);
643 let (ptr, len) = base.to_addr_maybe_unsized(fx);
644 let len = len.unwrap();
645 CPlace::for_addr_with_extra(
646 fx.bcx.ins().iadd_imm(ptr, elem_layout.size.bytes() as i64 * from as i64),
647 fx.bcx.ins().iadd_imm(len, -(from as i64 + to as i64)),
654 ProjectionElem::Downcast(_adt_def, variant) => base.downcast_variant(fx, variant),
658 pub fn trans_operand<'a, 'tcx>(
659 fx: &mut FunctionCx<'a, 'tcx, impl Backend>,
660 operand: &Operand<'tcx>,
663 Operand::Move(place) | Operand::Copy(place) => {
664 let cplace = trans_place(fx, place);
667 Operand::Constant(const_) => crate::constant::trans_constant(fx, const_),