use rustc::ty::{self, Ty, TypeFoldable, UpvarSubsts, Instance};
use rustc::ty::layout::{TyLayout, HasTyCtxt, FnTypeExt};
use rustc::mir::{self, Body};
use rustc::session::config::DebugInfo;
use rustc_target::abi::call::{FnType, PassMode};
use rustc_target::abi::{Variants, VariantIdx};
use crate::base;
use crate::debuginfo::{self, VariableAccess, VariableKind, FunctionDebugContext};
use crate::traits::*;

use syntax_pos::{DUMMY_SP, BytePos, Span};
use syntax::symbol::kw;

use std::iter;

use rustc_data_structures::bit_set::BitSet;
use rustc_data_structures::indexed_vec::IndexVec;

use self::analyze::CleanupKind;
use self::place::PlaceRef;
use rustc::mir::traversal;

use self::operand::{OperandRef, OperandValue};

/// Master context for codegenning from MIR.
pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    instance: Instance<'tcx>,

    mir: &'a mir::Body<'tcx>,

    debug_context: FunctionDebugContext<Bx::DIScope>,

    cx: &'a Bx::CodegenCx,

    fn_ty: FnType<'tcx, Ty<'tcx>>,

    /// When unwinding is initiated, we have to store this personality
    /// value somewhere so that we can load it and re-use it in the
    /// resume instruction. The personality is (afaik) some kind of
    /// value used for C++ unwinding, which must filter by type: we
    /// don't really care about it very much. Anyway, this value
    /// contains an alloca into which the personality is stored and
    /// then later loaded when generating the DIVERGE_BLOCK.
    personality_slot: Option<PlaceRef<'tcx, Bx::Value>>,

    /// A `Block` for each MIR `BasicBlock`.
    blocks: IndexVec<mir::BasicBlock, Bx::BasicBlock>,

    /// The funclet status of each basic block.
    cleanup_kinds: IndexVec<mir::BasicBlock, analyze::CleanupKind>,

    /// When targeting MSVC, this stores the cleanup info for each funclet
    /// BB. This is initialized as we compute the funclets' head block in RPO.
    funclets: IndexVec<mir::BasicBlock, Option<Bx::Funclet>>,

    /// This stores the landing-pad block for a given BB, computed lazily on GNU
    /// and eagerly on MSVC.
    landing_pads: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,

    /// Cached unreachable block.
    unreachable_block: Option<Bx::BasicBlock>,

    /// The location where each MIR arg/var/tmp/ret is stored. This is
    /// usually a `PlaceRef` representing an alloca, but not always:
    /// sometimes we can skip the alloca and just store the value
    /// directly using an `OperandRef`, which makes for tighter LLVM
    /// IR. The conditions for using an `OperandRef` are as follows:
    ///
    /// - the type of the local must be judged "immediate" by `is_llvm_immediate`
    /// - the operand must never be referenced indirectly
    ///     - we should not take its address using the `&` operator
    ///     - nor should it appear in a place path like `tmp.a`
    /// - the operand must be defined by an rvalue that can generate immediate
    ///   values
    ///
    /// Avoiding allocs can also be important for certain intrinsics,
    /// notably `expect`.
    locals: IndexVec<mir::Local, LocalRef<'tcx, Bx::Value>>,

    /// Debug information for MIR scopes.
    scopes: IndexVec<mir::SourceScope, debuginfo::MirDebugScope<Bx::DIScope>>,
}
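
// An illustration of the `locals` strategy above: in `let x = a + b;`, if
// `x: i32` is never borrowed, `x` is judged immediate and kept as a
// `LocalRef::Operand`, so no alloca is emitted for it; writing `&x` anywhere
// would force it into an alloca-backed `LocalRef::Place`.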

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    pub fn monomorphize<T>(&self, value: &T) -> T
        where T: TypeFoldable<'tcx>
    {
        self.cx.tcx().subst_and_normalize_erasing_regions(
            self.instance.substs,
            ty::ParamEnv::reveal_all(),
            value,
        )
    }
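
    // For example, when codegenning an instance of `fn first<T>(v: Vec<T>) -> T`
    // with `T = u32`, monomorphizing the declared type `Vec<T>` yields `Vec<u32>`
    // (illustrative; the substitutions applied are whatever `instance.substs`
    // carries for this monomorphization).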

    pub fn set_debug_loc(
        &mut self,
        bx: &mut Bx,
        source_info: mir::SourceInfo
    ) {
        let (scope, span) = self.debug_loc(source_info);
        bx.set_source_location(&mut self.debug_context, scope, span);
    }

    pub fn debug_loc(&self, source_info: mir::SourceInfo) -> (Option<Bx::DIScope>, Span) {
        // Bail out if debug info emission is not enabled.
        match self.debug_context {
            FunctionDebugContext::DebugInfoDisabled |
            FunctionDebugContext::FunctionWithoutDebugInfo => {
                return (self.scopes[source_info.scope].scope_metadata, source_info.span);
            }
            FunctionDebugContext::RegularContext(_) => {}
        }

        // In order to have good line stepping behavior in the debugger, we overwrite
        // debug locations of macro expansions with that of the outermost expansion site
        // (unless the crate is being compiled with `-Z debug-macros`).
        if !source_info.span.from_expansion() ||
           self.cx.sess().opts.debugging_opts.debug_macros {
            let scope = self.scope_metadata_for_loc(source_info.scope, source_info.span.lo());
            (scope, source_info.span)
        } else {
            // Walk up the macro expansion chain until we reach a non-expanded span.
            // We also stop at the function body level because no line stepping can occur
            // at the level above that.
            let span = syntax_pos::hygiene::walk_chain(source_info.span, self.mir.span.ctxt());
            let scope = self.scope_metadata_for_loc(source_info.scope, span.lo());
            // Use the span of the outermost expansion site, while keeping the original
            // lexical scope.
            (scope, span)
        }
    }
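
    // For instance, a statement expanded from `assert!(x > 0)` keeps its
    // original lexical scope but is assigned the span of the `assert!` call
    // site, so single-stepping in a debugger stays on the user's line instead
    // of jumping into the macro's definition.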

    // DILocations inherit the source file name from the parent DIScope. Due to macro
    // expansions it may happen that the current span belongs to a different file than
    // the DIScope corresponding to the span's containing source scope. If so, we need
    // to create a DIScope "extension" into that file.
    fn scope_metadata_for_loc(&self, scope_id: mir::SourceScope, pos: BytePos)
                              -> Option<Bx::DIScope> {
        let scope_metadata = self.scopes[scope_id].scope_metadata;
        if pos < self.scopes[scope_id].file_start_pos ||
           pos >= self.scopes[scope_id].file_end_pos {
            let sm = self.cx.sess().source_map();
            let defining_crate = self.debug_context.get_ref(DUMMY_SP).defining_crate;
            Some(self.cx.extend_scope_to_file(
                scope_metadata.unwrap(),
                &sm.lookup_char_pos(pos).file,
                defining_crate
            ))
        } else {
            scope_metadata
        }
    }
}

enum LocalRef<'tcx, V> {
    Place(PlaceRef<'tcx, V>),
    /// `UnsizedPlace(p)`: `p` itself is a thin pointer (indirect place).
    /// `*p` is the fat pointer that references the actual unsized place.
    /// Every time it is initialized, we have to reallocate the place
    /// and update the fat pointer. That's the reason why it is indirect.
    UnsizedPlace(PlaceRef<'tcx, V>),
    Operand(Option<OperandRef<'tcx, V>>),
}
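
// `Operand(None)` marks an SSA-like local that has no value yet; the
// assignment that defines it later fills in the actual `OperandRef`.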

impl<'a, 'tcx, V: CodegenObject> LocalRef<'tcx, V> {
    fn new_operand<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyLayout<'tcx>,
    ) -> LocalRef<'tcx, V> {
        if layout.is_zst() {
            // Zero-size temporaries aren't always initialized, which
            // doesn't matter because they don't contain data, but
            // we need something in the operand.
            LocalRef::Operand(Some(OperandRef::new_zst(bx, layout)))
        } else {
            LocalRef::Operand(None)
        }
    }
}
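
// Locals of zero-sized type (e.g., `()`) take the `is_zst` path above and get
// a synthesized operand up front, since no assignment may ever define them.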

///////////////////////////////////////////////////////////////////////////

pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    cx: &'a Bx::CodegenCx,
    llfn: Bx::Value,
    mir: &'a Body<'tcx>,
    instance: Instance<'tcx>,
    sig: ty::FnSig<'tcx>,
) {
    assert!(!instance.substs.needs_infer());

    let fn_ty = FnType::new(cx, sig, &[]);
    debug!("fn_ty: {:?}", fn_ty);
    let mut debug_context =
        cx.create_function_debug_context(instance, sig, llfn, mir);
    let mut bx = Bx::new_block(cx, llfn, "start");

    if mir.basic_blocks().iter().any(|bb| bb.is_cleanup) {
        bx.set_personality_fn(cx.eh_personality());
    }

    let cleanup_kinds = analyze::cleanup_kinds(&mir);
    // Allocate a `Block` for every basic block, except
    // the start block, if nothing loops back to it.
    let reentrant_start_block = !mir.predecessors_for(mir::START_BLOCK).is_empty();
    let block_bxs: IndexVec<mir::BasicBlock, Bx::BasicBlock> =
        mir.basic_blocks().indices().map(|bb| {
            if bb == mir::START_BLOCK && !reentrant_start_block {
                bx.llbb()
            } else {
                bx.build_sibling_block(&format!("{:?}", bb)).llbb()
            }
        }).collect();

    // Compute debuginfo scopes from MIR scopes.
    let scopes = cx.create_mir_scopes(mir, &mut debug_context);
    let (landing_pads, funclets) = create_funclets(mir, &mut bx, &cleanup_kinds, &block_bxs);
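
    // With MSVC-style unwinding, the funclets were just created eagerly above;
    // with GNU-style (landing-pad) unwinding, `landing_pads` starts out all
    // `None` and entries are filled in lazily as terminators require them.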

    let mut fx = FunctionCx {
        instance,
        mir,
        debug_context,
        cx,
        fn_ty,
        personality_slot: None,
        blocks: block_bxs,
        cleanup_kinds,
        funclets,
        landing_pads,
        unreachable_block: None,
        locals: IndexVec::new(),
        scopes,
    };

    let memory_locals = analyze::non_ssa_locals(&fx);

    // Allocate variable and temp allocas
    fx.locals = {
        let args = arg_local_refs(&mut bx, &fx, &memory_locals);

        let mut allocate_local = |local| {
            let decl = &mir.local_decls[local];
            let layout = bx.layout_of(fx.monomorphize(&decl.ty));
            assert!(!layout.ty.has_erasable_regions());

            if let Some(name) = decl.name {
                // User variable
                let debug_scope = fx.scopes[decl.visibility_scope];
                let dbg = debug_scope.is_valid() &&
                    bx.sess().opts.debuginfo == DebugInfo::Full;

                if !memory_locals.contains(local) && !dbg {
                    debug!("alloc: {:?} ({}) -> operand", local, name);
                    return LocalRef::new_operand(&mut bx, layout);
                }

                debug!("alloc: {:?} ({}) -> place", local, name);
                if layout.is_unsized() {
                    let indirect_place =
                        PlaceRef::alloca_unsized_indirect(&mut bx, layout);
                    bx.set_var_name(indirect_place.llval, name);
                    // FIXME: add an appropriate debuginfo
                    LocalRef::UnsizedPlace(indirect_place)
                } else {
                    let place = PlaceRef::alloca(&mut bx, layout);
                    bx.set_var_name(place.llval, name);
                    if dbg {
                        let (scope, span) = fx.debug_loc(mir::SourceInfo {
                            span: decl.source_info.span,
                            scope: decl.visibility_scope,
                        });
                        bx.declare_local(&fx.debug_context, name, layout.ty, scope.unwrap(),
                            VariableAccess::DirectVariable { alloca: place.llval },
                            VariableKind::LocalVariable, span);
                    }
                    LocalRef::Place(place)
                }
            } else {
                // Temporary or return place
                if local == mir::RETURN_PLACE && fx.fn_ty.ret.is_indirect() {
                    debug!("alloc: {:?} (return place) -> place", local);
                    let llretptr = bx.get_param(0);
                    LocalRef::Place(PlaceRef::new_sized(llretptr, layout))
                } else if memory_locals.contains(local) {
                    debug!("alloc: {:?} -> place", local);
                    if layout.is_unsized() {
                        let indirect_place = PlaceRef::alloca_unsized_indirect(&mut bx, layout);
                        bx.set_var_name(indirect_place.llval, format_args!("{:?}", local));
                        LocalRef::UnsizedPlace(indirect_place)
                    } else {
                        let place = PlaceRef::alloca(&mut bx, layout);
                        bx.set_var_name(place.llval, format_args!("{:?}", local));
                        LocalRef::Place(place)
                    }
                } else {
                    // If this is an immediate local, we do not create an
                    // alloca in advance. Instead we wait until we see the
                    // definition and update the operand there.
                    debug!("alloc: {:?} -> operand", local);
                    LocalRef::new_operand(&mut bx, layout)
                }
            }
        };

        let retptr = allocate_local(mir::RETURN_PLACE);
        iter::once(retptr)
            .chain(args.into_iter())
            .chain(mir.vars_and_temps_iter().map(allocate_local))
            .collect()
    };

    // Branch to the START block, if it's not the entry block.
    if reentrant_start_block {
        bx.br(fx.blocks[mir::START_BLOCK]);
    }

    // Up until here, IR instructions for this function have explicitly not been annotated with
    // source code location, so we don't step into call setup code. From here on, source location
    // emitting should be enabled.
    debuginfo::start_emitting_source_locations(&mut fx.debug_context);

    let rpo = traversal::reverse_postorder(&mir);
    let mut visited = BitSet::new_empty(mir.basic_blocks().len());

    // Codegen the body of each block using reverse postorder
    for (bb, _) in rpo {
        visited.insert(bb.index());
        fx.codegen_block(bb);
    }

    // Remove blocks that haven't been visited, or have no
    // predecessors.
    for bb in mir.basic_blocks().indices() {
        // Unreachable block
        if !visited.contains(bb.index()) {
            debug!("codegen_mir: block {:?} was not visited", bb);
            unsafe {
                bx.delete_basic_block(fx.blocks[bb]);
            }
        }
    }
}

fn create_funclets<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    mir: &'a Body<'tcx>,
    bx: &mut Bx,
    cleanup_kinds: &IndexVec<mir::BasicBlock, CleanupKind>,
    block_bxs: &IndexVec<mir::BasicBlock, Bx::BasicBlock>,
) -> (
    IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,
    IndexVec<mir::BasicBlock, Option<Bx::Funclet>>,
) {
    block_bxs.iter_enumerated().zip(cleanup_kinds).map(|((bb, &llbb), cleanup_kind)| {
        match *cleanup_kind {
            CleanupKind::Funclet if base::wants_msvc_seh(bx.sess()) => {}
            _ => return (None, None)
        }

        let funclet;
        let ret_llbb;
        match mir[bb].terminator.as_ref().map(|t| &t.kind) {
            // This is a basic block that we're aborting the program for,
            // notably in an `extern` function. These basic blocks are inserted
            // so that we assert that `extern` functions do indeed not panic,
            // and if they do we abort the process.
            //
            // On MSVC these are tricky though (where we're doing funclets). If
            // we were to do a cleanuppad (like below) the normal functions like
            // `longjmp` would trigger the abort logic, terminating the
            // program. Instead we insert the equivalent of `catch(...)` for C++
            // which magically doesn't trigger when `longjmp` flies over this
            // frame.
            //
            // Lots more discussion can be found on #48251, but this codegen is
            // modeled after clang's for:
            //
            //      try {
            //          foo();
            //      } catch (...) {
            //          bar();
            //      }
            Some(&mir::TerminatorKind::Abort) => {
                let mut cs_bx = bx.build_sibling_block(&format!("cs_funclet{:?}", bb));
                let mut cp_bx = bx.build_sibling_block(&format!("cp_funclet{:?}", bb));
                ret_llbb = cs_bx.llbb();

                let cs = cs_bx.catch_switch(None, None, 1);
                cs_bx.add_handler(cs, cp_bx.llbb());

                // The "null" here is actually an RTTI type descriptor for the
                // C++ personality function, but `catch (...)` has no type so
                // it's null. The 64 here is actually a bitfield which
                // represents that this is a catch-all block.
                let null = bx.const_null(bx.type_i8p());
                let sixty_four = bx.const_i32(64);
                funclet = cp_bx.catch_pad(cs, &[null, sixty_four, null]);
                cp_bx.br(llbb);
            }
            _ => {
                let mut cleanup_bx = bx.build_sibling_block(&format!("funclet_{:?}", bb));
                ret_llbb = cleanup_bx.llbb();
                funclet = cleanup_bx.cleanup_pad(None, &[]);
                cleanup_bx.br(llbb);
            }
        };

        (Some(ret_llbb), Some(funclet))
    }).unzip()
}

/// Produces, for each argument, a `Value` pointing at the
/// argument's value. As arguments are places, these are always
/// indirect.
fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    fx: &FunctionCx<'a, 'tcx, Bx>,
    memory_locals: &BitSet<mir::Local>,
) -> Vec<LocalRef<'tcx, Bx::Value>> {
    let mir = fx.mir;
    let tcx = fx.cx.tcx();
    let mut idx = 0;
    let mut llarg_idx = fx.fn_ty.ret.is_indirect() as usize;

    // Get the argument scope, if it exists and if we need it.
    let arg_scope = fx.scopes[mir::OUTERMOST_SOURCE_SCOPE];
    let arg_scope = if bx.sess().opts.debuginfo == DebugInfo::Full {
        arg_scope.scope_metadata
    } else {
        None
    };
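
    // Note that `llarg_idx` starts at 1 when the return value is passed
    // indirectly: the first LLVM parameter is then the (sret) return pointer,
    // and the MIR arguments begin at parameter 1.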

    mir.args_iter().enumerate().map(|(arg_index, local)| {
        let arg_decl = &mir.local_decls[local];

        // FIXME(eddyb) don't allocate a `String` unless it gets used.
        let name = if let Some(name) = arg_decl.name {
            name.as_str().to_string()
        } else {
            format!("{:?}", local)
        };

        if Some(local) == mir.spread_arg {
            // This argument (e.g., the last argument in the "rust-call" ABI)
            // is a tuple that was spread at the ABI level and now we have
            // to reconstruct it into a tuple local variable, from multiple
            // individual LLVM function arguments.

            let arg_ty = fx.monomorphize(&arg_decl.ty);
            let tupled_arg_tys = match arg_ty.kind {
                ty::Tuple(ref tys) => tys,
                _ => bug!("spread argument isn't a tuple?!")
            };

            let place = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
            bx.set_var_name(place.llval, name);
            for i in 0..tupled_arg_tys.len() {
                let arg = &fx.fn_ty.args[idx];
                idx += 1;
                if arg.pad.is_some() {
                    llarg_idx += 1;
                }
                let pr_field = place.project_field(bx, i);
                bx.store_fn_arg(arg, &mut llarg_idx, pr_field);
            }

            // Now that we have one alloca that contains the aggregate value,
            // we can create one debuginfo entry for the argument.
            arg_scope.map(|scope| {
                let variable_access = VariableAccess::DirectVariable {
                    alloca: place.llval
                };
                bx.declare_local(
                    &fx.debug_context,
                    arg_decl.name.unwrap_or(kw::Invalid),
                    arg_ty, scope,
                    variable_access,
                    VariableKind::ArgumentVariable(arg_index + 1),
                    DUMMY_SP
                );
            });

            return LocalRef::Place(place);
        }
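
        // For example, calling a closure `f(a, b)` goes through
        // `FnMut::call_mut(&mut f, (a, b))`: the tuple `(a, b)` is the spread
        // argument, passed as separate LLVM arguments and reassembled into a
        // single tuple alloca by the block above.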

        if fx.fn_ty.c_variadic && arg_index == fx.fn_ty.args.len() {
            let arg_ty = fx.monomorphize(&arg_decl.ty);

            let va_list = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
            bx.set_var_name(va_list.llval, name);
            bx.va_start(va_list.llval);

            arg_scope.map(|scope| {
                let variable_access = VariableAccess::DirectVariable {
                    alloca: va_list.llval
                };
                bx.declare_local(
                    &fx.debug_context,
                    arg_decl.name.unwrap_or(kw::Invalid),
                    va_list.layout.ty,
                    scope,
                    variable_access,
                    VariableKind::ArgumentVariable(arg_index + 1),
                    DUMMY_SP
                );
            });

            return LocalRef::Place(va_list);
        }
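
        // The block above handles the hidden `VaList` of a `c_variadic`
        // function: it occupies the MIR argument slot just past the declared
        // ABI arguments, so `va_start` is emitted for it instead of copying an
        // LLVM parameter.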

        let arg = &fx.fn_ty.args[idx];
        idx += 1;
        if arg.pad.is_some() {
            llarg_idx += 1;
        }

        if arg_scope.is_none() && !memory_locals.contains(local) {
            // We don't have to cast or keep the argument in the alloca.
            // FIXME(eddyb): We should figure out how to use llvm.dbg.value instead
            // of putting everything in allocas just so we can use llvm.dbg.declare.
            let local = |op| LocalRef::Operand(Some(op));
            match arg.mode {
                PassMode::Ignore => {
                    return local(OperandRef::new_zst(bx, arg.layout));
                }
                PassMode::Direct(_) => {
                    let llarg = bx.get_param(llarg_idx);
                    bx.set_var_name(llarg, &name);
                    llarg_idx += 1;
                    return local(
                        OperandRef::from_immediate_or_packed_pair(bx, llarg, arg.layout));
                }
                PassMode::Pair(..) => {
                    let (a, b) = (bx.get_param(llarg_idx), bx.get_param(llarg_idx + 1));
                    llarg_idx += 2;

                    // FIXME(eddyb) these are scalar components,
                    // maybe extract the high-level fields?
                    bx.set_var_name(a, format_args!("{}.0", name));
                    bx.set_var_name(b, format_args!("{}.1", name));

                    return local(OperandRef {
                        val: OperandValue::Pair(a, b),
                        layout: arg.layout
                    });
                }
                _ => {}
            }
        }
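
        // A `&str` or slice argument is a typical `PassMode::Pair` above: its
        // pointer and length arrive as two separate LLVM parameters and stay
        // together as an immediate pair operand, with no alloca.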

        let place = if arg.is_sized_indirect() {
            // Don't copy an indirect argument to an alloca; the caller
            // already put it in a temporary alloca and gave it up.
            // FIXME: lifetimes
            let llarg = bx.get_param(llarg_idx);
            bx.set_var_name(llarg, &name);
            llarg_idx += 1;
            PlaceRef::new_sized(llarg, arg.layout)
        } else if arg.is_unsized_indirect() {
            // As the storage for the indirect argument lives during
            // the whole function call, we just copy the fat pointer.
            let llarg = bx.get_param(llarg_idx);
            llarg_idx += 1;
            let llextra = bx.get_param(llarg_idx);
            llarg_idx += 1;
            let indirect_operand = OperandValue::Pair(llarg, llextra);

            let tmp = PlaceRef::alloca_unsized_indirect(bx, arg.layout);
            bx.set_var_name(tmp.llval, name);
            indirect_operand.store(bx, tmp);
            tmp
        } else {
            let tmp = PlaceRef::alloca(bx, arg.layout);
            bx.set_var_name(tmp.llval, name);
            bx.store_fn_arg(arg, &mut llarg_idx, tmp);
            tmp
        };

        let upvar_debuginfo = &mir.__upvar_debuginfo_codegen_only_do_not_use;
        arg_scope.map(|scope| {
            // Is this a regular argument?
            if arg_index > 0 || upvar_debuginfo.is_empty() {
                // The Rust ABI passes indirect variables using a pointer and a manual copy, so we
                // need to insert a deref here, but the C ABI uses a pointer and a copy using the
                // byval attribute, for which LLVM always does the deref itself,
                // so we must not add it.
                let variable_access = VariableAccess::DirectVariable {
                    alloca: place.llval
                };

                bx.declare_local(
                    &fx.debug_context,
                    arg_decl.name.unwrap_or(kw::Invalid),
                    arg.layout.ty,
                    scope,
                    variable_access,
                    VariableKind::ArgumentVariable(arg_index + 1),
                    DUMMY_SP
                );
                return;
            }

            let pin_did = tcx.lang_items().pin_type();
            // Or is it the closure environment?
            let (closure_layout, env_ref) = match arg.layout.ty.kind {
                ty::RawPtr(ty::TypeAndMut { ty, .. }) |
                ty::Ref(_, ty, _) => (bx.layout_of(ty), true),
                ty::Adt(def, substs) if Some(def.did) == pin_did => {
                    match substs.type_at(0).kind {
                        ty::Ref(_, ty, _) => (bx.layout_of(ty), true),
                        _ => (arg.layout, false),
                    }
                }
                _ => (arg.layout, false)
            };

            let (def_id, upvar_substs) = match closure_layout.ty.kind {
                ty::Closure(def_id, substs) => (def_id,
                                                UpvarSubsts::Closure(substs)),
                ty::Generator(def_id, substs, _) => (def_id, UpvarSubsts::Generator(substs)),
                _ => bug!("upvar debuginfo with non-closure arg0 type `{}`", closure_layout.ty)
            };
            let upvar_tys = upvar_substs.upvar_tys(def_id, tcx);

            let extra_locals = {
                let upvars = upvar_debuginfo
                    .iter()
                    .zip(upvar_tys)
                    .enumerate()
                    .map(|(i, (upvar, ty))| {
                        (None, i, upvar.debug_name, upvar.by_ref, ty, scope, DUMMY_SP)
                    });

                let generator_fields = mir.generator_layout.as_ref().map(|generator_layout| {
                    let (def_id, gen_substs) = match closure_layout.ty.kind {
                        ty::Generator(def_id, substs, _) => (def_id, substs),
                        _ => bug!("generator layout without generator substs"),
                    };
                    let state_tys = gen_substs.state_tys(def_id, tcx);

                    generator_layout.variant_fields.iter()
                        .zip(state_tys)
                        .enumerate()
                        .flat_map(move |(variant_idx, (fields, tys))| {
                            let variant_idx = Some(VariantIdx::from(variant_idx));
                            fields.iter()
                                .zip(tys)
                                .enumerate()
                                .filter_map(move |(i, (field, ty))| {
                                    let decl = &generator_layout.
                                        __local_debuginfo_codegen_only_do_not_use[*field];
                                    if let Some(name) = decl.name {
                                        let ty = fx.monomorphize(&ty);
                                        let (var_scope, var_span) = fx.debug_loc(mir::SourceInfo {
                                            span: decl.source_info.span,
                                            scope: decl.visibility_scope,
                                        });
                                        let var_scope = var_scope.unwrap_or(scope);
                                        Some((variant_idx, i, name, false, ty, var_scope, var_span))
                                    } else {
                                        None
                                    }
                                })
                        })
                }).into_iter().flatten();

                upvars.chain(generator_fields)
            };

            for (variant_idx, field, name, by_ref, ty, var_scope, var_span) in extra_locals {
                let fields = match variant_idx {
                    Some(variant_idx) => {
                        match &closure_layout.variants {
                            Variants::Multiple { variants, .. } => {
                                &variants[variant_idx].fields
                            }
                            _ => bug!("variant index on univariant layout"),
                        }
                    }
                    None => &closure_layout.fields,
                };
                let byte_offset_of_var_in_env = fields.offset(field).bytes();

                let ops = bx.debuginfo_upvar_ops_sequence(byte_offset_of_var_in_env);

                // The environment and the capture can each be indirect.
                let mut ops = if env_ref { &ops[..] } else { &ops[1..] };

                let ty = if let (true, &ty::Ref(_, ty, _)) = (by_ref, &ty.kind) {
                    ty
                } else {
                    ops = &ops[..ops.len() - 1];
                    ty
                };

                let variable_access = VariableAccess::IndirectVariable {
                    alloca: place.llval,
                    address_operations: &ops
                };
                bx.declare_local(
                    &fx.debug_context,
                    name,
                    ty,
                    var_scope,
                    variable_access,
                    VariableKind::LocalVariable,
                    var_span
                );
            }
        });

        if arg.is_unsized_indirect() {
            LocalRef::UnsizedPlace(place)
        } else {
            LocalRef::Place(place)
        }
    }).collect()
}