1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
12 use llvm::{self, ValueRef, BasicBlockRef};
13 use llvm::debuginfo::DIScope;
14 use rustc::ty::{self, Ty, TypeFoldable};
15 use rustc::ty::layout::{self, LayoutTyper};
16 use rustc::mir::{self, Mir};
17 use rustc::mir::tcx::LvalueTy;
18 use rustc::ty::subst::Substs;
19 use rustc::infer::TransNormalize;
20 use rustc::session::config::FullDebugInfo;
23 use common::{self, CrateContext, Funclet};
24 use debuginfo::{self, declare_local, VariableAccess, VariableKind, FunctionDebugContext};
25 use monomorphize::Instance;
29 use syntax_pos::{DUMMY_SP, NO_EXPANSION, BytePos, Span};
30 use syntax::symbol::keywords;
34 use rustc_data_structures::bitvec::BitVector;
35 use rustc_data_structures::indexed_vec::{IndexVec, Idx};
37 pub use self::constant::trans_static_initializer;
39 use self::analyze::CleanupKind;
40 use self::lvalue::{Alignment, LvalueRef};
41 use rustc::mir::traversal;
43 use self::operand::{OperandRef, OperandValue};
45 /// Master context for translating MIR.
// NOTE(review): this excerpt elides several lines of the struct body
// (original line numbers jump); fields shown are a partial view.
46 pub struct MirContext<'a, 'tcx:'a> {
// The MIR body being lowered to LLVM IR.
47 mir: &'a mir::Mir<'tcx>,
// Per-function debuginfo state; may be a disabled variant (see `debug_loc`).
49 debug_context: debuginfo::FunctionDebugContext,
// Crate-wide translation context (tcx, LLVM module, session, ...).
53 ccx: &'a CrateContext<'a, 'tcx>,
57 /// When unwinding is initiated, we have to store this personality
58 /// value somewhere so that we can load it and re-use it in the
59 /// resume instruction. The personality is (afaik) some kind of
60 /// value used for C++ unwinding, which must filter by type: we
61 /// don't really care about it very much. Anyway, this value
62 /// contains an alloca into which the personality is stored and
63 /// then later loaded when generating the DIVERGE_BLOCK.
64 llpersonalityslot: Option<ValueRef>,
66 /// A `Block` for each MIR `BasicBlock`
67 blocks: IndexVec<mir::BasicBlock, BasicBlockRef>,
69 /// The funclet status of each basic block
70 cleanup_kinds: IndexVec<mir::BasicBlock, analyze::CleanupKind>,
72 /// When targeting MSVC, this stores the cleanup info for each funclet
73 /// BB. This is initialized as we compute the funclets' head block in RPO.
74 funclets: &'a IndexVec<mir::BasicBlock, Option<Funclet>>,
76 /// This stores the landing-pad block for a given BB, computed lazily on GNU
77 /// and eagerly on MSVC.
78 landing_pads: IndexVec<mir::BasicBlock, Option<BasicBlockRef>>,
80 /// Cached unreachable block
81 unreachable_block: Option<BasicBlockRef>,
83 /// The location where each MIR arg/var/tmp/ret is stored. This is
84 /// usually an `LvalueRef` representing an alloca, but not always:
85 /// sometimes we can skip the alloca and just store the value
86 /// directly using an `OperandRef`, which makes for tighter LLVM
87 /// IR. The conditions for using an `OperandRef` are as follows:
89 /// - the type of the local must be judged "immediate" by `type_is_immediate`
90 /// - the operand must never be referenced indirectly
91 /// - we should not take its address using the `&` operator
92 /// - nor should it appear in an lvalue path like `tmp.a`
93 /// - the operand must be defined by an rvalue that can generate immediate
96 /// Avoiding allocs can also be important for certain intrinsics,
98 locals: IndexVec<mir::Local, LocalRef<'tcx>>,
100 /// Debug information for MIR scopes.
101 scopes: IndexVec<mir::VisibilityScope, debuginfo::MirDebugScope>,
103 /// If this function is being monomorphized, this contains the type substitutions used.
104 param_substs: &'tcx Substs<'tcx>,
// NOTE(review): method bodies below are partially elided in this excerpt
// (closing braces / else-branches missing); code left byte-identical.
107 impl<'a, 'tcx> MirContext<'a, 'tcx> {
// Apply this function's monomorphization substitutions (`param_substs`)
// to `value` via `trans_apply_param_substs`, returning the concrete value.
108 pub fn monomorphize<T>(&self, value: &T) -> T
109 where T: TransNormalize<'tcx>
111 self.ccx.tcx().trans_apply_param_substs(self.param_substs, value)
// Set the builder's current source location to the (scope, span)
// resolved from `source_info` by `debug_loc` below.
114 pub fn set_debug_loc(&mut self, bcx: &Builder, source_info: mir::SourceInfo) {
115 let (scope, span) = self.debug_loc(source_info);
116 debuginfo::set_source_location(&self.debug_context, bcx, scope, span);
// Resolve `source_info` to the (DIScope, Span) pair to attach to emitted
// instructions, rewriting macro-expansion spans to their outermost
// call site for better single-stepping (unless `-Z debug-macros`).
119 pub fn debug_loc(&mut self, source_info: mir::SourceInfo) -> (DIScope, Span) {
120 // Bail out if debug info emission is not enabled.
121 match self.debug_context {
122 FunctionDebugContext::DebugInfoDisabled |
123 FunctionDebugContext::FunctionWithoutDebugInfo => {
// Debuginfo disabled: return the raw scope metadata and span untouched.
124 return (self.scopes[source_info.scope].scope_metadata, source_info.span);
126 FunctionDebugContext::RegularContext(_) =>{}
129 // In order to have a good line stepping behavior in debugger, we overwrite debug
130 // locations of macro expansions with that of the outermost expansion site
131 // (unless the crate is being compiled with `-Z debug-macros`).
132 if source_info.span.ctxt == NO_EXPANSION ||
133 self.ccx.sess().opts.debugging_opts.debug_macros {
// Not a macro expansion (or -Z debug-macros): use the span as-is, but
// possibly extend the scope into the span's file (see helper below).
134 let scope = self.scope_metadata_for_loc(source_info.scope, source_info.span.lo);
135 (scope, source_info.span)
137 // Walk up the macro expansion chain until we reach a non-expanded span.
138 // We also stop at the function body level because no line stepping can occur
139 // at the level above that.
140 let mut span = source_info.span;
141 while span.ctxt != NO_EXPANSION && span.ctxt != self.mir.span.ctxt {
142 if let Some(info) = span.ctxt.outer().expn_info() {
143 span = info.call_site;
148 let scope = self.scope_metadata_for_loc(source_info.scope, span.lo);
149 // Use span of the outermost expansion site, while keeping the original lexical scope.
154 // DILocations inherit source file name from the parent DIScope. Due to macro expansions
155 // it may so happen that the current span belongs to a different file than the DIScope
156 // corresponding to span's containing visibility scope. If so, we need to create a DIScope
157 // "extension" into that file.
158 fn scope_metadata_for_loc(&self, scope_id: mir::VisibilityScope, pos: BytePos)
159 -> llvm::debuginfo::DIScope {
160 let scope_metadata = self.scopes[scope_id].scope_metadata;
// `pos` falls outside the file the scope was created for: extend the
// scope into the file actually containing `pos`.
161 if pos < self.scopes[scope_id].file_start_pos ||
162 pos >= self.scopes[scope_id].file_end_pos {
163 let cm = self.ccx.sess().codemap();
164 let defining_crate = self.debug_context.get_ref(DUMMY_SP).defining_crate;
165 debuginfo::extend_scope_to_file(self.ccx,
167 &cm.lookup_char_pos(pos).file,
// How a MIR local is represented during translation: either backed by
// memory (usually an alloca) or kept as an immediate SSA operand.
175 enum LocalRef<'tcx> {
// Local lives in an lvalue (see `MirContext::locals` for the criteria).
176 Lvalue(LvalueRef<'tcx>),
// Immediate local; `None` until its defining statement is translated.
177 Operand(Option<OperandRef<'tcx>>),
180 impl<'tcx> LocalRef<'tcx> {
// Create an operand-backed LocalRef for `ty`. ZSTs get a pre-made
// zero-sized operand immediately; all other types start undefined
// (`None`) until their defining rvalue is translated.
181 fn new_operand<'a>(ccx: &CrateContext<'a, 'tcx>,
182 ty: Ty<'tcx>) -> LocalRef<'tcx> {
183 if common::type_is_zero_size(ccx, ty) {
184 // Zero-size temporaries aren't always initialized, which
185 // doesn't matter because they don't contain data, but
186 // we need something in the operand.
187 LocalRef::Operand(Some(OperandRef::new_zst(ccx, ty)))
// Non-ZST: leave the slot empty; filled in at the definition site.
189 LocalRef::Operand(None)
194 ///////////////////////////////////////////////////////////////////////////
// Entry point: translate one monomorphized function's MIR into LLVM IR.
// Sets up debuginfo, allocates LLVM basic blocks and local storage, then
// translates each reachable MIR block in reverse postorder.
// NOTE(review): this excerpt elides parts of the signature and body
// (original line numbers jump); code left byte-identical.
196 pub fn trans_mir<'a, 'tcx: 'a>(
197 ccx: &'a CrateContext<'a, 'tcx>,
200 instance: Instance<'tcx>,
201 sig: ty::FnSig<'tcx>,
203 let fn_ty = FnType::new(ccx, sig, &[]);
204 debug!("fn_ty: {:?}", fn_ty);
206 debuginfo::create_function_debug_context(ccx, instance, sig, llfn, mir);
207 let bcx = Builder::new_block(ccx, llfn, "start");
// Only functions containing cleanup blocks need an EH personality.
209 if mir.basic_blocks().iter().any(|bb| bb.is_cleanup) {
210 bcx.set_personality_fn(ccx.eh_personality());
213 let cleanup_kinds = analyze::cleanup_kinds(&mir);
214 // Allocate a `Block` for every basic block, except
215 // the start block, if nothing loops back to it.
216 let reentrant_start_block = !mir.predecessors_for(mir::START_BLOCK).is_empty();
217 let block_bcxs: IndexVec<mir::BasicBlock, BasicBlockRef> =
218 mir.basic_blocks().indices().map(|bb| {
// Non-reentrant START_BLOCK reuses the entry block (branch elided here).
219 if bb == mir::START_BLOCK && !reentrant_start_block {
222 bcx.build_sibling_block(&format!("{:?}", bb)).llbb()
226 // Compute debuginfo scopes from MIR scopes.
227 let scopes = debuginfo::create_mir_scopes(ccx, mir, &debug_context);
228 let (landing_pads, funclets) = create_funclets(&bcx, &cleanup_kinds, &block_bcxs);
// Assemble the master context (some field initializers elided in excerpt).
230 let mut mircx = MirContext {
235 llpersonalityslot: None,
237 unreachable_block: None,
238 cleanup_kinds: cleanup_kinds,
239 landing_pads: landing_pads,
242 locals: IndexVec::new(),
243 debug_context: debug_context,
// Monomorphization must have resolved all inference variables by now.
245 assert!(!instance.substs.needs_infer());
// Which locals must live in memory (addressed, aggregated, etc.).
250 let lvalue_locals = analyze::lvalue_locals(&mircx);
252 // Allocate variable and temp allocas
254 let args = arg_local_refs(&bcx, &mircx, &mircx.scopes, &lvalue_locals);
// Decide, per local, between an alloca-backed lvalue and an SSA operand.
256 let mut allocate_local = |local| {
257 let decl = &mir.local_decls[local];
258 let ty = mircx.monomorphize(&decl.ty);
// Named locals (user variables) may still become operands when they
// don't need memory and full debuginfo isn't requested.
260 if let Some(name) = decl.name {
262 let debug_scope = mircx.scopes[decl.source_info.scope];
263 let dbg = debug_scope.is_valid() && bcx.sess().opts.debuginfo == FullDebugInfo;
265 if !lvalue_locals.contains(local.index()) && !dbg {
266 debug!("alloc: {:?} ({}) -> operand", local, name);
267 return LocalRef::new_operand(bcx.ccx, ty);
270 debug!("alloc: {:?} ({}) -> lvalue", local, name);
271 assert!(!ty.has_erasable_regions());
272 let lvalue = LvalueRef::alloca(&bcx, ty, &name.as_str());
// Emit llvm.dbg.declare-style debuginfo for the named variable.
274 let (scope, span) = mircx.debug_loc(decl.source_info);
275 declare_local(&bcx, &mircx.debug_context, name, ty, scope,
276 VariableAccess::DirectVariable { alloca: lvalue.llval },
277 VariableKind::LocalVariable, span);
279 LocalRef::Lvalue(lvalue)
281 // Temporary or return pointer
// Indirect return: reuse the caller-provided out-pointer (LLVM param 0).
282 if local == mir::RETURN_POINTER && mircx.fn_ty.ret.is_indirect() {
283 debug!("alloc: {:?} (return pointer) -> lvalue", local);
284 let llretptr = llvm::get_param(llfn, 0);
285 LocalRef::Lvalue(LvalueRef::new_sized(llretptr, LvalueTy::from_ty(ty),
286 Alignment::AbiAligned))
287 } else if lvalue_locals.contains(local.index()) {
288 debug!("alloc: {:?} -> lvalue", local);
289 assert!(!ty.has_erasable_regions());
290 LocalRef::Lvalue(LvalueRef::alloca(&bcx, ty, &format!("{:?}", local)))
292 // If this is an immediate local, we do not create an
293 // alloca in advance. Instead we wait until we see the
294 // definition and update the operand there.
295 debug!("alloc: {:?} -> operand", local);
296 LocalRef::new_operand(bcx.ccx, ty)
// Locals are ordered: return pointer, then args, then vars/temps.
301 let retptr = allocate_local(mir::RETURN_POINTER);
303 .chain(args.into_iter())
304 .chain(mir.vars_and_temps_iter().map(allocate_local))
308 // Branch to the START block, if it's not the entry block.
309 if reentrant_start_block {
310 bcx.br(mircx.blocks[mir::START_BLOCK]);
313 // Up until here, IR instructions for this function have explicitly not been annotated with
314 // source code location, so we don't step into call setup code. From here on, source location
315 // emitting should be enabled.
316 debuginfo::start_emitting_source_locations(&mircx.debug_context);
318 let rpo = traversal::reverse_postorder(&mir);
319 let mut visited = BitVector::new(mir.basic_blocks().len());
321 // Translate the body of each block using reverse postorder
323 visited.insert(bb.index());
324 mircx.trans_block(bb);
327 // Remove blocks that haven't been visited, or have no
// Delete LLVM blocks that were pre-allocated but never translated.
329 for bb in mir.basic_blocks().indices() {
331 if !visited.contains(bb.index()) {
332 debug!("trans_mir: block {:?} was not visited", bb);
334 llvm::LLVMDeleteBasicBlock(mircx.blocks[bb]);
// For each basic block, build its MSVC-SEH funclet entry (a dedicated
// block holding a cleanuppad that branches to the real block) when the
// block is a funclet head and the target wants MSVC-style SEH; the
// non-funclet arm is elided in this excerpt. Returns parallel vectors of
// per-block landing-pad blocks and funclet info.
340 fn create_funclets<'a, 'tcx>(
341 bcx: &Builder<'a, 'tcx>,
342 cleanup_kinds: &IndexVec<mir::BasicBlock, CleanupKind>,
343 block_bcxs: &IndexVec<mir::BasicBlock, BasicBlockRef>)
344 -> (IndexVec<mir::BasicBlock, Option<BasicBlockRef>>,
345 IndexVec<mir::BasicBlock, Option<Funclet>>)
347 block_bcxs.iter_enumerated().zip(cleanup_kinds).map(|((bb, &llbb), cleanup_kind)| {
348 match *cleanup_kind {
349 CleanupKind::Funclet if base::wants_msvc_seh(bcx.sess()) => {
// Funclet head: emit `funclet_<bb>` containing the cleanuppad, then
// branch into the block proper.
350 let cleanup_bcx = bcx.build_sibling_block(&format!("funclet_{:?}", bb));
351 let cleanup = cleanup_bcx.cleanup_pad(None, &[]);
352 cleanup_bcx.br(llbb);
353 (Some(cleanup_bcx.llbb()), Some(Funclet::new(cleanup)))
360 /// Produce, for each argument, a `ValueRef` pointing at the
361 /// argument's value. As arguments are lvalues, these are always
// NOTE(review): large parts of this function are elided in this excerpt
// (original line numbers jump); code left byte-identical.
363 fn arg_local_refs<'a, 'tcx>(bcx: &Builder<'a, 'tcx>,
364 mircx: &MirContext<'a, 'tcx>,
365 scopes: &IndexVec<mir::VisibilityScope, debuginfo::MirDebugScope>,
366 lvalue_locals: &BitVector)
367 -> Vec<LocalRef<'tcx>> {
// If the return is indirect, LLVM param 0 is the out-pointer, so real
// arguments start at index 1.
371 let mut llarg_idx = mircx.fn_ty.ret.is_indirect() as usize;
373 // Get the argument scope, if it exists and if we need it.
374 let arg_scope = scopes[mir::ARGUMENT_VISIBILITY_SCOPE];
375 let arg_scope = if arg_scope.is_valid() && bcx.sess().opts.debuginfo == FullDebugInfo {
376 Some(arg_scope.scope_metadata)
381 mir.args_iter().enumerate().map(|(arg_index, local)| {
382 let arg_decl = &mir.local_decls[local];
383 let arg_ty = mircx.monomorphize(&arg_decl.ty);
385 if Some(local) == mir.spread_arg {
386 // This argument (e.g. the last argument in the "rust-call" ABI)
387 // is a tuple that was spread at the ABI level and now we have
388 // to reconstruct it into a tuple local variable, from multiple
389 // individual LLVM function arguments.
391 let tupled_arg_tys = match arg_ty.sty {
392 ty::TyTuple(ref tys, _) => tys,
393 _ => bug!("spread argument isn't a tuple?!")
396 let lvalue = LvalueRef::alloca(bcx, arg_ty, &format!("arg{}", arg_index));
// Store each tuple element from its own LLVM parameter(s) into the
// corresponding field of the alloca.  (`idx` is computed on an elided
// line — presumably derived from arg_index + i; confirm in full source.)
397 for (i, &tupled_arg_ty) in tupled_arg_tys.iter().enumerate() {
398 let (dst, _) = lvalue.trans_field_ptr(bcx, i);
399 let arg = &mircx.fn_ty.args[idx];
401 if common::type_is_fat_ptr(bcx.ccx, tupled_arg_ty) {
402 // We pass fat pointers as two words, but inside the tuple
403 // they are the two sub-fields of a single aggregate field.
404 let meta = &mircx.fn_ty.args[idx];
406 arg.store_fn_arg(bcx, &mut llarg_idx, base::get_dataptr(bcx, dst));
407 meta.store_fn_arg(bcx, &mut llarg_idx, base::get_meta(bcx, dst));
409 arg.store_fn_arg(bcx, &mut llarg_idx, dst);
413 // Now that we have one alloca that contains the aggregate value,
414 // we can create one debuginfo entry for the argument.
415 arg_scope.map(|scope| {
416 let variable_access = VariableAccess::DirectVariable {
421 &mircx.debug_context,
422 arg_decl.name.unwrap_or(keywords::Invalid.name()),
425 VariableKind::ArgumentVariable(arg_index + 1),
430 return LocalRef::Lvalue(lvalue);
433 let arg = &mircx.fn_ty.args[idx];
435 let llval = if arg.is_indirect() && bcx.sess().opts.debuginfo != FullDebugInfo {
436 // Don't copy an indirect argument to an alloca, the caller
437 // already put it in a temporary alloca and gave it up, unless
438 // we emit extra-debug-info, which requires local allocas :(.
440 if arg.pad.is_some() {
443 let llarg = llvm::get_param(bcx.llfn(), llarg_idx as c_uint);
// Direct, uncast, non-addressed argument with no debuginfo need:
// keep it as a pure SSA operand, no alloca at all.
446 } else if !lvalue_locals.contains(local.index()) &&
447 !arg.is_indirect() && arg.cast.is_none() &&
448 arg_scope.is_none() {
450 return LocalRef::new_operand(bcx.ccx, arg_ty);
453 // We don't have to cast or keep the argument in the alloca.
454 // FIXME(eddyb): We should figure out how to use llvm.dbg.value instead
455 // of putting everything in allocas just so we can use llvm.dbg.declare.
456 if arg.pad.is_some() {
459 let llarg = llvm::get_param(bcx.llfn(), llarg_idx as c_uint);
461 let val = if common::type_is_fat_ptr(bcx.ccx, arg_ty) {
// Fat pointers arrive as two separate LLVM params: data + metadata.
462 let meta = &mircx.fn_ty.args[idx];
464 assert_eq!((meta.cast, meta.pad), (None, None));
465 let llmeta = llvm::get_param(bcx.llfn(), llarg_idx as c_uint);
468 // FIXME(eddyb) As we can't perfectly represent the data and/or
469 // vtable pointer in a fat pointers in Rust's typesystem, and
470 // because we split fat pointers into two ArgType's, they're
471 // not the right type so we have to cast them for now.
472 let pointee = match arg_ty.sty {
473 ty::TyRef(_, ty::TypeAndMut{ty, ..}) |
474 ty::TyRawPtr(ty::TypeAndMut{ty, ..}) => ty,
475 ty::TyAdt(def, _) if def.is_box() => arg_ty.boxed_ty(),
478 let data_llty = type_of::in_memory_type_of(bcx.ccx, pointee);
479 let meta_llty = type_of::unsized_info_ty(bcx.ccx, pointee);
481 let llarg = bcx.pointercast(llarg, data_llty.ptr_to());
482 let llmeta = bcx.pointercast(llmeta, meta_llty);
484 OperandValue::Pair(llarg, llmeta)
486 OperandValue::Immediate(llarg)
488 let operand = OperandRef {
492 return LocalRef::Operand(Some(operand.unpack_if_pair(bcx)));
// Fallback: spill the argument into a fresh alloca.
494 let lltemp = LvalueRef::alloca(bcx, arg_ty, &format!("arg{}", arg_index));
495 if common::type_is_fat_ptr(bcx.ccx, arg_ty) {
496 // we pass fat pointers as two words, but we want to
497 // represent them internally as a pointer to two words,
498 // so make an alloca to store them in.
499 let meta = &mircx.fn_ty.args[idx];
501 arg.store_fn_arg(bcx, &mut llarg_idx, base::get_dataptr(bcx, lltemp.llval));
502 meta.store_fn_arg(bcx, &mut llarg_idx, base::get_meta(bcx, lltemp.llval));
504 // otherwise, arg is passed by value, so make a
505 // temporary and store it there
506 arg.store_fn_arg(bcx, &mut llarg_idx, lltemp.llval);
510 arg_scope.map(|scope| {
511 // Is this a regular argument?
512 if arg_index > 0 || mir.upvar_decls.is_empty() {
515 &mircx.debug_context,
516 arg_decl.name.unwrap_or(keywords::Invalid.name()),
519 VariableAccess::DirectVariable { alloca: llval },
520 VariableKind::ArgumentVariable(arg_index + 1),
526 // Or is it the closure environment?
// arg 0 of a closure is the environment; it may be passed by reference
// (`env_ref`) or by value.
527 let (closure_ty, env_ref) = if let ty::TyRef(_, mt) = arg_ty.sty {
532 let upvar_tys = if let ty::TyClosure(def_id, substs) = closure_ty.sty {
533 substs.upvar_tys(def_id, tcx)
535 bug!("upvar_decls with non-closure arg0 type `{}`", closure_ty);
538 // Store the pointer to closure data in an alloca for debuginfo
539 // because that's what the llvm.dbg.declare intrinsic expects.
541 // FIXME(eddyb) this shouldn't be necessary but SROA seems to
542 // mishandle DW_OP_plus not preceded by DW_OP_deref, i.e. it
543 // doesn't actually strip the offset when splitting the closure
544 // environment into its components so it ends up out of bounds.
545 let env_ptr = if !env_ref {
546 let alloc = bcx.alloca(common::val_ty(llval), "__debuginfo_env_ptr", None);
547 bcx.store(llval, alloc, None);
// Compute each capture's byte offset inside the closure environment.
553 let layout = bcx.ccx.layout_of(closure_ty);
554 let offsets = match *layout {
555 layout::Univariant { ref variant, .. } => &variant.offsets[..],
556 _ => bug!("Closures are only supposed to be Univariant")
559 for (i, (decl, ty)) in mir.upvar_decls.iter().zip(upvar_tys).enumerate() {
560 let byte_offset_of_var_in_env = offsets[i].bytes();
// DWARF expression: deref env ptr, add the field offset, deref again.
563 [llvm::LLVMRustDIBuilderCreateOpDeref(),
564 llvm::LLVMRustDIBuilderCreateOpPlus(),
565 byte_offset_of_var_in_env as i64,
566 llvm::LLVMRustDIBuilderCreateOpDeref()]
569 // The environment and the capture can each be indirect.
571 // FIXME(eddyb) see above why we have to keep
572 // a pointer in an alloca for debuginfo atm.
// `|| true` deliberately forces the full expression (see FIXME above).
573 let mut ops = if env_ref || true { &ops[..] } else { &ops[1..] };
// By-ref captures drop the trailing deref so the variable is the
// reference itself.
575 let ty = if let (true, &ty::TyRef(_, mt)) = (decl.by_ref, &ty.sty) {
578 ops = &ops[..ops.len() - 1];
582 let variable_access = VariableAccess::IndirectVariable {
584 address_operations: &ops
588 &mircx.debug_context,
593 VariableKind::CapturedVariable,
598 LocalRef::Lvalue(LvalueRef::new_sized(llval, LvalueTy::from_ty(arg_ty),
599 Alignment::AbiAligned))