1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
12 use llvm::{self, ValueRef};
14 use rustc::mir::repr as mir;
15 use rustc::mir::tcx::LvalueTy;
16 use session::config::FullDebugInfo;
18 use common::{self, Block, BlockAndBuilder, CrateContext, FunctionContext, C_null};
19 use debuginfo::{self, declare_local, DebugLoc, VariableAccess, VariableKind, FunctionDebugContext};
23 use syntax_pos::{DUMMY_SP, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos};
24 use syntax::parse::token::keywords;
30 use basic_block::BasicBlock;
32 use rustc_data_structures::bitvec::BitVector;
33 use rustc_data_structures::indexed_vec::{IndexVec, Idx};
35 pub use self::constant::trans_static_initializer;
37 use self::lvalue::{LvalueRef, get_dataptr, get_meta};
38 use rustc::mir::traversal;
40 use self::operand::{OperandRef, OperandValue};
/// MIR for a function body, either borrowed from a shared map (`Ref`) or
/// reference-counted (`Owned`). The `Deref` impl below lets both cases be
/// used uniformly as `&mir::Mir`.
// NOTE(review): listing is an excerpt — the enum's closing brace is not
// visible here (embedded numbering jumps 45 -> 48).
43 pub enum CachedMir<'mir, 'tcx: 'mir> {
44 Ref(&'mir mir::Mir<'tcx>),
45 Owned(Rc<mir::Mir<'tcx>>)
// Make `CachedMir` transparently usable as `&mir::Mir`, whichever variant
// holds the body.
// NOTE(review): excerpt — the `match` scrutinee line and closing braces are
// missing from this view (numbering jumps 50 -> 52).
48 impl<'mir, 'tcx: 'mir> Deref for CachedMir<'mir, 'tcx> {
49 type Target = mir::Mir<'tcx>;
50 fn deref(&self) -> &mir::Mir<'tcx> {
52 CachedMir::Ref(r) => r,
53 CachedMir::Owned(ref rc) => rc
58 /// Master context for translating MIR.
// NOTE(review): excerpt — some fields and the closing brace are not visible
// here (embedded numbering is non-contiguous, e.g. 60 -> 63, 105 -> 108).
59 pub struct MirContext<'bcx, 'tcx:'bcx> {
/// The MIR body being translated (see `CachedMir` above).
60 mir: CachedMir<'bcx, 'tcx>,
/// Per-function translation state; used in this file for the debug
/// context, crate context / session access, and the LLVM function value.
63 fcx: &'bcx common::FunctionContext<'bcx, 'tcx>,
65 /// When unwinding is initiated, we have to store this personality
66 /// value somewhere so that we can load it and re-use it in the
67 /// resume instruction. The personality is (afaik) some kind of
68 /// value used for C++ unwinding, which must filter by type: we
69 /// don't really care about it very much. Anyway, this value
70 /// contains an alloca into which the personality is stored and
71 /// then later loaded when generating the DIVERGE_BLOCK.
72 llpersonalityslot: Option<ValueRef>,
74 /// A `Block` for each MIR `BasicBlock`
75 blocks: IndexVec<mir::BasicBlock, Block<'bcx, 'tcx>>,
77 /// The funclet status of each basic block
78 cleanup_kinds: IndexVec<mir::BasicBlock, analyze::CleanupKind>,
80 /// This stores the landing-pad block for a given BB, computed lazily on GNU
81 /// and eagerly on MSVC.
82 landing_pads: IndexVec<mir::BasicBlock, Option<Block<'bcx, 'tcx>>>,
84 /// Cached unreachable block
85 unreachable_block: Option<Block<'bcx, 'tcx>>,
87 /// The location where each MIR arg/var/tmp/ret is stored. This is
88 /// usually an `LvalueRef` representing an alloca, but not always:
89 /// sometimes we can skip the alloca and just store the value
90 /// directly using an `OperandRef`, which makes for tighter LLVM
91 /// IR. The conditions for using an `OperandRef` are as follows:
93 /// - the type of the local must be judged "immediate" by `type_is_immediate`
94 /// - the operand must never be referenced indirectly
95 /// - we should not take its address using the `&` operator
96 /// - nor should it appear in an lvalue path like `tmp.a`
97 /// - the operand must be defined by an rvalue that can generate immediate
100 /// Avoiding allocs can also be important for certain intrinsics,
101 /// notably `expect`.
102 locals: IndexVec<mir::Local, LocalRef<'tcx>>,
104 /// Debug information for MIR scopes.
105 scopes: IndexVec<mir::VisibilityScope, debuginfo::MirDebugScope>,
// NOTE(review): excerpt — several lines (else-arms, closing braces, some
// arguments) are missing from this view; embedded numbering is
// non-contiguous (e.g. 116 -> 119, 139 -> 144, 160 -> 162).
108 impl<'blk, 'tcx> MirContext<'blk, 'tcx> {
/// Maps a MIR `SourceInfo` (span + visibility scope) to the `DebugLoc`
/// used when emitting LLVM instructions for that statement.
109 pub fn debug_loc(&mut self, source_info: mir::SourceInfo) -> DebugLoc {
110 // Bail out if debug info emission is not enabled.
111 match self.fcx.debug_context {
112 FunctionDebugContext::DebugInfoDisabled |
113 FunctionDebugContext::FunctionWithoutDebugInfo => {
114 // Can't return DebugLoc::None here because intrinsic::trans_intrinsic_call()
115 // relies on debug location to obtain span of the call site.
116 return DebugLoc::ScopeAt(self.scopes[source_info.scope].scope_metadata,
// Full debuginfo: fall through to the span-adjustment logic below.
119 FunctionDebugContext::RegularContext(_) =>{}
122 // In order to have a good line stepping behavior in debugger, we overwrite debug
123 // locations of macro expansions with that of the outermost expansion site
124 // (unless the crate is being compiled with `-Z debug-macros`).
125 if source_info.span.expn_id == NO_EXPANSION ||
126 source_info.span.expn_id == COMMAND_LINE_EXPN ||
127 self.fcx.ccx.sess().opts.debugging_opts.debug_macros {
// Not a macro expansion (or -Z debug-macros): use the span as-is.
129 let scope_metadata = self.scope_metadata_for_loc(source_info.scope,
130 source_info.span.lo);
131 DebugLoc::ScopeAt(scope_metadata, source_info.span)
133 let cm = self.fcx.ccx.sess().codemap();
134 // Walk up the macro expansion chain until we reach a non-expanded span.
135 let mut span = source_info.span;
136 while span.expn_id != NO_EXPANSION && span.expn_id != COMMAND_LINE_EXPN {
137 if let Some(callsite_span) = cm.with_expn_info(span.expn_id,
138 |ei| ei.map(|ei| ei.call_site.clone())) {
139 span = callsite_span;
144 let scope_metadata = self.scope_metadata_for_loc(source_info.scope, span.lo);
145 // Use span of the outermost call site, while keeping the original lexical scope
146 DebugLoc::ScopeAt(scope_metadata, span)
150 // DILocations inherit source file name from the parent DIScope. Due to macro expansions
151 // it may so happen that the current span belongs to a different file than the DIScope
152 // corresponding to span's containing visibility scope. If so, we need to create a DIScope
153 // "extension" into that file.
154 fn scope_metadata_for_loc(&self, scope_id: mir::VisibilityScope, pos: BytePos)
155 -> llvm::debuginfo::DIScope {
156 let scope_metadata = self.scopes[scope_id].scope_metadata;
// Only extend when `pos` falls outside the byte range of the scope's file.
157 if pos < self.scopes[scope_id].file_start_pos ||
158 pos >= self.scopes[scope_id].file_end_pos {
159 let cm = self.fcx.ccx.sess().codemap();
160 debuginfo::extend_scope_to_file(self.fcx.ccx,
162 &cm.lookup_char_pos(pos).file)
/// Translation-time representation of one MIR local: either backed by
/// memory (`Lvalue`, typically an alloca) or kept as an SSA operand
/// (`Operand`; `None` apparently until the defining statement is seen —
/// see the "wait until we see the definition" comment in `trans_mir`).
// NOTE(review): excerpt — the enum's closing brace is not visible here.
169 enum LocalRef<'tcx> {
170 Lvalue(LvalueRef<'tcx>),
171 Operand(Option<OperandRef<'tcx>>),
// NOTE(review): excerpt — else-branch markers, the `OperandRef` field list
// and closing braces are missing from this view (numbering jumps 184 -> 186,
// 188 -> 192).
174 impl<'tcx> LocalRef<'tcx> {
/// Builds the operand form of a local of type `ty`: zero-sized types get a
/// dummy null-filled operand (a `Pair` for immediate-pair types, otherwise
/// an `Immediate`); all other types start as `Operand(None)`.
175 fn new_operand<'bcx>(ccx: &CrateContext<'bcx, 'tcx>,
176 ty: ty::Ty<'tcx>) -> LocalRef<'tcx> {
177 if common::type_is_zero_size(ccx, ty) {
178 // Zero-size temporaries aren't always initialized, which
179 // doesn't matter because they don't contain data, but
180 // we need something in the operand.
181 let llty = type_of::type_of(ccx, ty);
182 let val = if common::type_is_imm_pair(ccx, ty) {
183 let fields = llty.field_types();
184 OperandValue::Pair(C_null(fields[0]), C_null(fields[1]))
186 OperandValue::Immediate(C_null(llty))
188 let op = OperandRef {
192 LocalRef::Operand(Some(op))
194 LocalRef::Operand(None)
199 ///////////////////////////////////////////////////////////////////////////
/// Entry point: translates a function's MIR into LLVM IR inside `fcx`.
/// Flow visible in this excerpt: analyze which locals need lvalues and the
/// cleanup kind of each block; create one LLVM `Block` per MIR basic block;
/// build the `MirContext`; allocate locals (allocas or operands); branch to
/// the START block; translate blocks in reverse postorder; then prune
/// blocks that were never visited.
// NOTE(review): excerpt — many lines are missing (struct-literal fields,
// loop/closure bodies, closing braces); embedded numbering is
// non-contiguous throughout (e.g. 202 -> 205, 285 -> 290, 311 -> 316).
201 pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) {
202 let bcx = fcx.init(true).build();
205 // Analyze the temps to determine which must be lvalues
207 let (lvalue_locals, cleanup_kinds) = bcx.with_block(|bcx| {
208 (analyze::lvalue_locals(bcx, &mir),
209 analyze::cleanup_kinds(bcx, &mir))
212 // Allocate a `Block` for every basic block
213 let block_bcxs: IndexVec<mir::BasicBlock, Block<'blk,'tcx>> =
214 mir.basic_blocks().indices().map(|bb| {
215 if bb == mir::START_BLOCK {
216 fcx.new_block("start")
218 fcx.new_block(&format!("{:?}", bb))
222 // Compute debuginfo scopes from MIR scopes.
223 let scopes = debuginfo::create_mir_scopes(fcx);
225 let mut mircx = MirContext {
228 llpersonalityslot: None,
230 unreachable_block: None,
231 cleanup_kinds: cleanup_kinds,
232 landing_pads: IndexVec::from_elem(None, mir.basic_blocks()),
234 locals: IndexVec::new(),
237 // Allocate variable and temp allocas
239 let args = arg_local_refs(&bcx, &mir, &mircx.scopes, &lvalue_locals);
241 let mut allocate_local = |local| {
242 let decl = &mir.local_decls[local];
243 let ty = bcx.monomorphize(&decl.ty);
// User-named locals may get a dedicated alloca plus debuginfo entry.
245 if let Some(name) = decl.name {
247 let source_info = decl.source_info.unwrap();
248 let debug_scope = mircx.scopes[source_info.scope];
249 let dbg = debug_scope.is_valid() && bcx.sess().opts.debuginfo == FullDebugInfo;
// Operand form is only allowed when no alloca is required and no
// debuginfo (which needs an address) is being emitted.
251 if !lvalue_locals.contains(local.index()) && !dbg {
252 debug!("alloc: {:?} ({}) -> operand", local, name);
253 return LocalRef::new_operand(bcx.ccx(), ty);
256 debug!("alloc: {:?} ({}) -> lvalue", local, name);
257 let lvalue = LvalueRef::alloca(&bcx, ty, &name.as_str());
259 let dbg_loc = mircx.debug_loc(source_info);
260 if let DebugLoc::ScopeAt(scope, span) = dbg_loc {
261 bcx.with_block(|bcx| {
262 declare_local(bcx, name, ty, scope,
263 VariableAccess::DirectVariable { alloca: lvalue.llval },
264 VariableKind::LocalVariable, span);
// debug_loc is expected to yield ScopeAt here; anything else is a bug.
267 panic!("Unexpected");
270 LocalRef::Lvalue(lvalue)
272 // Temporary or return pointer
273 if local == mir::RETURN_POINTER && fcx.fn_ty.ret.is_indirect() {
274 debug!("alloc: {:?} (return pointer) -> lvalue", local);
275 let llretptr = llvm::get_param(fcx.llfn, 0);
276 LocalRef::Lvalue(LvalueRef::new_sized(llretptr, LvalueTy::from_ty(ty)))
277 } else if lvalue_locals.contains(local.index()) {
278 debug!("alloc: {:?} -> lvalue", local);
279 LocalRef::Lvalue(LvalueRef::alloca(&bcx, ty, &format!("{:?}", local)))
281 // If this is an immediate local, we do not create an
282 // alloca in advance. Instead we wait until we see the
283 // definition and update the operand there.
284 debug!("alloc: {:?} -> operand", local);
285 LocalRef::new_operand(bcx.ccx(), ty)
// Locals are assembled in order: return pointer, then args, then vars/temps.
290 let retptr = allocate_local(mir::RETURN_POINTER);
292 .chain(args.into_iter())
293 .chain(mir.var_and_temp_iter().map(&mut allocate_local))
297 // Branch to the START block
298 let start_bcx = mircx.blocks[mir::START_BLOCK];
299 bcx.br(start_bcx.llbb);
301 // Up until here, IR instructions for this function have explicitly not been annotated with
302 // source code location, so we don't step into call setup code. From here on, source location
303 // emitting should be enabled.
304 debuginfo::start_emitting_source_locations(fcx);
306 let mut visited = BitVector::new(mir.basic_blocks().len());
308 let mut rpo = traversal::reverse_postorder(&mir);
310 // Prepare each block for translation.
311 for (bb, _) in rpo.by_ref() {
316 // Translate the body of each block using reverse postorder
318 visited.insert(bb.index());
319 mircx.trans_block(bb);
322 // Remove blocks that haven't been visited, or have no
324 for bb in mir.basic_blocks().indices() {
325 let block = mircx.blocks[bb];
326 let block = BasicBlock(block.llbb);
328 if !visited.contains(bb.index()) {
329 debug!("trans_mir: block {:?} was not visited", bb);
334 DebugLoc::None.apply(fcx);
338 /// Produce, for each argument, a `ValueRef` pointing at the
339 /// argument's value. As arguments are lvalues, these are always
// NOTE(review): excerpt — the doc sentence above is cut off and many lines
// (else-arms, `llarg_idx` increments, struct-literal fields, closing
// braces) are missing from this view; embedded numbering is non-contiguous
// throughout (e.g. 345 -> 349, 425 -> 428, 509 -> 513).
341 fn arg_local_refs<'bcx, 'tcx>(bcx: &BlockAndBuilder<'bcx, 'tcx>,
342 mir: &mir::Mir<'tcx>,
343 scopes: &IndexVec<mir::VisibilityScope, debuginfo::MirDebugScope>,
344 lvalue_locals: &BitVector)
345 -> Vec<LocalRef<'tcx>> {
// An indirect return value occupies the first LLVM parameter slot, so real
// arguments start at index 1 in that case.
349 let mut llarg_idx = fcx.fn_ty.ret.is_indirect() as usize;
351 // Get the argument scope, if it exists and if we need it.
352 let arg_scope = scopes[mir::ARGUMENT_VISIBILITY_SCOPE];
353 let arg_scope = if arg_scope.is_valid() && bcx.sess().opts.debuginfo == FullDebugInfo {
354 Some(arg_scope.scope_metadata)
359 mir.arg_iter().enumerate().map(|(arg_index, local)| {
360 let arg_decl = &mir.local_decls[local];
361 let arg_ty = bcx.monomorphize(&arg_decl.ty);
363 if let Some(spread_local) = mir.spread_arg {
364 if local == spread_local {
365 // This argument (e.g. the last argument in the "rust-call" ABI)
366 // is a tuple that was spread at the ABI level and now we have
367 // to reconstruct it into a tuple local variable, from multiple
368 // individual LLVM function arguments.
370 let tupled_arg_tys = match arg_ty.sty {
371 ty::TyTuple(ref tys) => tys,
372 _ => bug!("spread argument isn't a tuple?!")
375 let lltuplety = type_of::type_of(bcx.ccx(), arg_ty);
376 let lltemp = bcx.with_block(|bcx| {
377 base::alloc_ty(bcx, arg_ty, &format!("arg{}", arg_index))
// Store each ABI-level argument into its field of the tuple alloca.
379 for (i, &tupled_arg_ty) in tupled_arg_tys.iter().enumerate() {
380 let dst = bcx.struct_gep(lltemp, i);
381 let arg = &fcx.fn_ty.args[idx];
383 if common::type_is_fat_ptr(tcx, tupled_arg_ty) {
384 // We pass fat pointers as two words, but inside the tuple
385 // they are the two sub-fields of a single aggregate field.
386 let meta = &fcx.fn_ty.args[idx];
388 arg.store_fn_arg(bcx, &mut llarg_idx, get_dataptr(bcx, dst));
389 meta.store_fn_arg(bcx, &mut llarg_idx, get_meta(bcx, dst));
391 arg.store_fn_arg(bcx, &mut llarg_idx, dst);
// Debuginfo for each tuple element: address = tuple alloca, deref'd and
// offset by the element's byte offset (DW_OP_deref, DW_OP_plus).
394 bcx.with_block(|bcx| arg_scope.map(|scope| {
395 let byte_offset_of_var_in_tuple =
396 machine::llelement_offset(bcx.ccx(), lltuplety, i);
399 [llvm::LLVMRustDIBuilderCreateOpDeref(),
400 llvm::LLVMRustDIBuilderCreateOpPlus(),
401 byte_offset_of_var_in_tuple as i64]
404 let variable_access = VariableAccess::IndirectVariable {
406 address_operations: &ops
408 declare_local(bcx, keywords::Invalid.name(),
409 tupled_arg_ty, scope, variable_access,
410 VariableKind::ArgumentVariable(arg_index + i + 1),
411 bcx.fcx().span.unwrap_or(DUMMY_SP));
414 return LocalRef::Lvalue(LvalueRef::new_sized(lltemp, LvalueTy::from_ty(arg_ty)));
// Normal (non-spread) argument path.
418 let arg = &fcx.fn_ty.args[idx];
420 let llval = if arg.is_indirect() && bcx.sess().opts.debuginfo != FullDebugInfo {
421 // Don't copy an indirect argument to an alloca, the caller
422 // already put it in a temporary alloca and gave it up, unless
423 // we emit extra-debug-info, which requires local allocas :(.
425 if arg.pad.is_some() {
428 let llarg = llvm::get_param(fcx.llfn, llarg_idx as c_uint);
431 } else if !lvalue_locals.contains(local.index()) &&
432 !arg.is_indirect() && arg.cast.is_none() &&
433 arg_scope.is_none() {
// No alloca needed, no cast, no debuginfo: keep it as a bare operand.
435 return LocalRef::new_operand(bcx.ccx(), arg_ty);
438 // We don't have to cast or keep the argument in the alloca.
439 // FIXME(eddyb): We should figure out how to use llvm.dbg.value instead
440 // of putting everything in allocas just so we can use llvm.dbg.declare.
441 if arg.pad.is_some() {
444 let llarg = llvm::get_param(fcx.llfn, llarg_idx as c_uint);
446 let val = if common::type_is_fat_ptr(tcx, arg_ty) {
447 let meta = &fcx.fn_ty.args[idx];
449 assert_eq!((meta.cast, meta.pad), (None, None));
450 let llmeta = llvm::get_param(fcx.llfn, llarg_idx as c_uint);
452 OperandValue::Pair(llarg, llmeta)
454 OperandValue::Immediate(llarg)
456 let operand = OperandRef {
460 return LocalRef::Operand(Some(operand.unpack_if_pair(bcx)));
// Otherwise spill the argument into a fresh alloca.
462 let lltemp = bcx.with_block(|bcx| {
463 base::alloc_ty(bcx, arg_ty, &format!("arg{}", arg_index))
465 if common::type_is_fat_ptr(tcx, arg_ty) {
466 // we pass fat pointers as two words, but we want to
467 // represent them internally as a pointer to two words,
468 // so make an alloca to store them in.
469 let meta = &fcx.fn_ty.args[idx];
471 arg.store_fn_arg(bcx, &mut llarg_idx, get_dataptr(bcx, lltemp));
472 meta.store_fn_arg(bcx, &mut llarg_idx, get_meta(bcx, lltemp));
474 // otherwise, arg is passed by value, so make a
475 // temporary and store it there
476 arg.store_fn_arg(bcx, &mut llarg_idx, lltemp);
480 bcx.with_block(|bcx| arg_scope.map(|scope| {
481 // Is this a regular argument?
482 if arg_index > 0 || mir.upvar_decls.is_empty() {
483 declare_local(bcx, arg_decl.name.unwrap_or(keywords::Invalid.name()), arg_ty,
484 scope, VariableAccess::DirectVariable { alloca: llval },
485 VariableKind::ArgumentVariable(arg_index + 1),
486 bcx.fcx().span.unwrap_or(DUMMY_SP));
490 // Or is it the closure environment?
491 let (closure_ty, env_ref) = if let ty::TyRef(_, mt) = arg_ty.sty {
496 let upvar_tys = if let ty::TyClosure(_, ref substs) = closure_ty.sty {
497 &substs.upvar_tys[..]
499 bug!("upvar_decls with non-closure arg0 type `{}`", closure_ty);
502 // Store the pointer to closure data in an alloca for debuginfo
503 // because that's what the llvm.dbg.declare intrinsic expects.
505 // FIXME(eddyb) this shouldn't be necessary but SROA seems to
506 // mishandle DW_OP_plus not preceded by DW_OP_deref, i.e. it
507 // doesn't actually strip the offset when splitting the closure
508 // environment into its components so it ends up out of bounds.
509 let env_ptr = if !env_ref {
513 let alloc = alloca(bcx, val_ty(llval), "__debuginfo_env_ptr");
514 Store(bcx, llval, alloc);
// Declare one captured variable per upvar, addressed relative to the
// closure environment pointer.
520 let llclosurety = type_of::type_of(bcx.ccx(), closure_ty);
521 for (i, (decl, ty)) in mir.upvar_decls.iter().zip(upvar_tys).enumerate() {
522 let byte_offset_of_var_in_env =
523 machine::llelement_offset(bcx.ccx(), llclosurety, i);
526 [llvm::LLVMRustDIBuilderCreateOpDeref(),
527 llvm::LLVMRustDIBuilderCreateOpPlus(),
528 byte_offset_of_var_in_env as i64,
529 llvm::LLVMRustDIBuilderCreateOpDeref()]
532 // The environment and the capture can each be indirect.
534 // FIXME(eddyb) see above why we have to keep
535 // a pointer in an alloca for debuginfo atm.
536 let mut ops = if env_ref || true { &ops[..] } else { &ops[1..] };
538 let ty = if let (true, &ty::TyRef(_, mt)) = (decl.by_ref, &ty.sty) {
// By-ref captures drop the trailing deref op (debugger sees the ref).
541 ops = &ops[..ops.len() - 1];
545 let variable_access = VariableAccess::IndirectVariable {
547 address_operations: &ops
549 declare_local(bcx, decl.debug_name, ty, scope, variable_access,
550 VariableKind::CapturedVariable,
551 bcx.fcx().span.unwrap_or(DUMMY_SP));
554 LocalRef::Lvalue(LvalueRef::new_sized(llval, LvalueTy::from_ty(arg_ty)))