1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
12 use llvm::{self, ValueRef};
14 use rustc::mir::repr as mir;
15 use rustc::mir::tcx::LvalueTy;
16 use session::config::FullDebugInfo;
18 use common::{self, Block, BlockAndBuilder, CrateContext, FunctionContext, C_null};
19 use debuginfo::{self, declare_local, DebugLoc, VariableAccess, VariableKind, FunctionDebugContext};
23 use syntax_pos::{DUMMY_SP, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos};
24 use syntax::parse::token::keywords;
30 use basic_block::BasicBlock;
32 use rustc_data_structures::bitvec::BitVector;
33 use rustc_data_structures::indexed_vec::{IndexVec, Idx};
35 pub use self::constant::trans_static_initializer;
37 use self::lvalue::{LvalueRef, get_dataptr, get_meta};
38 use rustc::mir::traversal;
40 use self::operand::{OperandRef, OperandValue};
// A MIR body handle that is either borrowed from the map or shared via `Rc`,
// so callers can hold MIR without caring which storage it came from.
// NOTE(review): the enum's closing brace is missing from this listing — the
// embedded original line numbers jump from 45 to 48. Verify against the
// complete source.
43 pub enum CachedMir<'mir, 'tcx: 'mir> {
44 Ref(&'mir mir::Mir<'tcx>),
45 Owned(Rc<mir::Mir<'tcx>>)
// Deref so a `CachedMir` can be used transparently wherever a `&mir::Mir`
// is expected, regardless of which variant it holds.
48 impl<'mir, 'tcx: 'mir> Deref for CachedMir<'mir, 'tcx> {
49 type Target = mir::Mir<'tcx>;
50 fn deref(&self) -> &mir::Mir<'tcx> {
// NOTE(review): the `match *self {` opener (original line 51) and the
// closing braces are absent from this listing (numbering jumps 50 -> 52 and
// 53 -> 58); only the two match arms survive here.
52 CachedMir::Ref(r) => r,
53 CachedMir::Owned(ref rc) => rc
58 /// Master context for translating MIR.
// NOTE(review): this listing has dropped lines inside the struct (embedded
// numbering jumps 60 -> 63, 63 -> 65, 105 -> 108); at least one item between
// `mir` and `fcx` and the struct's closing brace are not visible here.
// Verify field list against the complete source.
59 pub struct MirContext<'bcx, 'tcx:'bcx> {
// The MIR body being translated (borrowed or ref-counted — see `CachedMir`).
60 mir: CachedMir<'bcx, 'tcx>,
// Per-function translation state (LLVM function, debug context, session, …).
63 fcx: &'bcx common::FunctionContext<'bcx, 'tcx>,
65 /// When unwinding is initiated, we have to store this personality
66 /// value somewhere so that we can load it and re-use it in the
67 /// resume instruction. The personality is (afaik) some kind of
68 /// value used for C++ unwinding, which must filter by type: we
69 /// don't really care about it very much. Anyway, this value
70 /// contains an alloca into which the personality is stored and
71 /// then later loaded when generating the DIVERGE_BLOCK.
72 llpersonalityslot: Option<ValueRef>,
74 /// A `Block` for each MIR `BasicBlock`
75 blocks: IndexVec<mir::BasicBlock, Block<'bcx, 'tcx>>,
77 /// The funclet status of each basic block
78 cleanup_kinds: IndexVec<mir::BasicBlock, analyze::CleanupKind>,
80 /// This stores the landing-pad block for a given BB, computed lazily on GNU
81 /// and eagerly on MSVC.
82 landing_pads: IndexVec<mir::BasicBlock, Option<Block<'bcx, 'tcx>>>,
84 /// Cached unreachable block
85 unreachable_block: Option<Block<'bcx, 'tcx>>,
87 /// The location where each MIR arg/var/tmp/ret is stored. This is
88 /// usually an `LvalueRef` representing an alloca, but not always:
89 /// sometimes we can skip the alloca and just store the value
90 /// directly using an `OperandRef`, which makes for tighter LLVM
91 /// IR. The conditions for using an `OperandRef` are as follows:
93 /// - the type of the local must be judged "immediate" by `type_is_immediate`
94 /// - the operand must never be referenced indirectly
95 /// - we should not take its address using the `&` operator
96 /// - nor should it appear in an lvalue path like `tmp.a`
97 /// - the operand must be defined by an rvalue that can generate immediate
100 /// Avoiding allocs can also be important for certain intrinsics,
101 /// notably `expect`.
102 locals: IndexVec<mir::Local, LocalRef<'tcx>>,
104 /// Debug information for MIR scopes.
105 scopes: IndexVec<mir::VisibilityScope, debuginfo::MirDebugScope>,
// NOTE(review): this impl block is incomplete in this listing — the embedded
// original numbering jumps at 116 -> 119, 127 -> 129, 131 -> 133, 139 -> 144,
// 146 -> 150, 160 -> 162 and 162 -> 169, dropping a call argument, `else`
// openers, and closing braces. The comments below only describe what the
// surviving lines show; verify against the complete source.
108 impl<'blk, 'tcx> MirContext<'blk, 'tcx> {
// Computes the `DebugLoc` to attach to LLVM instructions generated for
// `source_info`. Macro-expansion spans are collapsed to the outermost call
// site (unless `-Z debug-macros` is enabled) so debugger stepping stays at
// the expansion site.
109 pub fn debug_loc(&mut self, source_info: mir::SourceInfo) -> DebugLoc {
110 // Bail out if debug info emission is not enabled.
111 match self.fcx.debug_context {
112 FunctionDebugContext::DebugInfoDisabled |
113 FunctionDebugContext::FunctionWithoutDebugInfo => {
114 // Can't return DebugLoc::None here because intrinsic::trans_intrinsic_call()
115 // relies on debug location to obtain span of the call site.
// NOTE(review): the second argument of this call (original line 117, most
// likely the span) and the arm's closing brace were dropped from the listing.
116 return DebugLoc::ScopeAt(self.scopes[source_info.scope].scope_metadata,
119 FunctionDebugContext::RegularContext(_) =>{}
122 // In order to have a good line stepping behavior in debugger, we overwrite debug
123 // locations of macro expansions with that of the outermost expansion site
124 // (unless the crate is being compiled with `-Z debug-macros`).
125 if source_info.span.expn_id == NO_EXPANSION ||
126 source_info.span.expn_id == COMMAND_LINE_EXPN ||
127 self.fcx.ccx.sess().opts.debugging_opts.debug_macros {
// Not a macro expansion (or debug-macros requested): keep the span as-is.
129 let scope_metadata = self.scope_metadata_for_loc(source_info.scope,
130 source_info.span.lo);
131 DebugLoc::ScopeAt(scope_metadata, source_info.span)
// NOTE(review): the `} else {` between lines 131 and 133 is missing here.
133 let cm = self.fcx.ccx.sess().codemap();
134 // Walk up the macro expansion chain until we reach a non-expanded span.
135 let mut span = source_info.span;
136 while span.expn_id != NO_EXPANSION && span.expn_id != COMMAND_LINE_EXPN {
137 if let Some(callsite_span) = cm.with_expn_info(span.expn_id,
138 |ei| ei.map(|ei| ei.call_site.clone())) {
139 span = callsite_span;
144 let scope_metadata = self.scope_metadata_for_loc(source_info.scope, span.lo);
145 // Use span of the outermost call site, while keeping the original lexical scope
146 DebugLoc::ScopeAt(scope_metadata, span)
150 // DILocations inherit source file name from the parent DIScope. Due to macro expansions
151 // it may so happen that the current span belongs to a different file than the DIScope
152 // corresponding to span's containing visibility scope. If so, we need to create a DIScope
153 // "extension" into that file.
154 fn scope_metadata_for_loc(&self, scope_id: mir::VisibilityScope, pos: BytePos)
155 -> llvm::debuginfo::DIScope {
156 let scope_metadata = self.scopes[scope_id].scope_metadata;
// If `pos` falls outside the file the scope was created in, extend the
// scope into the file that actually contains `pos`.
157 if pos < self.scopes[scope_id].file_start_pos ||
158 pos >= self.scopes[scope_id].file_end_pos {
159 let cm = self.fcx.ccx.sess().codemap();
// NOTE(review): an argument of this call (original line 161) and the
// function's `else` branch / closing braces (lines 163-168) were dropped.
160 debuginfo::extend_scope_to_file(self.fcx.ccx,
162 &cm.lookup_char_pos(pos).file)
// How a single MIR local is materialized during translation: either backed
// by memory (`Lvalue`, usually an alloca) or kept as an SSA-like immediate
// (`Operand`; `None` until its defining rvalue has been translated).
// NOTE(review): the enum's closing brace (original line 172) is missing from
// this listing.
169 enum LocalRef<'tcx> {
170 Lvalue(LvalueRef<'tcx>),
171 Operand(Option<OperandRef<'tcx>>),
174 impl<'tcx> LocalRef<'tcx> {
// Creates the operand form of a local of type `ty`. Zero-sized locals get a
// pre-built null operand immediately (they carry no data, but downstream
// code still expects *something*); all other types start as `Operand(None)`
// and are filled in when their defining rvalue is translated.
// NOTE(review): this listing drops lines inside the function (numbering
// jumps 184 -> 186, 186 -> 188, 188 -> 192, 192 -> 194) — the `} else {`
// openers, the remaining fields of the `OperandRef` literal, and the
// closing braces are missing. Verify against the complete source.
175 fn new_operand<'bcx>(ccx: &CrateContext<'bcx, 'tcx>,
176 ty: ty::Ty<'tcx>) -> LocalRef<'tcx> {
177 if common::type_is_zero_size(ccx, ty) {
178 // Zero-size temporaries aren't always initialized, which
179 // doesn't matter because they don't contain data, but
180 // we need something in the operand.
181 let llty = type_of::type_of(ccx, ty);
// Immediate pairs need a null for each of the two fields; everything else
// gets a single null of the local's LLVM type.
182 let val = if common::type_is_imm_pair(ccx, ty) {
183 let fields = llty.field_types();
184 OperandValue::Pair(C_null(fields[0]), C_null(fields[1]))
186 OperandValue::Immediate(C_null(llty))
188 let op = OperandRef {
192 LocalRef::Operand(Some(op))
194 LocalRef::Operand(None)
199 ///////////////////////////////////////////////////////////////////////////
// Entry point: translates an entire MIR body into LLVM IR for the function
// described by `fcx`. Allocates one LLVM `Block` per MIR basic block,
// decides per-local whether to use an alloca or an immediate operand,
// emits debuginfo declarations for named variables, then translates each
// reachable block in reverse postorder.
// NOTE(review): this listing is missing many lines of this function — the
// embedded numbering jumps at 202 -> 205, 209 -> 212, 216 -> 218, 225 -> 228
// (several `MirContext` fields of the struct literal are absent), 247 -> 250,
// 257 -> 260, 263 -> 266, 282 -> 287, 301 -> 308 (the body of the "prepare"
// loop and the header of the translation loop), 319 -> 324, and more. In
// particular the binding of `mir` (used throughout, e.g. lines 208-209 and
// 240) is not visible. Verify every step against the complete source.
201 pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) {
202 let bcx = fcx.init(true).build();
205 // Analyze the temps to determine which must be lvalues
207 let (lvalue_locals, cleanup_kinds) = bcx.with_block(|bcx| {
208 (analyze::lvalue_locals(bcx, &mir),
209 analyze::cleanup_kinds(bcx, &mir))
212 // Allocate a `Block` for every basic block
213 let block_bcxs: IndexVec<mir::BasicBlock, Block<'blk,'tcx>> =
214 mir.basic_blocks().indices().map(|bb| {
215 if bb == mir::START_BLOCK {
216 fcx.new_block("start")
218 fcx.new_block(&format!("{:?}", bb))
222 // Compute debuginfo scopes from MIR scopes.
223 let scopes = debuginfo::create_mir_scopes(fcx);
225 let mut mircx = MirContext {
228 llpersonalityslot: None,
230 unreachable_block: None,
231 cleanup_kinds: cleanup_kinds,
232 landing_pads: IndexVec::from_elem(None, mir.basic_blocks()),
// `locals` starts empty; it is populated below (presumably assigned from
// the `args`/`vars`/`locals` chain — the assignment line is not visible in
// this listing).
234 locals: IndexVec::new(),
237 // Allocate variable and temp allocas
239 let args = arg_local_refs(&bcx, &mir, &mircx.scopes, &lvalue_locals);
240 let vars = mir.var_decls.iter().enumerate().map(|(i, decl)| {
241 let ty = bcx.monomorphize(&decl.ty);
242 let debug_scope = mircx.scopes[decl.source_info.scope];
243 let dbg = debug_scope.is_valid() && bcx.sess().opts.debuginfo == FullDebugInfo;
245 let local = mir.local_index(&mir::Lvalue::Var(mir::Var::new(i))).unwrap();
// A var that is not forced into memory and needs no debuginfo can stay in
// operand (immediate) form.
246 if !lvalue_locals.contains(local.index()) && !dbg {
247 return LocalRef::new_operand(bcx.ccx(), ty);
250 let lvalue = LvalueRef::alloca(&bcx, ty, &decl.name.as_str());
252 let dbg_loc = mircx.debug_loc(decl.source_info);
253 if let DebugLoc::ScopeAt(scope, span) = dbg_loc {
254 bcx.with_block(|bcx| {
255 declare_local(bcx, decl.name, ty, scope,
256 VariableAccess::DirectVariable { alloca: lvalue.llval },
257 VariableKind::LocalVariable, span);
// debug_loc() should always yield ScopeAt when debuginfo is enabled; any
// other variant here is a translator bug.
260 panic!("Unexpected");
263 LocalRef::Lvalue(lvalue)
// Temps plus the return pointer, each paired with its (un-monomorphized) type.
266 let locals = mir.temp_decls.iter().enumerate().map(|(i, decl)| {
267 (mir::Lvalue::Temp(mir::Temp::new(i)), decl.ty)
268 }).chain(iter::once((mir::Lvalue::ReturnPointer, mir.return_ty)));
270 args.into_iter().chain(vars).chain(locals.map(|(lvalue, ty)| {
271 let ty = bcx.monomorphize(&ty);
272 let local = mir.local_index(&lvalue).unwrap();
// An indirect ("sret") return reuses the caller-provided out-pointer
// (LLVM param 0) rather than allocating anything.
273 if lvalue == mir::Lvalue::ReturnPointer && fcx.fn_ty.ret.is_indirect() {
274 let llretptr = llvm::get_param(fcx.llfn, 0);
275 LocalRef::Lvalue(LvalueRef::new_sized(llretptr, LvalueTy::from_ty(ty)))
276 } else if lvalue_locals.contains(local.index()) {
277 LocalRef::Lvalue(LvalueRef::alloca(&bcx, ty, &format!("{:?}", lvalue)))
279 // If this is an immediate local, we do not create an
280 // alloca in advance. Instead we wait until we see the
281 // definition and update the operand there.
282 LocalRef::new_operand(bcx.ccx(), ty)
287 // Branch to the START block
288 let start_bcx = mircx.blocks[mir::START_BLOCK];
289 bcx.br(start_bcx.llbb);
291 // Up until here, IR instructions for this function have explicitly not been annotated with
292 // source code location, so we don't step into call setup code. From here on, source location
293 // emitting should be enabled.
294 debuginfo::start_emitting_source_locations(fcx);
296 let mut visited = BitVector::new(mir.basic_blocks().len());
298 let mut rpo = traversal::reverse_postorder(&mir);
300 // Prepare each block for translation.
301 for (bb, _) in rpo.by_ref() {
306 // Translate the body of each block using reverse postorder
// NOTE(review): `visited.insert`/`trans_block` presumably belong to a second
// loop whose header (around original line 307) was dropped from the listing.
308 visited.insert(bb.index());
309 mircx.trans_block(bb);
312 // Remove blocks that haven't been visited, or have no
314 for bb in mir.basic_blocks().indices() {
315 let block = mircx.blocks[bb];
316 let block = BasicBlock(block.llbb);
318 if !visited.contains(bb.index()) {
319 debug!("trans_mir: block {:?} was not visited", bb);
// Clear the function-wide debug location now that the body is done.
324 DebugLoc::None.apply(fcx);
328 /// Produce, for each argument, a `ValueRef` pointing at the
329 /// argument's value. As arguments are lvalues, these are always
// NOTE(review): this function is heavily abridged in this listing — the
// embedded numbering jumps at 335 -> 339 (the bindings of `fcx`/`tcx`, used
// below, are not visible), 344 -> 349 (including, presumably, the `None`
// arm and the binding of `idx`, which is read at lines 369/405/434/456),
// 351 -> 353, 360 -> 363, 369 -> 371, 399 -> 402, 410 -> 412, 415 -> 418
// (the indirect-arg return path), 437 -> 439, 443 -> 447 (fields of the
// `OperandRef` literal), 473 -> 477, 478 -> 483 (the TyRef destructuring),
// 501 -> 507, 516 -> 519, 528 -> 532, and the function tail after line 541
// is cut off. The comments below describe only what survives; verify every
// claim against the complete source.
331 fn arg_local_refs<'bcx, 'tcx>(bcx: &BlockAndBuilder<'bcx, 'tcx>,
332 mir: &mir::Mir<'tcx>,
333 scopes: &IndexVec<mir::VisibilityScope, debuginfo::MirDebugScope>,
334 lvalue_locals: &BitVector)
335 -> Vec<LocalRef<'tcx>> {
// An indirect return consumes LLVM param 0, so real args start at index 1.
339 let mut llarg_idx = fcx.fn_ty.ret.is_indirect() as usize;
341 // Get the argument scope, if it exists and if we need it.
342 let arg_scope = scopes[mir::ARGUMENT_VISIBILITY_SCOPE];
343 let arg_scope = if arg_scope.is_valid() && bcx.sess().opts.debuginfo == FullDebugInfo {
344 Some(arg_scope.scope_metadata)
349 mir.arg_decls.iter().enumerate().map(|(arg_index, arg_decl)| {
350 let arg_ty = bcx.monomorphize(&arg_decl.ty);
351 let local = mir.local_index(&mir::Lvalue::Arg(mir::Arg::new(arg_index))).unwrap();
353 // This argument (e.g. the last argument in the "rust-call" ABI)
354 // is a tuple that was spread at the ABI level and now we have
355 // to reconstruct it into a tuple local variable, from multiple
356 // individual LLVM function arguments.
358 let tupled_arg_tys = match arg_ty.sty {
359 ty::TyTuple(ref tys) => tys,
360 _ => bug!("spread argument isn't a tuple?!")
// Alloca for the reconstructed tuple; each element is stored in turn.
363 let lltuplety = type_of::type_of(bcx.ccx(), arg_ty);
364 let lltemp = bcx.with_block(|bcx| {
365 base::alloc_ty(bcx, arg_ty, &format!("arg{}", arg_index))
367 for (i, &tupled_arg_ty) in tupled_arg_tys.iter().enumerate() {
368 let dst = bcx.struct_gep(lltemp, i);
369 let arg = &fcx.fn_ty.args[idx];
371 if common::type_is_fat_ptr(tcx, tupled_arg_ty) {
372 // We pass fat pointers as two words, but inside the tuple
373 // they are the two sub-fields of a single aggregate field.
374 let meta = &fcx.fn_ty.args[idx];
376 arg.store_fn_arg(bcx, &mut llarg_idx, get_dataptr(bcx, dst));
377 meta.store_fn_arg(bcx, &mut llarg_idx, get_meta(bcx, dst));
379 arg.store_fn_arg(bcx, &mut llarg_idx, dst);
// Declare one debuginfo variable per tuple element, addressed as an
// offset off the tuple alloca (deref + byte offset).
382 bcx.with_block(|bcx| arg_scope.map(|scope| {
383 let byte_offset_of_var_in_tuple =
384 machine::llelement_offset(bcx.ccx(), lltuplety, i);
387 [llvm::LLVMRustDIBuilderCreateOpDeref(),
388 llvm::LLVMRustDIBuilderCreateOpPlus(),
389 byte_offset_of_var_in_tuple as i64]
392 let variable_access = VariableAccess::IndirectVariable {
394 address_operations: &ops
396 declare_local(bcx, keywords::Invalid.name(),
397 tupled_arg_ty, scope, variable_access,
398 VariableKind::ArgumentVariable(arg_index + i + 1),
399 bcx.fcx().span.unwrap_or(DUMMY_SP));
402 return LocalRef::Lvalue(LvalueRef::new_sized(lltemp, LvalueTy::from_ty(arg_ty)));
405 let arg = &fcx.fn_ty.args[idx];
407 let llval = if arg.is_indirect() && bcx.sess().opts.debuginfo != FullDebugInfo {
408 // Don't copy an indirect argument to an alloca, the caller
409 // already put it in a temporary alloca and gave it up, unless
410 // we emit extra-debug-info, which requires local allocas :(.
412 if arg.pad.is_some() {
415 let llarg = llvm::get_param(fcx.llfn, llarg_idx as c_uint);
// Immediate, non-memory arg with no debuginfo scope: skip the alloca and
// keep it in operand form.
418 } else if !lvalue_locals.contains(local.index()) &&
419 !arg.is_indirect() && arg.cast.is_none() &&
420 arg_scope.is_none() {
422 return LocalRef::new_operand(bcx.ccx(), arg_ty);
425 // We don't have to cast or keep the argument in the alloca.
426 // FIXME(eddyb): We should figure out how to use llvm.dbg.value instead
427 // of putting everything in allocas just so we can use llvm.dbg.declare.
428 if arg.pad.is_some() {
431 let llarg = llvm::get_param(fcx.llfn, llarg_idx as c_uint);
// Fat pointers arrive as two LLVM params (data + meta); rebuild the pair.
433 let val = if common::type_is_fat_ptr(tcx, arg_ty) {
434 let meta = &fcx.fn_ty.args[idx];
436 assert_eq!((meta.cast, meta.pad), (None, None));
437 let llmeta = llvm::get_param(fcx.llfn, llarg_idx as c_uint);
439 OperandValue::Pair(llarg, llmeta)
441 OperandValue::Immediate(llarg)
443 let operand = OperandRef {
447 return LocalRef::Operand(Some(operand.unpack_if_pair(bcx)));
// Memory path: spill the argument into a fresh alloca.
449 let lltemp = bcx.with_block(|bcx| {
450 base::alloc_ty(bcx, arg_ty, &format!("arg{}", arg_index))
452 if common::type_is_fat_ptr(tcx, arg_ty) {
453 // we pass fat pointers as two words, but we want to
454 // represent them internally as a pointer to two words,
455 // so make an alloca to store them in.
456 let meta = &fcx.fn_ty.args[idx];
458 arg.store_fn_arg(bcx, &mut llarg_idx, get_dataptr(bcx, lltemp));
459 meta.store_fn_arg(bcx, &mut llarg_idx, get_meta(bcx, lltemp));
461 // otherwise, arg is passed by value, so make a
462 // temporary and store it there
463 arg.store_fn_arg(bcx, &mut llarg_idx, lltemp);
467 bcx.with_block(|bcx| arg_scope.map(|scope| {
468 // Is this a regular argument?
469 if arg_index > 0 || mir.upvar_decls.is_empty() {
470 declare_local(bcx, arg_decl.debug_name, arg_ty, scope,
471 VariableAccess::DirectVariable { alloca: llval },
472 VariableKind::ArgumentVariable(arg_index + 1),
473 bcx.fcx().span.unwrap_or(DUMMY_SP));
477 // Or is it the closure environment?
478 let (closure_ty, env_ref) = if let ty::TyRef(_, mt) = arg_ty.sty {
483 let upvar_tys = if let ty::TyClosure(_, ref substs) = closure_ty.sty {
484 &substs.upvar_tys[..]
486 bug!("upvar_decls with non-closure arg0 type `{}`", closure_ty);
489 // Store the pointer to closure data in an alloca for debuginfo
490 // because that's what the llvm.dbg.declare intrinsic expects.
492 // FIXME(eddyb) this shouldn't be necessary but SROA seems to
493 // mishandle DW_OP_plus not preceded by DW_OP_deref, i.e. it
494 // doesn't actually strip the offset when splitting the closure
495 // environment into its components so it ends up out of bounds.
496 let env_ptr = if !env_ref {
500 let alloc = alloca(bcx, val_ty(llval), "__debuginfo_env_ptr");
501 Store(bcx, llval, alloc);
// Declare one captured variable per upvar, addressed relative to the
// closure environment pointer (deref + offset + deref).
507 let llclosurety = type_of::type_of(bcx.ccx(), closure_ty);
508 for (i, (decl, ty)) in mir.upvar_decls.iter().zip(upvar_tys).enumerate() {
509 let byte_offset_of_var_in_env =
510 machine::llelement_offset(bcx.ccx(), llclosurety, i);
513 [llvm::LLVMRustDIBuilderCreateOpDeref(),
514 llvm::LLVMRustDIBuilderCreateOpPlus(),
515 byte_offset_of_var_in_env as i64,
516 llvm::LLVMRustDIBuilderCreateOpDeref()]
519 // The environment and the capture can each be indirect.
521 // FIXME(eddyb) see above why we have to keep
522 // a pointer in an alloca for debuginfo atm.
// `|| true` deliberately forces the full op list while the SROA workaround
// above is in place; the `&ops[1..]` alternative is currently dead.
523 let mut ops = if env_ref || true { &ops[..] } else { &ops[1..] };
525 let ty = if let (true, &ty::TyRef(_, mt)) = (decl.by_ref, &ty.sty) {
// By-ref captures drop the trailing deref op (the capture itself is
// already a pointer).
528 ops = &ops[..ops.len() - 1];
532 let variable_access = VariableAccess::IndirectVariable {
534 address_operations: &ops
536 declare_local(bcx, decl.debug_name, ty, scope, variable_access,
537 VariableKind::CapturedVariable,
538 bcx.fcx().span.unwrap_or(DUMMY_SP));
541 LocalRef::Lvalue(LvalueRef::new_sized(llval, LvalueTy::from_ty(arg_ty)))