// src/librustc_codegen_llvm/mir/block.rs

// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use llvm::{self, BasicBlock};
use rustc::middle::lang_items;
use rustc::ty::{self, Ty, TypeFoldable};
use rustc::ty::layout::{self, LayoutOf};
use rustc::mir;
use rustc::mir::interpret::EvalErrorKind;
use abi::{Abi, ArgType, ArgTypeExt, FnType, FnTypeExt, LlvmType, PassMode};
use base;
use callee;
use builder::{Builder, MemFlags};
use common::{self, C_bool, C_str_slice, C_struct, C_u32, C_uint_big, C_undef};
use consts;
use meth;
use monomorphize;
use type_of::LayoutLlvmExt;
use type_::Type;
use value::Value;

use syntax::symbol::Symbol;
use syntax_pos::Pos;

use super::{FunctionCx, LocalRef};
use super::place::PlaceRef;
use super::operand::OperandRef;
use super::operand::OperandValue::{Pair, Ref, Immediate};

impl FunctionCx<'a, 'll, 'tcx> {
    pub fn codegen_block(&mut self, bb: mir::BasicBlock) {
        let mut bx = self.build_block(bb);
        let data = &self.mir[bb];

        debug!("codegen_block({:?}={:?})", bb, data);

        for statement in &data.statements {
            bx = self.codegen_statement(bx, statement);
        }

        self.codegen_terminator(bx, bb, data.terminator());
    }

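    /// Codegen the given MIR terminator, branching to (or lazily creating)
    /// the LLVM blocks it targets and threading unwind/cleanup edges
    /// through the appropriate funclets.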
    fn codegen_terminator(&mut self,
                          mut bx: Builder<'a, 'll, 'tcx>,
                          bb: mir::BasicBlock,
                          terminator: &mir::Terminator<'tcx>)
    {
        debug!("codegen_terminator: {:?}", terminator);

        // Create the cleanup bundle, if needed.
        let tcx = bx.tcx();
        let span = terminator.source_info.span;
        let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);
        let funclet = funclet_bb.and_then(|funclet_bb| self.funclets[funclet_bb].as_ref());

        let cleanup_pad = funclet.map(|lp| lp.cleanuppad());
        let cleanup_bundle = funclet.map(|l| l.bundle());

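        // Resolve a MIR target block to an LLVM block, inserting a GNU-style
        // landing pad when jumping *into* cleanup code. The returned bool is
        // true for an MSVC cross-funclet jump, which must leave the current
        // funclet via a `cleanupret`.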
        let lltarget = |this: &mut Self, target: mir::BasicBlock| {
            let lltarget = this.blocks[target];
            let target_funclet = this.cleanup_kinds[target].funclet_bb(target);
            match (funclet_bb, target_funclet) {
                (None, None) => (lltarget, false),
                (Some(f), Some(t_f))
                    if f == t_f || !base::wants_msvc_seh(tcx.sess)
                    => (lltarget, false),
                (None, Some(_)) => {
                    // jump *into* cleanup - need a landing pad if GNU
                    (this.landing_pad_to(target), false)
                }
                (Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", terminator),
                (Some(_), Some(_)) => {
                    (this.landing_pad_to(target), true)
                }
            }
        };

        let llblock = |this: &mut Self, target: mir::BasicBlock| {
            let (lltarget, is_cleanupret) = lltarget(this, target);
            if is_cleanupret {
                // MSVC cross-funclet jump - need a trampoline

                debug!("llblock: creating cleanup trampoline for {:?}", target);
                let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
                let trampoline = this.new_block(name);
                trampoline.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
                trampoline.llbb()
            } else {
                lltarget
            }
        };

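        // Branch to a target block, emitting a `cleanupret` directly instead
        // of a jump to a trampoline when leaving an MSVC funclet.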
        let funclet_br = |this: &mut Self, bx: Builder<'_, 'll, '_>, target: mir::BasicBlock| {
            let (lltarget, is_cleanupret) = lltarget(this, target);
            if is_cleanupret {
                // micro-optimization: generate a `ret` rather than a jump
                // to a trampoline.
                bx.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
            } else {
                bx.br(lltarget);
            }
        };

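        // Emit the actual call: an `invoke` with an unwind edge when a
        // cleanup block is supplied, a plain `call` otherwise, storing the
        // return value into its destination afterwards.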
        let do_call = |
            this: &mut Self,
            bx: Builder<'a, 'll, 'tcx>,
            fn_ty: FnType<'tcx, Ty<'tcx>>,
            fn_ptr: &'ll Value,
            llargs: &[&'ll Value],
            destination: Option<(ReturnDest<'ll, 'tcx>, mir::BasicBlock)>,
            cleanup: Option<mir::BasicBlock>
        | {
            if let Some(cleanup) = cleanup {
                let ret_bx = if let Some((_, target)) = destination {
                    this.blocks[target]
                } else {
                    this.unreachable_block()
                };
                let invokeret = bx.invoke(fn_ptr,
                                          &llargs,
                                          ret_bx,
                                          llblock(this, cleanup),
                                          cleanup_bundle);
                fn_ty.apply_attrs_callsite(&bx, invokeret);

                if let Some((ret_dest, target)) = destination {
                    let ret_bx = this.build_block(target);
                    this.set_debug_loc(&ret_bx, terminator.source_info);
                    this.store_return(&ret_bx, ret_dest, &fn_ty.ret, invokeret);
                }
            } else {
                let llret = bx.call(fn_ptr, &llargs, cleanup_bundle);
                fn_ty.apply_attrs_callsite(&bx, llret);
                if this.mir[bb].is_cleanup {
                    // Cleanup is always the cold path. Don't inline
                    // drop glue. Also, when there is a deeply-nested
                    // struct, there are "symmetry" issues that cause
                    // exponential inlining - see issue #41696.
                    llvm::Attribute::NoInline.apply_callsite(llvm::AttributePlace::Function, llret);
                }

                if let Some((ret_dest, target)) = destination {
                    this.store_return(&bx, ret_dest, &fn_ty.ret, llret);
                    funclet_br(this, bx, target);
                } else {
                    bx.unreachable();
                }
            }
        };

        self.set_debug_loc(&bx, terminator.source_info);
        match terminator.kind {
            mir::TerminatorKind::Resume => {
                if let Some(cleanup_pad) = cleanup_pad {
                    bx.cleanup_ret(cleanup_pad, None);
                } else {
                    let slot = self.get_personality_slot(&bx);
                    let lp0 = slot.project_field(&bx, 0).load(&bx).immediate();
                    let lp1 = slot.project_field(&bx, 1).load(&bx).immediate();
                    slot.storage_dead(&bx);

                    if !bx.sess().target.target.options.custom_unwind_resume {
                        let mut lp = C_undef(self.landing_pad_type());
                        lp = bx.insert_value(lp, lp0, 0);
                        lp = bx.insert_value(lp, lp1, 1);
                        bx.resume(lp);
                    } else {
                        bx.call(bx.cx.eh_unwind_resume(), &[lp0], cleanup_bundle);
                        bx.unreachable();
                    }
                }
            }

            mir::TerminatorKind::Abort => {
                // Call core::intrinsics::abort()
                let fnname = bx.cx.get_intrinsic(&("llvm.trap"));
                bx.call(fnname, &[], None);
                bx.unreachable();
            }

            mir::TerminatorKind::Goto { target } => {
                funclet_br(self, bx, target);
            }

            mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref values, ref targets } => {
                let discr = self.codegen_operand(&bx, discr);
                if targets.len() == 2 {
                    // If there are two targets, emit br instead of switch
                    let lltrue = llblock(self, targets[0]);
                    let llfalse = llblock(self, targets[1]);
                    if switch_ty == bx.tcx().types.bool {
                        // Don't generate trivial icmps when switching on bool
                        if let [0] = values[..] {
                            bx.cond_br(discr.immediate(), llfalse, lltrue);
                        } else {
                            assert_eq!(&values[..], &[1]);
                            bx.cond_br(discr.immediate(), lltrue, llfalse);
                        }
                    } else {
                        let switch_llty = bx.cx.layout_of(switch_ty).immediate_llvm_type(bx.cx);
                        let llval = C_uint_big(switch_llty, values[0]);
                        let cmp = bx.icmp(llvm::IntEQ, discr.immediate(), llval);
                        bx.cond_br(cmp, lltrue, llfalse);
                    }
                } else {
                    let (otherwise, targets) = targets.split_last().unwrap();
                    let switch = bx.switch(discr.immediate(),
                                           llblock(self, *otherwise), values.len());
                    let switch_llty = bx.cx.layout_of(switch_ty).immediate_llvm_type(bx.cx);
                    for (&value, target) in values.iter().zip(targets) {
                        let llval = C_uint_big(switch_llty, value);
                        let llbb = llblock(self, *target);
                        bx.add_case(switch, llval, llbb)
                    }
                }
            }

            mir::TerminatorKind::Return => {
                let llval = match self.fn_ty.ret.mode {
                    PassMode::Ignore | PassMode::Indirect(_) => {
                        bx.ret_void();
                        return;
                    }

                    PassMode::Direct(_) | PassMode::Pair(..) => {
                        let op = self.codegen_consume(&bx, &mir::Place::Local(mir::RETURN_PLACE));
                        if let Ref(llval, align) = op.val {
                            bx.load(llval, align)
                        } else {
                            op.immediate_or_packed_pair(&bx)
                        }
                    }

                    PassMode::Cast(cast_ty) => {
                        let op = match self.locals[mir::RETURN_PLACE] {
                            LocalRef::Operand(Some(op)) => op,
                            LocalRef::Operand(None) => bug!("use of return before def"),
                            LocalRef::Place(cg_place) => {
                                OperandRef {
                                    val: Ref(cg_place.llval, cg_place.align),
                                    layout: cg_place.layout
                                }
                            }
                        };
                        let llslot = match op.val {
                            Immediate(_) | Pair(..) => {
                                let scratch = PlaceRef::alloca(&bx, self.fn_ty.ret.layout, "ret");
                                op.val.store(&bx, scratch);
                                scratch.llval
                            }
                            Ref(llval, align) => {
                                assert_eq!(align.abi(), op.layout.align.abi(),
                                           "return place is unaligned!");
                                llval
                            }
                        };
                        bx.load(
                            bx.pointercast(llslot, cast_ty.llvm_type(bx.cx).ptr_to()),
                            self.fn_ty.ret.layout.align)
                    }
                };
                bx.ret(llval);
            }

            mir::TerminatorKind::Unreachable => {
                bx.unreachable();
            }

            mir::TerminatorKind::Drop { ref location, target, unwind } => {
                let ty = location.ty(self.mir, bx.tcx()).to_ty(bx.tcx());
                let ty = self.monomorphize(&ty);
                let drop_fn = monomorphize::resolve_drop_in_place(bx.cx.tcx, ty);

                if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
                    // we don't actually need to drop anything.
                    funclet_br(self, bx, target);
                    return
                }

                let place = self.codegen_place(&bx, location);
                let (args1, args2);
                let mut args = if let Some(llextra) = place.llextra {
                    args2 = [place.llval, llextra];
                    &args2[..]
                } else {
                    args1 = [place.llval];
                    &args1[..]
                };
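                // For trait objects the destructor must be loaded from the
                // vtable, and only the data pointer is passed to it; for all
                // other types we call the monomorphized drop glue directly.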
                let (drop_fn, fn_ty) = match ty.sty {
                    ty::TyDynamic(..) => {
                        let fn_ty = drop_fn.ty(bx.cx.tcx);
                        let sig = common::ty_fn_sig(bx.cx, fn_ty);
                        let sig = bx.tcx().normalize_erasing_late_bound_regions(
                            ty::ParamEnv::reveal_all(),
                            &sig,
                        );
                        let fn_ty = FnType::new_vtable(bx.cx, sig, &[]);
                        let vtable = args[1];
                        args = &args[..1];
                        (meth::DESTRUCTOR.get_fn(&bx, vtable, &fn_ty), fn_ty)
                    }
                    _ => {
                        (callee::get_fn(bx.cx, drop_fn),
                         FnType::of_instance(bx.cx, &drop_fn))
                    }
                };
                do_call(self, bx, fn_ty, drop_fn, args,
                        Some((ReturnDest::Nothing, target)),
                        unwind);
            }

            mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
                let cond = self.codegen_operand(&bx, cond).immediate();
                let mut const_cond = common::const_to_opt_u128(cond, false).map(|c| c == 1);

                // This case can currently arise only from functions marked
                // with #[rustc_inherit_overflow_checks] and inlined from
                // another crate (mostly core::num generic/#[inline] fns),
                // while the current crate doesn't use overflow checks.
                // NOTE: Unlike binops, negation doesn't have its own
                // checked operation, just a comparison with the minimum
                // value, so we have to check for the assert message.
                if !bx.cx.check_overflow {
                    if let mir::interpret::EvalErrorKind::OverflowNeg = *msg {
                        const_cond = Some(expected);
                    }
                }

                // Don't codegen the panic block if success is known.
                if const_cond == Some(expected) {
                    funclet_br(self, bx, target);
                    return;
                }

                // Pass the condition through llvm.expect for branch hinting.
                let expect = bx.cx.get_intrinsic(&"llvm.expect.i1");
                let cond = bx.call(expect, &[cond, C_bool(bx.cx, expected)], None);

                // Create the failure block and the conditional branch to it.
                let lltarget = llblock(self, target);
                let panic_block = self.new_block("panic");
                if expected {
                    bx.cond_br(cond, lltarget, panic_block.llbb());
                } else {
                    bx.cond_br(cond, panic_block.llbb(), lltarget);
                }

                // After this point, bx is the block for the call to panic.
                bx = panic_block;
                self.set_debug_loc(&bx, terminator.source_info);

                // Get the location information.
                let loc = bx.sess().codemap().lookup_char_pos(span.lo());
                let filename = Symbol::intern(&loc.file.name.to_string()).as_str();
                let filename = C_str_slice(bx.cx, filename);
                let line = C_u32(bx.cx, loc.line as u32);
                let col = C_u32(bx.cx, loc.col.to_usize() as u32 + 1);
                let align = tcx.data_layout.aggregate_align
                    .max(tcx.data_layout.i32_align)
                    .max(tcx.data_layout.pointer_align);

                // Put together the arguments to the panic entry point.
                let (lang_item, args) = match *msg {
                    EvalErrorKind::BoundsCheck { ref len, ref index } => {
                        let len = self.codegen_operand(&mut bx, len).immediate();
                        let index = self.codegen_operand(&mut bx, index).immediate();

                        let file_line_col = C_struct(bx.cx, &[filename, line, col], false);
                        let file_line_col = consts::addr_of(bx.cx,
                                                            file_line_col,
                                                            align,
                                                            "panic_bounds_check_loc");
                        (lang_items::PanicBoundsCheckFnLangItem,
                         vec![file_line_col, index, len])
                    }
                    _ => {
                        let str = msg.description();
                        let msg_str = Symbol::intern(str).as_str();
                        let msg_str = C_str_slice(bx.cx, msg_str);
                        let msg_file_line_col = C_struct(bx.cx,
                                                         &[msg_str, filename, line, col],
                                                         false);
                        let msg_file_line_col = consts::addr_of(bx.cx,
                                                                msg_file_line_col,
                                                                align,
                                                                "panic_loc");
                        (lang_items::PanicFnLangItem,
                         vec![msg_file_line_col])
                    }
                };

                // Obtain the panic entry point.
                let def_id = common::langcall(bx.tcx(), Some(span), "", lang_item);
                let instance = ty::Instance::mono(bx.tcx(), def_id);
                let fn_ty = FnType::of_instance(bx.cx, &instance);
                let llfn = callee::get_fn(bx.cx, instance);

                // Codegen the actual panic invoke/call.
                do_call(self, bx, fn_ty, llfn, &args, None, cleanup);
            }

            mir::TerminatorKind::DropAndReplace { .. } => {
                bug!("undesugared DropAndReplace in codegen: {:?}", terminator);
            }

            mir::TerminatorKind::Call { ref func, ref args, ref destination, cleanup } => {
                // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
                let callee = self.codegen_operand(&bx, func);

                let (instance, mut llfn) = match callee.layout.ty.sty {
                    ty::TyFnDef(def_id, substs) => {
                        (Some(ty::Instance::resolve(bx.cx.tcx,
                                                    ty::ParamEnv::reveal_all(),
                                                    def_id,
                                                    substs).unwrap()),
                         None)
                    }
                    ty::TyFnPtr(_) => {
                        (None, Some(callee.immediate()))
                    }
                    _ => bug!("{} is not callable", callee.layout.ty)
                };
                let def = instance.map(|i| i.def);
                let sig = callee.layout.ty.fn_sig(bx.tcx());
                let sig = bx.tcx().normalize_erasing_late_bound_regions(
                    ty::ParamEnv::reveal_all(),
                    &sig,
                );
                let abi = sig.abi;
                // Handle intrinsics that the old codegen wanted `Expr`s for, ourselves.
                let intrinsic = match def {
                    Some(ty::InstanceDef::Intrinsic(def_id))
                        => Some(bx.tcx().item_name(def_id).as_str()),
                    _ => None
                };
                let intrinsic = intrinsic.as_ref().map(|s| &s[..]);

                if intrinsic == Some("transmute") {
                    if let Some(destination_ref) = destination.as_ref() {
                        let &(ref dest, target) = destination_ref;
                        self.codegen_transmute(&bx, &args[0], dest);
                        funclet_br(self, bx, target);
                    } else {
                        // If we are trying to transmute to an uninhabited type,
                        // it is likely there is no allotted destination. In fact,
                        // transmuting to an uninhabited type is UB, which means
                        // we can do what we like. Here, we declare that transmuting
                        // into an uninhabited type is impossible, so anything following
                        // it must be unreachable.
                        assert_eq!(bx.cx.layout_of(sig.output()).abi, layout::Abi::Uninhabited);
                        bx.unreachable();
                    }
                    return;
                }

                let extra_args = &args[sig.inputs().len()..];
                let extra_args = extra_args.iter().map(|op_arg| {
                    let op_ty = op_arg.ty(self.mir, bx.tcx());
                    self.monomorphize(&op_ty)
                }).collect::<Vec<_>>();

                let fn_ty = match def {
                    Some(ty::InstanceDef::Virtual(..)) => {
                        FnType::new_vtable(bx.cx, sig, &extra_args)
                    }
                    Some(ty::InstanceDef::DropGlue(_, None)) => {
                        // empty drop glue - a nop.
                        let &(_, target) = destination.as_ref().unwrap();
                        funclet_br(self, bx, target);
                        return;
                    }
                    _ => FnType::new(bx.cx, sig, &extra_args)
                };

                // The arguments we'll be passing. Plus one to account for outptr, if used.
                let arg_count = fn_ty.args.len() + fn_ty.ret.is_indirect() as usize;
                let mut llargs = Vec::with_capacity(arg_count);

                // Prepare the return value destination
                let ret_dest = if let Some((ref dest, _)) = *destination {
                    let is_intrinsic = intrinsic.is_some();
                    self.make_return_dest(&bx, dest, &fn_ty.ret, &mut llargs,
                                          is_intrinsic)
                } else {
                    ReturnDest::Nothing
                };

                if intrinsic.is_some() && intrinsic != Some("drop_in_place") {
                    use intrinsic::codegen_intrinsic_call;

                    let dest = match ret_dest {
                        _ if fn_ty.ret.is_indirect() => llargs[0],
                        ReturnDest::Nothing => {
                            C_undef(fn_ty.ret.memory_ty(bx.cx).ptr_to())
                        }
                        ReturnDest::IndirectOperand(dst, _) |
                        ReturnDest::Store(dst) => dst.llval,
                        ReturnDest::DirectOperand(_) =>
                            bug!("Cannot use direct operand with an intrinsic call")
                    };

                    let args: Vec<_> = args.iter().enumerate().map(|(i, arg)| {
                        // The indices passed to simd_shuffle* in the
                        // third argument must be constant. This is
                        // checked by const-qualification, which also
                        // promotes any complex rvalues to constants.
                        if i == 2 && intrinsic.unwrap().starts_with("simd_shuffle") {
                            match *arg {
                                // The shuffle array argument is usually not an explicit constant,
                                // but specified directly in the code. This means it gets promoted
                                // and we can then extract the value by evaluating the promoted.
                                mir::Operand::Copy(mir::Place::Promoted(box(index, ty))) |
                                mir::Operand::Move(mir::Place::Promoted(box(index, ty))) => {
                                    let param_env = ty::ParamEnv::reveal_all();
                                    let cid = mir::interpret::GlobalId {
                                        instance: self.instance,
                                        promoted: Some(index),
                                    };
                                    let c = bx.tcx().const_eval(param_env.and(cid));
                                    let (llval, ty) = self.simd_shuffle_indices(
                                        &bx,
                                        terminator.source_info.span,
                                        ty,
                                        c,
                                    );
                                    return OperandRef {
                                        val: Immediate(llval),
                                        layout: bx.cx.layout_of(ty),
                                    };
                                },
                                mir::Operand::Copy(_) |
                                mir::Operand::Move(_) => {
                                    span_bug!(span, "shuffle indices must be constant");
                                }
                                mir::Operand::Constant(ref constant) => {
                                    let c = self.eval_mir_constant(&bx, constant);
                                    let (llval, ty) = self.simd_shuffle_indices(
                                        &bx,
                                        constant.span,
                                        constant.ty,
                                        c,
                                    );
                                    return OperandRef {
                                        val: Immediate(llval),
                                        layout: bx.cx.layout_of(ty)
                                    };
                                }
                            }
                        }

                        self.codegen_operand(&bx, arg)
                    }).collect();

                    let callee_ty = instance.as_ref().unwrap().ty(bx.cx.tcx);
                    codegen_intrinsic_call(&bx, callee_ty, &fn_ty, &args, dest,
                                           terminator.source_info.span);

                    if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
                        self.store_return(&bx, ret_dest, &fn_ty.ret, dst.llval);
                    }

                    if let Some((_, target)) = *destination {
                        funclet_br(self, bx, target);
                    } else {
                        bx.unreachable();
                    }

                    return;
                }

                // Split the rust-call tupled arguments off.
                let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
                    let (tup, args) = args.split_last().unwrap();
                    (args, Some(tup))
                } else {
                    (&args[..], None)
                };

                for (i, arg) in first_args.iter().enumerate() {
                    let mut op = self.codegen_operand(&bx, arg);
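                    // For a virtual call, the first argument is the fat
                    // `self` pointer: split it into its data and vtable
                    // halves, resolve the method through the vtable, and
                    // pass only the data pointer on.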
                    if let (0, Some(ty::InstanceDef::Virtual(_, idx))) = (i, def) {
                        if let Pair(data_ptr, meta) = op.val {
                            llfn = Some(meth::VirtualIndex::from_index(idx)
                                .get_fn(&bx, meta, &fn_ty));
                            llargs.push(data_ptr);
                            continue;
                        }
                    }

                    // The callee needs to own the argument memory if we pass it
                    // by-ref, so make a local copy of non-immediate constants.
                    match (arg, op.val) {
                        (&mir::Operand::Copy(_), Ref(..)) |
                        (&mir::Operand::Constant(_), Ref(..)) => {
                            let tmp = PlaceRef::alloca(&bx, op.layout, "const");
                            op.val.store(&bx, tmp);
                            op.val = Ref(tmp.llval, tmp.align);
                        }
                        _ => {}
                    }

                    self.codegen_argument(&bx, op, &mut llargs, &fn_ty.args[i]);
                }
                if let Some(tup) = untuple {
                    self.codegen_arguments_untupled(&bx, tup, &mut llargs,
                        &fn_ty.args[first_args.len()..])
                }

                let fn_ptr = match (llfn, instance) {
                    (Some(llfn), _) => llfn,
                    (None, Some(instance)) => callee::get_fn(bx.cx, instance),
                    _ => span_bug!(span, "no llfn for call"),
                };

                do_call(self, bx, fn_ty, fn_ptr, &llargs,
                        destination.as_ref().map(|&(_, target)| (ret_dest, target)),
                        cleanup);
            }
            mir::TerminatorKind::GeneratorDrop |
            mir::TerminatorKind::Yield { .. } => bug!("generator ops in codegen"),
            mir::TerminatorKind::FalseEdges { .. } |
            mir::TerminatorKind::FalseUnwind { .. } => bug!("borrowck false edges in codegen"),
        }
    }

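    /// Lower a single call argument into `llargs` according to its ABI
    /// classification (`ArgType`), spilling to a stack slot when the ABI
    /// wants the value by-ref or through a cast.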
    fn codegen_argument(&mut self,
                        bx: &Builder<'a, 'll, 'tcx>,
                        op: OperandRef<'ll, 'tcx>,
                        llargs: &mut Vec<&'ll Value>,
                        arg: &ArgType<'tcx, Ty<'tcx>>) {
        // Fill padding with undef value, where applicable.
        if let Some(ty) = arg.pad {
            llargs.push(C_undef(ty.llvm_type(bx.cx)));
        }

        if arg.is_ignore() {
            return;
        }

        if let PassMode::Pair(..) = arg.mode {
            match op.val {
                Pair(a, b) => {
                    llargs.push(a);
                    llargs.push(b);
                    return;
                }
                _ => bug!("codegen_argument: {:?} invalid for pair argument", op)
            }
        }

        // Force by-ref if we have to load through a cast pointer.
        let (mut llval, align, by_ref) = match op.val {
            Immediate(_) | Pair(..) => {
                match arg.mode {
                    PassMode::Indirect(_) | PassMode::Cast(_) => {
                        let scratch = PlaceRef::alloca(bx, arg.layout, "arg");
                        op.val.store(bx, scratch);
                        (scratch.llval, scratch.align, true)
                    }
                    _ => {
                        (op.immediate_or_packed_pair(bx), arg.layout.align, false)
                    }
                }
            }
            Ref(llval, align) => {
                if arg.is_indirect() && align.abi() < arg.layout.align.abi() {
                    // `foo(packed.large_field)`. We can't pass the (unaligned) field directly. I
                    // think that ATM (Rust 1.16) we only pass temporaries, but we shouldn't
                    // have scary latent bugs around.

                    let scratch = PlaceRef::alloca(bx, arg.layout, "arg");
                    base::memcpy_ty(bx, scratch.llval, llval, op.layout, align, MemFlags::empty());
                    (scratch.llval, scratch.align, true)
                } else {
                    (llval, align, true)
                }
            }
        };

        if by_ref && !arg.is_indirect() {
            // Have to load the argument, maybe while casting it.
            if let PassMode::Cast(ty) = arg.mode {
                llval = bx.load(bx.pointercast(llval, ty.llvm_type(bx.cx).ptr_to()),
                                align.min(arg.layout.align));
            } else {
                // We can't use `PlaceRef::load` here because the argument
                // may have a type we don't treat as immediate, but the ABI
                // used for this call is passing it by-value. In that case,
                // the load would just produce `OperandValue::Ref` instead
                // of the `OperandValue::Immediate` we need for the call.
                llval = bx.load(llval, align);
                if let layout::Abi::Scalar(ref scalar) = arg.layout.abi {
                    if scalar.is_bool() {
                        bx.range_metadata(llval, 0..2);
                    }
                }
                // We store bools as i8 so we need to truncate to i1.
                llval = base::to_immediate(bx, llval, arg.layout);
            }
        }

        llargs.push(llval);
    }

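    /// Expand a "rust-call" tuple argument into the individual arguments it
    /// carries, lowering each element with `codegen_argument`.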
    fn codegen_arguments_untupled(&mut self,
                                  bx: &Builder<'a, 'll, 'tcx>,
                                  operand: &mir::Operand<'tcx>,
                                  llargs: &mut Vec<&'ll Value>,
                                  args: &[ArgType<'tcx, Ty<'tcx>>]) {
        let tuple = self.codegen_operand(bx, operand);

        // Handle both by-ref and immediate tuples.
        if let Ref(llval, align) = tuple.val {
            let tuple_ptr = PlaceRef::new_sized(llval, tuple.layout, align);
            for i in 0..tuple.layout.fields.count() {
                let field_ptr = tuple_ptr.project_field(bx, i);
                self.codegen_argument(bx, field_ptr.load(bx), llargs, &args[i]);
            }
        } else {
            // If the tuple is immediate, the elements are as well.
            for i in 0..tuple.layout.fields.count() {
                let op = tuple.extract_field(bx, i);
                self.codegen_argument(bx, op, llargs, &args[i]);
            }
        }
    }

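    /// Return the (lazily created) stack slot that cleanup blocks use to
    /// stash the exception pointer and selector across the landing pad.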
    fn get_personality_slot(&mut self, bx: &Builder<'a, 'll, 'tcx>) -> PlaceRef<'ll, 'tcx> {
        let cx = bx.cx;
        if let Some(slot) = self.personality_slot {
            slot
        } else {
            let layout = cx.layout_of(cx.tcx.intern_tup(&[
                cx.tcx.mk_mut_ptr(cx.tcx.types.u8),
                cx.tcx.types.i32
            ]));
            let slot = PlaceRef::alloca(bx, layout, "personalityslot");
            self.personality_slot = Some(slot);
            slot
        }
    }

    /// Return the landingpad wrapper around the given basic block
    ///
    /// No-op in MSVC SEH scheme.
    fn landing_pad_to(&mut self, target_bb: mir::BasicBlock) -> &'ll BasicBlock {
        if let Some(block) = self.landing_pads[target_bb] {
            return block;
        }

        let block = self.blocks[target_bb];
        let landing_pad = self.landing_pad_uncached(block);
        self.landing_pads[target_bb] = Some(landing_pad);
        landing_pad
    }

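    /// Build a fresh GNU-style cleanup block: emit the `landingpad`
    /// instruction, spill its {exception pointer, selector} pair into the
    /// personality slot, and branch on to `target_bb`.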
    fn landing_pad_uncached(&mut self, target_bb: &'ll BasicBlock) -> &'ll BasicBlock {
        if base::wants_msvc_seh(self.cx.sess()) {
            span_bug!(self.mir.span, "landing pad was not inserted?")
        }

        let bx = self.new_block("cleanup");

        let llpersonality = self.cx.eh_personality();
        let llretty = self.landing_pad_type();
        let lp = bx.landing_pad(llretty, llpersonality, 1);
        bx.set_cleanup(lp);

        let slot = self.get_personality_slot(&bx);
        slot.storage_live(&bx);
        Pair(bx.extract_value(lp, 0), bx.extract_value(lp, 1)).store(&bx, slot);

        bx.br(target_bb);
        bx.llbb()
    }

    fn landing_pad_type(&self) -> &'ll Type {
        let cx = self.cx;
        Type::struct_(cx, &[Type::i8p(cx), Type::i32(cx)], false)
    }

    fn unreachable_block(&mut self) -> &'ll BasicBlock {
        self.unreachable_block.unwrap_or_else(|| {
            let bl = self.new_block("unreachable");
            bl.unreachable();
            self.unreachable_block = Some(bl.llbb());
            bl.llbb()
        })
    }

    pub fn new_block(&self, name: &str) -> Builder<'a, 'll, 'tcx> {
        Builder::new_block(self.cx, self.llfn, name)
    }

    pub fn build_block(&self, bb: mir::BasicBlock) -> Builder<'a, 'll, 'tcx> {
        let bx = Builder::with_cx(self.cx);
        bx.position_at_end(self.blocks[bb]);
        bx
    }

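    /// Decide where a call's return value should go: nowhere, into an
    /// existing place, or into a temporary that is loaded back into an
    /// operand local. Pushes the out-pointer onto `llargs` for indirect
    /// returns.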
    fn make_return_dest(&mut self, bx: &Builder<'a, 'll, 'tcx>,
                        dest: &mir::Place<'tcx>, fn_ret: &ArgType<'tcx, Ty<'tcx>>,
                        llargs: &mut Vec<&'ll Value>, is_intrinsic: bool)
                        -> ReturnDest<'ll, 'tcx> {
        // If the return is ignored, we can just return a do-nothing ReturnDest
        if fn_ret.is_ignore() {
            return ReturnDest::Nothing;
        }
        let dest = if let mir::Place::Local(index) = *dest {
            match self.locals[index] {
                LocalRef::Place(dest) => dest,
                LocalRef::Operand(None) => {
                    // Handle temporary places, specifically Operand ones, as
                    // they don't have allocas
                    return if fn_ret.is_indirect() {
                        // Odd, but possible, case: we have an operand temporary,
                        // but the calling convention has an indirect return.
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout, "tmp_ret");
                        tmp.storage_live(bx);
                        llargs.push(tmp.llval);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else if is_intrinsic {
                        // Currently, intrinsics always need a location to store
                        // the result, so we create a temporary alloca for the
                        // result.
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout, "tmp_ret");
                        tmp.storage_live(bx);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else {
                        ReturnDest::DirectOperand(index)
                    };
                }
                LocalRef::Operand(Some(_)) => {
                    bug!("place local already assigned to");
                }
            }
        } else {
            self.codegen_place(bx, dest)
        };
        if fn_ret.is_indirect() {
            if dest.align.abi() < dest.layout.align.abi() {
                // Currently, MIR code generation does not create calls
                // that store directly to fields of packed structs (in
                // fact, the calls it creates write only to temps).
                //
                // If someone changes that, please update this code path
                // to create a temporary.
                span_bug!(self.mir.span, "can't directly store to unaligned value");
            }
            llargs.push(dest.llval);
            ReturnDest::Nothing
        } else {
            ReturnDest::Store(dest)
        }
    }

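    /// Codegen a `transmute` whose destination is a MIR place, materializing
    /// a temporary when the destination local is an SSA operand without a
    /// backing alloca.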
    fn codegen_transmute(&mut self, bx: &Builder<'a, 'll, 'tcx>,
                         src: &mir::Operand<'tcx>,
                         dst: &mir::Place<'tcx>) {
        if let mir::Place::Local(index) = *dst {
            match self.locals[index] {
                LocalRef::Place(place) => self.codegen_transmute_into(bx, src, place),
                LocalRef::Operand(None) => {
                    let dst_layout = bx.cx.layout_of(self.monomorphized_place_ty(dst));
                    assert!(!dst_layout.ty.has_erasable_regions());
                    let place = PlaceRef::alloca(bx, dst_layout, "transmute_temp");
                    place.storage_live(bx);
                    self.codegen_transmute_into(bx, src, place);
                    let op = place.load(bx);
                    place.storage_dead(bx);
                    self.locals[index] = LocalRef::Operand(Some(op));
                }
                LocalRef::Operand(Some(op)) => {
                    assert!(op.layout.is_zst(),
                            "assigning to initialized SSA temp");
                }
            }
        } else {
            let dst = self.codegen_place(bx, dst);
            self.codegen_transmute_into(bx, src, dst);
        }
    }

    fn codegen_transmute_into(&mut self, bx: &Builder<'a, 'll, 'tcx>,
                              src: &mir::Operand<'tcx>,
                              dst: PlaceRef<'ll, 'tcx>) {
        let src = self.codegen_operand(bx, src);
        let llty = src.layout.llvm_type(bx.cx);
        let cast_ptr = bx.pointercast(dst.llval, llty.ptr_to());
        let align = src.layout.align.min(dst.layout.align);
        src.val.store(bx, PlaceRef::new_sized(cast_ptr, src.layout, align));
    }

    // Stores the return value of a function call into its final location.
    fn store_return(&mut self,
                    bx: &Builder<'a, 'll, 'tcx>,
                    dest: ReturnDest<'ll, 'tcx>,
                    ret_ty: &ArgType<'tcx, Ty<'tcx>>,
                    llval: &'ll Value) {
        use self::ReturnDest::*;

        match dest {
            Nothing => (),
            Store(dst) => ret_ty.store(bx, llval, dst),
            IndirectOperand(tmp, index) => {
                let op = tmp.load(bx);
                tmp.storage_dead(bx);
                self.locals[index] = LocalRef::Operand(Some(op));
            }
            DirectOperand(index) => {
                // If there is a cast, we have to store and reload.
                let op = if let PassMode::Cast(_) = ret_ty.mode {
                    let tmp = PlaceRef::alloca(bx, ret_ty.layout, "tmp_ret");
                    tmp.storage_live(bx);
                    ret_ty.store(bx, llval, tmp);
                    let op = tmp.load(bx);
                    tmp.storage_dead(bx);
                    op
                } else {
                    OperandRef::from_immediate_or_packed_pair(bx, llval, ret_ty.layout)
                };
                self.locals[index] = LocalRef::Operand(Some(op));
            }
        }
    }
}

enum ReturnDest<'ll, 'tcx> {
    // Do nothing, the return value is indirect or ignored
    Nothing,
    // Store the return value to the pointer
    Store(PlaceRef<'ll, 'tcx>),
    // Stores an indirect return value to an operand local place
    IndirectOperand(PlaceRef<'ll, 'tcx>, mir::Local),
    // Stores a direct return value to an operand local place
    DirectOperand(mir::Local)
}