// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use llvm::{self, BasicBlock};
use rustc::middle::lang_items;
use rustc::ty::{self, Ty, TypeFoldable};
use rustc::ty::layout::{self, LayoutOf};
use rustc::mir;
use rustc::mir::interpret::EvalErrorKind;
use abi::{Abi, ArgType, ArgTypeExt, FnType, FnTypeExt, LlvmType, PassMode};
use base;
use callee;
use builder::{Builder, MemFlags};
use common::{self, C_bool, C_str_slice, C_struct, C_u32, C_uint_big, C_undef};
use consts;
use meth;
use monomorphize;
use type_of::LayoutLlvmExt;
use type_::Type;
use value::Value;

use syntax::symbol::Symbol;
use syntax_pos::Pos;

use super::{FunctionCx, LocalRef};
use super::place::PlaceRef;
use super::operand::OperandRef;
use super::operand::OperandValue::{Pair, Ref, UnsizedRef, Immediate};

impl FunctionCx<'a, 'll, 'tcx> {
    pub fn codegen_block(&mut self, bb: mir::BasicBlock) {
        let mut bx = self.build_block(bb);
        let data = &self.mir[bb];

        debug!("codegen_block({:?}={:?})", bb, data);

        for statement in &data.statements {
            bx = self.codegen_statement(bx, statement);
        }

        self.codegen_terminator(bx, bb, data.terminator());
    }

    fn codegen_terminator(&mut self,
                        mut bx: Builder<'a, 'll, 'tcx>,
                        bb: mir::BasicBlock,
                        terminator: &mir::Terminator<'tcx>)
    {
        debug!("codegen_terminator: {:?}", terminator);

        // Create the cleanup bundle, if needed.
        let tcx = bx.tcx();
        let span = terminator.source_info.span;
        let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);
        let funclet = funclet_bb.and_then(|funclet_bb| self.funclets[funclet_bb].as_ref());

        let cleanup_pad = funclet.map(|lp| lp.cleanuppad());
        let cleanup_bundle = funclet.map(|l| l.bundle());

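        // Resolve a MIR target block to the LLVM block to branch to,
        // wrapping it in a landing pad when the edge jumps *into* a cleanup
        // block. The second element reports whether the branch must be
        // lowered as a `cleanupret` (an MSVC cross-funclet jump).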
        let lltarget = |this: &mut Self, target: mir::BasicBlock| {
            let lltarget = this.blocks[target];
            let target_funclet = this.cleanup_kinds[target].funclet_bb(target);
            match (funclet_bb, target_funclet) {
                (None, None) => (lltarget, false),
                (Some(f), Some(t_f))
                    if f == t_f || !base::wants_msvc_seh(tcx.sess)
                    => (lltarget, false),
                (None, Some(_)) => {
                    // jump *into* cleanup - need a landing pad if GNU
                    (this.landing_pad_to(target), false)
                }
                (Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", terminator),
                (Some(_), Some(_)) => {
                    (this.landing_pad_to(target), true)
                }
            }
        };

        let llblock = |this: &mut Self, target: mir::BasicBlock| {
            let (lltarget, is_cleanupret) = lltarget(this, target);
            if is_cleanupret {
                // MSVC cross-funclet jump - need a trampoline

                debug!("llblock: creating cleanup trampoline for {:?}", target);
                let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
                let trampoline = this.new_block(name);
                trampoline.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
                trampoline.llbb()
            } else {
                lltarget
            }
        };

        let funclet_br = |this: &mut Self, bx: Builder<'_, 'll, '_>, target: mir::BasicBlock| {
            let (lltarget, is_cleanupret) = lltarget(this, target);
            if is_cleanupret {
                // micro-optimization: generate a `ret` rather than a jump
                // to a trampoline.
                bx.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
            } else {
                bx.br(lltarget);
            }
        };

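        // Emit the actual call: an `invoke` when there is an unwind edge
        // (so control can transfer to the cleanup block on panic), otherwise
        // a plain `call`. Also applies the call-site ABI attributes and
        // stores the return value into its destination.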
        let do_call = |
            this: &mut Self,
            bx: Builder<'a, 'll, 'tcx>,
            fn_ty: FnType<'tcx, Ty<'tcx>>,
            fn_ptr: &'ll Value,
            llargs: &[&'ll Value],
            destination: Option<(ReturnDest<'ll, 'tcx>, mir::BasicBlock)>,
            cleanup: Option<mir::BasicBlock>
        | {
            if let Some(cleanup) = cleanup {
                let ret_bx = if let Some((_, target)) = destination {
                    this.blocks[target]
                } else {
                    this.unreachable_block()
                };
                let invokeret = bx.invoke(fn_ptr,
                                           &llargs,
                                           ret_bx,
                                           llblock(this, cleanup),
                                           cleanup_bundle);
                fn_ty.apply_attrs_callsite(&bx, invokeret);

                if let Some((ret_dest, target)) = destination {
                    let ret_bx = this.build_block(target);
                    this.set_debug_loc(&ret_bx, terminator.source_info);
                    this.store_return(&ret_bx, ret_dest, &fn_ty.ret, invokeret);
                }
            } else {
                let llret = bx.call(fn_ptr, &llargs, cleanup_bundle);
                fn_ty.apply_attrs_callsite(&bx, llret);
                if this.mir[bb].is_cleanup {
                    // Cleanup is always the cold path. Don't inline
                    // drop glue. Also, when there is a deeply-nested
                    // struct, there are "symmetry" issues that cause
                    // exponential inlining - see issue #41696.
                    llvm::Attribute::NoInline.apply_callsite(llvm::AttributePlace::Function, llret);
                }

                if let Some((ret_dest, target)) = destination {
                    this.store_return(&bx, ret_dest, &fn_ty.ret, llret);
                    funclet_br(this, bx, target);
                } else {
                    bx.unreachable();
                }
            }
        };

        self.set_debug_loc(&bx, terminator.source_info);
        match terminator.kind {
            mir::TerminatorKind::Resume => {
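                // On MSVC, resuming unwinding is a `cleanupret` out of the
                // funclet; elsewhere, re-raise the exception values that the
                // landing pad saved in the personality slot.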
                if let Some(cleanup_pad) = cleanup_pad {
                    bx.cleanup_ret(cleanup_pad, None);
                } else {
                    let slot = self.get_personality_slot(&bx);
                    let lp0 = slot.project_field(&bx, 0).load(&bx).immediate();
                    let lp1 = slot.project_field(&bx, 1).load(&bx).immediate();
                    slot.storage_dead(&bx);

                    if !bx.sess().target.target.options.custom_unwind_resume {
                        let mut lp = C_undef(self.landing_pad_type());
                        lp = bx.insert_value(lp, lp0, 0);
                        lp = bx.insert_value(lp, lp1, 1);
                        bx.resume(lp);
                    } else {
                        bx.call(bx.cx.eh_unwind_resume(), &[lp0], cleanup_bundle);
                        bx.unreachable();
                    }
                }
            }

            mir::TerminatorKind::Abort => {
                // Call core::intrinsics::abort()
                let fnname = bx.cx.get_intrinsic(&("llvm.trap"));
                bx.call(fnname, &[], None);
                bx.unreachable();
            }

            mir::TerminatorKind::Goto { target } => {
                funclet_br(self, bx, target);
            }

            mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref values, ref targets } => {
                let discr = self.codegen_operand(&bx, discr);
                if targets.len() == 2 {
                    // If there are two targets, emit br instead of switch
                    let lltrue = llblock(self, targets[0]);
                    let llfalse = llblock(self, targets[1]);
                    if switch_ty == bx.tcx().types.bool {
                        // Don't generate trivial icmps when switching on bool
                        if let [0] = values[..] {
                            bx.cond_br(discr.immediate(), llfalse, lltrue);
                        } else {
                            assert_eq!(&values[..], &[1]);
                            bx.cond_br(discr.immediate(), lltrue, llfalse);
                        }
                    } else {
                        let switch_llty = bx.cx.layout_of(switch_ty).immediate_llvm_type(bx.cx);
                        let llval = C_uint_big(switch_llty, values[0]);
                        let cmp = bx.icmp(llvm::IntEQ, discr.immediate(), llval);
                        bx.cond_br(cmp, lltrue, llfalse);
                    }
                } else {
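                    // `SwitchInt` encodes the otherwise target as the last
                    // element of `targets`, with one value per remaining
                    // target.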
                    let (otherwise, targets) = targets.split_last().unwrap();
                    let switch = bx.switch(discr.immediate(),
                                           llblock(self, *otherwise), values.len());
                    let switch_llty = bx.cx.layout_of(switch_ty).immediate_llvm_type(bx.cx);
                    for (&value, target) in values.iter().zip(targets) {
                        let llval = C_uint_big(switch_llty, value);
                        let llbb = llblock(self, *target);
                        bx.add_case(switch, llval, llbb)
                    }
                }
            }

            mir::TerminatorKind::Return => {
                let llval = match self.fn_ty.ret.mode {
                    PassMode::Ignore | PassMode::Indirect(_) => {
                        bx.ret_void();
                        return;
                    }

                    PassMode::Direct(_) | PassMode::Pair(..) => {
                        let op = self.codegen_consume(&bx, &mir::Place::Local(mir::RETURN_PLACE));
                        if let Ref(llval, align) = op.val {
                            bx.load(llval, align)
                        } else if let UnsizedRef(..) = op.val {
                            bug!("return type must be sized");
                        } else {
                            op.immediate_or_packed_pair(&bx)
                        }
                    }

                    PassMode::Cast(cast_ty) => {
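                        // The ABI wants the return value in a type other
                        // than its Rust layout, so spill it to a stack slot
                        // if needed and load it back through a pointer cast.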
                        let op = match self.locals[mir::RETURN_PLACE] {
                            LocalRef::Operand(Some(op)) => op,
                            LocalRef::Operand(None) => bug!("use of return before def"),
                            LocalRef::Place(cg_place) => {
                                OperandRef {
                                    val: Ref(cg_place.llval, cg_place.align),
                                    layout: cg_place.layout
                                }
                            }
                            LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                        };
                        let llslot = match op.val {
                            Immediate(_) | Pair(..) => {
                                let scratch = PlaceRef::alloca(&bx, self.fn_ty.ret.layout, "ret");
                                op.val.store(&bx, scratch);
                                scratch.llval
                            }
                            Ref(llval, align) => {
                                assert_eq!(align.abi(), op.layout.align.abi(),
                                           "return place is unaligned!");
                                llval
                            }
                            UnsizedRef(..) => bug!("return type must be sized"),
                        };
                        bx.load(
                            bx.pointercast(llslot, cast_ty.llvm_type(bx.cx).ptr_to()),
                            self.fn_ty.ret.layout.align)
                    }

                    PassMode::UnsizedIndirect(..) => bug!("return value must be sized"),
                };
                bx.ret(llval);
            }

            mir::TerminatorKind::Unreachable => {
                bx.unreachable();
            }

            mir::TerminatorKind::Drop { ref location, target, unwind } => {
                let ty = location.ty(self.mir, bx.tcx()).to_ty(bx.tcx());
                let ty = self.monomorphize(&ty);
                let drop_fn = monomorphize::resolve_drop_in_place(bx.cx.tcx, ty);

                if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
                    // we don't actually need to drop anything.
                    funclet_br(self, bx, target);
                    return
                }

                let place = self.codegen_place(&bx, location);
                let (args1, args2);
                let mut args = if let Some(llextra) = place.llextra {
                    args2 = [place.llval, llextra];
                    &args2[..]
                } else {
                    args1 = [place.llval];
                    &args1[..]
                };
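                // Trait objects are dropped through the destructor slot of
                // their vtable; everything else calls its `drop_in_place`
                // glue directly.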
                let (drop_fn, fn_ty) = match ty.sty {
                    ty::TyDynamic(..) => {
                        let fn_ty = drop_fn.ty(bx.cx.tcx);
                        let sig = common::ty_fn_sig(bx.cx, fn_ty);
                        let sig = bx.tcx().normalize_erasing_late_bound_regions(
                            ty::ParamEnv::reveal_all(),
                            &sig,
                        );
                        let fn_ty = FnType::new_vtable(bx.cx, sig, &[]);
                        let vtable = args[1];
                        args = &args[..1];
                        (meth::DESTRUCTOR.get_fn(&bx, vtable, &fn_ty), fn_ty)
                    }
                    _ => {
                        (callee::get_fn(bx.cx, drop_fn),
                         FnType::of_instance(bx.cx, &drop_fn))
                    }
                };
                do_call(self, bx, fn_ty, drop_fn, args,
                        Some((ReturnDest::Nothing, target)),
                        unwind);
            }

            mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
                let cond = self.codegen_operand(&bx, cond).immediate();
                let mut const_cond = common::const_to_opt_u128(cond, false).map(|c| c == 1);

                // This case can currently arise only from functions marked
                // with #[rustc_inherit_overflow_checks] and inlined from
                // another crate (mostly core::num generic/#[inline] fns),
                // while the current crate doesn't use overflow checks.
                // NOTE: Unlike binops, negation doesn't have its own
                // checked operation, just a comparison with the minimum
                // value, so we have to check for the assert message.
                if !bx.cx.check_overflow {
                    if let mir::interpret::EvalErrorKind::OverflowNeg = *msg {
                        const_cond = Some(expected);
                    }
                }

                // Don't codegen the panic block if success is known.
                if const_cond == Some(expected) {
                    funclet_br(self, bx, target);
                    return;
                }

                // Pass the condition through llvm.expect for branch hinting.
                let expect = bx.cx.get_intrinsic(&"llvm.expect.i1");
                let cond = bx.call(expect, &[cond, C_bool(bx.cx, expected)], None);

                // Create the failure block and the conditional branch to it.
                let lltarget = llblock(self, target);
                let panic_block = self.new_block("panic");
                if expected {
                    bx.cond_br(cond, lltarget, panic_block.llbb());
                } else {
                    bx.cond_br(cond, panic_block.llbb(), lltarget);
                }

                // After this point, bx is the block for the call to panic.
                bx = panic_block;
                self.set_debug_loc(&bx, terminator.source_info);

                // Get the location information.
                let loc = bx.sess().codemap().lookup_char_pos(span.lo());
                let filename = Symbol::intern(&loc.file.name.to_string()).as_str();
                let filename = C_str_slice(bx.cx, filename);
                let line = C_u32(bx.cx, loc.line as u32);
                let col = C_u32(bx.cx, loc.col.to_usize() as u32 + 1);
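                // The location constant holds string slices (pointer and
                // length) plus `u32`s, so align it to the strictest of those
                // requirements.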
                let align = tcx.data_layout.aggregate_align
                    .max(tcx.data_layout.i32_align)
                    .max(tcx.data_layout.pointer_align);

                // Put together the arguments to the panic entry point.
                let (lang_item, args) = match *msg {
                    EvalErrorKind::BoundsCheck { ref len, ref index } => {
                        let len = self.codegen_operand(&mut bx, len).immediate();
                        let index = self.codegen_operand(&mut bx, index).immediate();

                        let file_line_col = C_struct(bx.cx, &[filename, line, col], false);
                        let file_line_col = consts::addr_of(bx.cx,
                                                            file_line_col,
                                                            align,
                                                            Some("panic_bounds_check_loc"));
                        (lang_items::PanicBoundsCheckFnLangItem,
                         vec![file_line_col, index, len])
                    }
                    _ => {
                        let str = msg.description();
                        let msg_str = Symbol::intern(str).as_str();
                        let msg_str = C_str_slice(bx.cx, msg_str);
                        let msg_file_line_col = C_struct(bx.cx,
                                                         &[msg_str, filename, line, col],
                                                         false);
                        let msg_file_line_col = consts::addr_of(bx.cx,
                                                                msg_file_line_col,
                                                                align,
                                                                Some("panic_loc"));
                        (lang_items::PanicFnLangItem,
                         vec![msg_file_line_col])
                    }
                };

                // Obtain the panic entry point.
                let def_id = common::langcall(bx.tcx(), Some(span), "", lang_item);
                let instance = ty::Instance::mono(bx.tcx(), def_id);
                let fn_ty = FnType::of_instance(bx.cx, &instance);
                let llfn = callee::get_fn(bx.cx, instance);

                // Codegen the actual panic invoke/call.
                do_call(self, bx, fn_ty, llfn, &args, None, cleanup);
            }

            mir::TerminatorKind::DropAndReplace { .. } => {
                bug!("undesugared DropAndReplace in codegen: {:?}", terminator);
            }

            mir::TerminatorKind::Call { ref func, ref args, ref destination, cleanup } => {
                // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
                let callee = self.codegen_operand(&bx, func);

                let (instance, mut llfn) = match callee.layout.ty.sty {
                    ty::TyFnDef(def_id, substs) => {
                        (Some(ty::Instance::resolve(bx.cx.tcx,
                                                    ty::ParamEnv::reveal_all(),
                                                    def_id,
                                                    substs).unwrap()),
                         None)
                    }
                    ty::TyFnPtr(_) => {
                        (None, Some(callee.immediate()))
                    }
                    _ => bug!("{} is not callable", callee.layout.ty)
                };
                let def = instance.map(|i| i.def);
                let sig = callee.layout.ty.fn_sig(bx.tcx());
                let sig = bx.tcx().normalize_erasing_late_bound_regions(
                    ty::ParamEnv::reveal_all(),
                    &sig,
                );
                let abi = sig.abi;

                // Handle intrinsics that old codegen wants Expr's for, ourselves.
                let intrinsic = match def {
                    Some(ty::InstanceDef::Intrinsic(def_id))
                        => Some(bx.tcx().item_name(def_id).as_str()),
                    _ => None
                };
                let intrinsic = intrinsic.as_ref().map(|s| &s[..]);

                if intrinsic == Some("transmute") {
                    if let Some(destination_ref) = destination.as_ref() {
                        let &(ref dest, target) = destination_ref;
                        self.codegen_transmute(&bx, &args[0], dest);
                        funclet_br(self, bx, target);
                    } else {
                        // If we are trying to transmute to an uninhabited type,
                        // it is likely there is no allotted destination. In fact,
                        // transmuting to an uninhabited type is UB, which means
                        // we can do what we like. Here, we declare that transmuting
                        // into an uninhabited type is impossible, so anything following
                        // it must be unreachable.
                        assert_eq!(bx.cx.layout_of(sig.output()).abi, layout::Abi::Uninhabited);
                        bx.unreachable();
                    }
                    return;
                }
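                // Arguments past the declared inputs (as in a C-variadic
                // call) still need their monomorphized types to compute the
                // ABI.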
                let extra_args = &args[sig.inputs().len()..];
                let extra_args = extra_args.iter().map(|op_arg| {
                    let op_ty = op_arg.ty(self.mir, bx.tcx());
                    self.monomorphize(&op_ty)
                }).collect::<Vec<_>>();

                let fn_ty = match def {
                    Some(ty::InstanceDef::Virtual(..)) => {
                        FnType::new_vtable(bx.cx, sig, &extra_args)
                    }
                    Some(ty::InstanceDef::DropGlue(_, None)) => {
                        // empty drop glue - a nop.
                        let &(_, target) = destination.as_ref().unwrap();
                        funclet_br(self, bx, target);
                        return;
                    }
                    _ => FnType::new(bx.cx, sig, &extra_args)
                };

                // The arguments we'll be passing. Plus one to account for outptr, if used.
                let arg_count = fn_ty.args.len() + fn_ty.ret.is_indirect() as usize;
                let mut llargs = Vec::with_capacity(arg_count);

                // Prepare the return value destination
                let ret_dest = if let Some((ref dest, _)) = *destination {
                    let is_intrinsic = intrinsic.is_some();
                    self.make_return_dest(&bx, dest, &fn_ty.ret, &mut llargs,
                                          is_intrinsic)
                } else {
                    ReturnDest::Nothing
                };

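                // Intrinsics (apart from `drop_in_place`, which lowers to a
                // regular call) are codegenned inline instead of through a
                // function call.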
                if intrinsic.is_some() && intrinsic != Some("drop_in_place") {
                    use intrinsic::codegen_intrinsic_call;

                    let dest = match ret_dest {
                        _ if fn_ty.ret.is_indirect() => llargs[0],
                        ReturnDest::Nothing => {
                            C_undef(fn_ty.ret.memory_ty(bx.cx).ptr_to())
                        }
                        ReturnDest::IndirectOperand(dst, _) |
                        ReturnDest::Store(dst) => dst.llval,
                        ReturnDest::DirectOperand(_) =>
                            bug!("Cannot use direct operand with an intrinsic call")
                    };

                    let args: Vec<_> = args.iter().enumerate().map(|(i, arg)| {
                        // The indices passed to simd_shuffle* in the
                        // third argument must be constant. This is
                        // checked by const-qualification, which also
                        // promotes any complex rvalues to constants.
                        if i == 2 && intrinsic.unwrap().starts_with("simd_shuffle") {
                            match *arg {
                                // The shuffle array argument is usually not an explicit constant,
                                // but specified directly in the code. This means it gets promoted
                                // and we can then extract the value by evaluating the promoted.
                                mir::Operand::Copy(mir::Place::Promoted(box(index, ty))) |
                                mir::Operand::Move(mir::Place::Promoted(box(index, ty))) => {
                                    let param_env = ty::ParamEnv::reveal_all();
                                    let cid = mir::interpret::GlobalId {
                                        instance: self.instance,
                                        promoted: Some(index),
                                    };
                                    let c = bx.tcx().const_eval(param_env.and(cid));
                                    let (llval, ty) = self.simd_shuffle_indices(
                                        &bx,
                                        terminator.source_info.span,
                                        ty,
                                        c,
                                    );
                                    return OperandRef {
                                        val: Immediate(llval),
                                        layout: bx.cx.layout_of(ty),
                                    };
                                },
                                mir::Operand::Copy(_) |
                                mir::Operand::Move(_) => {
                                    span_bug!(span, "shuffle indices must be constant");
                                }
                                mir::Operand::Constant(ref constant) => {
                                    let c = self.eval_mir_constant(&bx, constant);
                                    let (llval, ty) = self.simd_shuffle_indices(
                                        &bx,
                                        constant.span,
                                        constant.ty,
                                        c,
                                    );
                                    return OperandRef {
                                        val: Immediate(llval),
                                        layout: bx.cx.layout_of(ty)
                                    };
                                }
                            }
                        }

                        self.codegen_operand(&bx, arg)
                    }).collect();

                    let callee_ty = instance.as_ref().unwrap().ty(bx.cx.tcx);
                    codegen_intrinsic_call(&bx, callee_ty, &fn_ty, &args, dest,
                                           terminator.source_info.span);

                    if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
                        self.store_return(&bx, ret_dest, &fn_ty.ret, dst.llval);
                    }

                    if let Some((_, target)) = *destination {
                        funclet_br(self, bx, target);
                    } else {
                        bx.unreachable();
                    }

                    return;
                }

                // Split the rust-call tupled arguments off.
                let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
                    let (tup, args) = args.split_last().unwrap();
                    (args, Some(tup))
                } else {
                    (&args[..], None)
                };

                for (i, arg) in first_args.iter().enumerate() {
                    let mut op = self.codegen_operand(&bx, arg);
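                    // For a virtual call, the first argument is the trait
                    // object: split its fat pointer into the data pointer
                    // (passed as the receiver) and the vtable, and load the
                    // real fn pointer from the vtable.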
                    if let (0, Some(ty::InstanceDef::Virtual(_, idx))) = (i, def) {
                        if let Pair(data_ptr, meta) = op.val {
                            llfn = Some(meth::VirtualIndex::from_index(idx)
                                .get_fn(&bx, meta, &fn_ty));
                            llargs.push(data_ptr);
                            continue;
                        }
                    }

                    // The callee needs to own the argument memory if we pass it
                    // by-ref, so make a local copy of non-immediate constants.
                    match (arg, op.val) {
                        (&mir::Operand::Copy(_), Ref(..)) |
                        (&mir::Operand::Constant(_), Ref(..)) => {
                            let tmp = PlaceRef::alloca(&bx, op.layout, "const");
                            op.val.store(&bx, tmp);
                            op.val = Ref(tmp.llval, tmp.align);
                        }
                        (&mir::Operand::Copy(_), UnsizedRef(..)) |
                        (&mir::Operand::Constant(_), UnsizedRef(..)) => {
                            bug!("tried to pass an unsized argument by copy or constant")
                        }
                        _ => {}
                    }

                    self.codegen_argument(&bx, op, &mut llargs, &fn_ty.args[i]);
                }
                if let Some(tup) = untuple {
                    self.codegen_arguments_untupled(&bx, tup, &mut llargs,
                        &fn_ty.args[first_args.len()..])
                }

                let fn_ptr = match (llfn, instance) {
                    (Some(llfn), _) => llfn,
                    (None, Some(instance)) => callee::get_fn(bx.cx, instance),
                    _ => span_bug!(span, "no llfn for call"),
                };

                do_call(self, bx, fn_ty, fn_ptr, &llargs,
                        destination.as_ref().map(|&(_, target)| (ret_dest, target)),
                        cleanup);
            }
            mir::TerminatorKind::GeneratorDrop |
            mir::TerminatorKind::Yield { .. } => bug!("generator ops in codegen"),
            mir::TerminatorKind::FalseEdges { .. } |
            mir::TerminatorKind::FalseUnwind { .. } => bug!("borrowck false edges in codegen"),
        }
    }

    fn codegen_argument(&mut self,
                      bx: &Builder<'a, 'll, 'tcx>,
                      op: OperandRef<'ll, 'tcx>,
                      llargs: &mut Vec<&'ll Value>,
                      arg: &ArgType<'tcx, Ty<'tcx>>) {
        // Fill padding with undef value, where applicable.
        if let Some(ty) = arg.pad {
            llargs.push(C_undef(ty.llvm_type(bx.cx)));
        }

        if arg.is_ignore() {
            return;
        }

        if let PassMode::Pair(..) = arg.mode {
            match op.val {
                Pair(a, b) => {
                    llargs.push(a);
                    llargs.push(b);
                    return;
                }
                _ => bug!("codegen_argument: {:?} invalid for pair argument", op)
            }
        } else if let PassMode::UnsizedIndirect(..) = arg.mode {
            match op.val {
                UnsizedRef(a, b) => {
                    llargs.push(a);
                    llargs.push(b);
                    return;
                }
                _ => bug!("codegen_argument: {:?} invalid for unsized indirect argument", op)
            }
        }

        // Force by-ref if we have to load through a cast pointer.
        let (mut llval, align, by_ref) = match op.val {
            Immediate(_) | Pair(..) => {
                match arg.mode {
                    PassMode::Indirect(_) | PassMode::Cast(_) => {
                        let scratch = PlaceRef::alloca(bx, arg.layout, "arg");
                        op.val.store(bx, scratch);
                        (scratch.llval, scratch.align, true)
                    }
                    _ => {
                        (op.immediate_or_packed_pair(bx), arg.layout.align, false)
                    }
                }
            }
            Ref(llval, align) => {
                if arg.is_indirect() && align.abi() < arg.layout.align.abi() {
                    // `foo(packed.large_field)`. We can't pass the (unaligned) field directly. I
                    // think that ATM (Rust 1.16) we only pass temporaries, but we shouldn't
                    // have scary latent bugs around.

                    let scratch = PlaceRef::alloca(bx, arg.layout, "arg");
                    base::memcpy_ty(bx, scratch.llval, llval, op.layout, align, MemFlags::empty());
                    (scratch.llval, scratch.align, true)
                } else {
                    (llval, align, true)
                }
            }
            UnsizedRef(..) =>
                bug!("codegen_argument: tried to pass unsized operand to sized argument"),
        };

        if by_ref && !arg.is_indirect() {
            // Have to load the argument, maybe while casting it.
            if let PassMode::Cast(ty) = arg.mode {
                llval = bx.load(bx.pointercast(llval, ty.llvm_type(bx.cx).ptr_to()),
                                 align.min(arg.layout.align));
            } else {
                // We can't use `PlaceRef::load` here because the argument
                // may have a type we don't treat as immediate, but the ABI
                // used for this call is passing it by-value. In that case,
                // the load would just produce `OperandValue::Ref` instead
                // of the `OperandValue::Immediate` we need for the call.
                llval = bx.load(llval, align);
                if let layout::Abi::Scalar(ref scalar) = arg.layout.abi {
                    if scalar.is_bool() {
                        bx.range_metadata(llval, 0..2);
                    }
                }
                // We store bools as i8 so we need to truncate to i1.
                llval = base::to_immediate(bx, llval, arg.layout);
            }
        }

        llargs.push(llval);
    }

    fn codegen_arguments_untupled(&mut self,
                                bx: &Builder<'a, 'll, 'tcx>,
                                operand: &mir::Operand<'tcx>,
                                llargs: &mut Vec<&'ll Value>,
                                args: &[ArgType<'tcx, Ty<'tcx>>]) {
        let tuple = self.codegen_operand(bx, operand);

        // Handle both by-ref and immediate tuples.
        if let Ref(llval, align) = tuple.val {
            let tuple_ptr = PlaceRef::new_sized(llval, tuple.layout, align);
            for i in 0..tuple.layout.fields.count() {
                let field_ptr = tuple_ptr.project_field(bx, i);
                self.codegen_argument(bx, field_ptr.load(bx), llargs, &args[i]);
            }
        } else if let UnsizedRef(..) = tuple.val {
            bug!("closure arguments must be sized")
        } else {
            // If the tuple is immediate, the elements are as well.
            for i in 0..tuple.layout.fields.count() {
                let op = tuple.extract_field(bx, i);
                self.codegen_argument(bx, op, llargs, &args[i]);
            }
        }
    }

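    /// Returns the place used to stash the exception pointer and selector
    /// produced by a landing pad, creating it on first use.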
    fn get_personality_slot(&mut self, bx: &Builder<'a, 'll, 'tcx>) -> PlaceRef<'ll, 'tcx> {
        let cx = bx.cx;
        if let Some(slot) = self.personality_slot {
            slot
        } else {
            let layout = cx.layout_of(cx.tcx.intern_tup(&[
                cx.tcx.mk_mut_ptr(cx.tcx.types.u8),
                cx.tcx.types.i32
            ]));
            let slot = PlaceRef::alloca(bx, layout, "personalityslot");
            self.personality_slot = Some(slot);
            slot
        }
    }

    /// Return the landingpad wrapper around the given basic block
    ///
    /// No-op in MSVC SEH scheme.
    fn landing_pad_to(&mut self, target_bb: mir::BasicBlock) -> &'ll BasicBlock {
        if let Some(block) = self.landing_pads[target_bb] {
            return block;
        }

        let block = self.blocks[target_bb];
        let landing_pad = self.landing_pad_uncached(block);
        self.landing_pads[target_bb] = Some(landing_pad);
        landing_pad
    }

    fn landing_pad_uncached(&mut self, target_bb: &'ll BasicBlock) -> &'ll BasicBlock {
        if base::wants_msvc_seh(self.cx.sess()) {
            span_bug!(self.mir.span, "landing pad was not inserted?")
        }

        let bx = self.new_block("cleanup");

        let llpersonality = self.cx.eh_personality();
        let llretty = self.landing_pad_type();
        let lp = bx.landing_pad(llretty, llpersonality, 1);
        bx.set_cleanup(lp);

        let slot = self.get_personality_slot(&bx);
        slot.storage_live(&bx);
        Pair(bx.extract_value(lp, 0), bx.extract_value(lp, 1)).store(&bx, slot);

        bx.br(target_bb);
        bx.llbb()
    }

    fn landing_pad_type(&self) -> &'ll Type {
        let cx = self.cx;
        Type::struct_(cx, &[Type::i8p(cx), Type::i32(cx)], false)
    }

    fn unreachable_block(&mut self) -> &'ll BasicBlock {
        self.unreachable_block.unwrap_or_else(|| {
            let bl = self.new_block("unreachable");
            bl.unreachable();
            self.unreachable_block = Some(bl.llbb());
            bl.llbb()
        })
    }

    pub fn new_block(&self, name: &str) -> Builder<'a, 'll, 'tcx> {
        Builder::new_block(self.cx, self.llfn, name)
    }

    pub fn build_block(&self, bb: mir::BasicBlock) -> Builder<'a, 'll, 'tcx> {
        let bx = Builder::with_cx(self.cx);
        bx.position_at_end(self.blocks[bb]);
        bx
    }

    fn make_return_dest(&mut self, bx: &Builder<'a, 'll, 'tcx>,
                        dest: &mir::Place<'tcx>, fn_ret: &ArgType<'tcx, Ty<'tcx>>,
                        llargs: &mut Vec<&'ll Value>, is_intrinsic: bool)
                        -> ReturnDest<'ll, 'tcx> {
        // If the return is ignored, we can just return a do-nothing ReturnDest
        if fn_ret.is_ignore() {
            return ReturnDest::Nothing;
        }
        let dest = if let mir::Place::Local(index) = *dest {
            match self.locals[index] {
                LocalRef::Place(dest) => dest,
                LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                LocalRef::Operand(None) => {
                    // Handle temporary places, specifically Operand ones, as
                    // they don't have allocas
                    return if fn_ret.is_indirect() {
                        // Odd, but possible, case: we have an operand temporary,
                        // but the calling convention has an indirect return.
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout, "tmp_ret");
                        tmp.storage_live(bx);
                        llargs.push(tmp.llval);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else if is_intrinsic {
                        // Currently, intrinsics always need a location to store
                        // the result, so we create a temporary alloca for the
                        // result.
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout, "tmp_ret");
                        tmp.storage_live(bx);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else {
                        ReturnDest::DirectOperand(index)
                    };
                }
                LocalRef::Operand(Some(_)) => {
                    bug!("place local already assigned to");
                }
            }
        } else {
            self.codegen_place(bx, dest)
        };
        if fn_ret.is_indirect() {
            if dest.align.abi() < dest.layout.align.abi() {
                // Currently, MIR code generation does not create calls
                // that store directly to fields of packed structs (in
                // fact, the calls it creates write only to temps).
                //
                // If someone changes that, please update this code path
                // to create a temporary.
                span_bug!(self.mir.span, "can't directly store to unaligned value");
            }
            llargs.push(dest.llval);
            ReturnDest::Nothing
        } else {
            ReturnDest::Store(dest)
        }
    }

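    /// Codegen a transmute into `dst`. When the destination is an operand
    /// local without an alloca, spill through a temporary place so the
    /// result can be read back as an operand.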
    fn codegen_transmute(&mut self, bx: &Builder<'a, 'll, 'tcx>,
                       src: &mir::Operand<'tcx>,
                       dst: &mir::Place<'tcx>) {
        if let mir::Place::Local(index) = *dst {
            match self.locals[index] {
                LocalRef::Place(place) => self.codegen_transmute_into(bx, src, place),
                LocalRef::UnsizedPlace(_) => bug!("transmute must not involve unsized locals"),
                LocalRef::Operand(None) => {
                    let dst_layout = bx.cx.layout_of(self.monomorphized_place_ty(dst));
                    assert!(!dst_layout.ty.has_erasable_regions());
                    let place = PlaceRef::alloca(bx, dst_layout, "transmute_temp");
                    place.storage_live(bx);
                    self.codegen_transmute_into(bx, src, place);
                    let op = place.load(bx);
                    place.storage_dead(bx);
                    self.locals[index] = LocalRef::Operand(Some(op));
                }
                LocalRef::Operand(Some(op)) => {
                    assert!(op.layout.is_zst(),
                            "assigning to initialized SSA temp");
                }
            }
        } else {
            let dst = self.codegen_place(bx, dst);
            self.codegen_transmute_into(bx, src, dst);
        }
    }

    fn codegen_transmute_into(&mut self, bx: &Builder<'a, 'll, 'tcx>,
                            src: &mir::Operand<'tcx>,
                            dst: PlaceRef<'ll, 'tcx>) {
        let src = self.codegen_operand(bx, src);
        let llty = src.layout.llvm_type(bx.cx);
        let cast_ptr = bx.pointercast(dst.llval, llty.ptr_to());
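        // Use the smaller of the source and destination alignments; the
        // destination may be less aligned than the source type expects.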
        let align = src.layout.align.min(dst.layout.align);
        src.val.store(bx, PlaceRef::new_sized(cast_ptr, src.layout, align));
    }

    // Stores the return value of a function call into its final location.
    fn store_return(&mut self,
                    bx: &Builder<'a, 'll, 'tcx>,
                    dest: ReturnDest<'ll, 'tcx>,
                    ret_ty: &ArgType<'tcx, Ty<'tcx>>,
                    llval: &'ll Value) {
        use self::ReturnDest::*;

        match dest {
            Nothing => (),
            Store(dst) => ret_ty.store(bx, llval, dst),
            IndirectOperand(tmp, index) => {
                let op = tmp.load(bx);
                tmp.storage_dead(bx);
                self.locals[index] = LocalRef::Operand(Some(op));
            }
            DirectOperand(index) => {
                // If there is a cast, we have to store and reload.
                let op = if let PassMode::Cast(_) = ret_ty.mode {
                    let tmp = PlaceRef::alloca(bx, ret_ty.layout, "tmp_ret");
                    tmp.storage_live(bx);
                    ret_ty.store(bx, llval, tmp);
                    let op = tmp.load(bx);
                    tmp.storage_dead(bx);
                    op
                } else {
                    OperandRef::from_immediate_or_packed_pair(bx, llval, ret_ty.layout)
                };
                self.locals[index] = LocalRef::Operand(Some(op));
            }
        }
    }
}

enum ReturnDest<'ll, 'tcx> {
    // Do nothing, the return value is indirect or ignored
    Nothing,
    // Store the return value to the pointer
    Store(PlaceRef<'ll, 'tcx>),
    // Stores an indirect return value to an operand local place
    IndirectOperand(PlaceRef<'ll, 'tcx>, mir::Local),
    // Stores a direct return value to an operand local place
    DirectOperand(mir::Local)
}