// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use llvm::{self, ValueRef, BasicBlockRef};
use rustc::middle::lang_items;
use rustc::middle::const_val::{ConstEvalErr, ConstInt, ErrKind};
use rustc::ty::{self, TypeFoldable};
use rustc::ty::layout::{self, LayoutOf};
use rustc::traits;
use rustc::mir;
use abi::{Abi, FnType, ArgType, PassMode};
use base;
use callee;
use builder::Builder;
use common::{self, C_bool, C_str_slice, C_struct, C_u32, C_undef};
use consts;
use meth;
use monomorphize;
use type_of::LayoutLlvmExt;
use type_::Type;

use syntax::symbol::Symbol;
use syntax_pos::Pos;

use super::{MirContext, LocalRef};
use super::constant::Const;
use super::place::PlaceRef;
use super::operand::OperandRef;
use super::operand::OperandValue::{Pair, Ref, Immediate};

impl<'a, 'tcx> MirContext<'a, 'tcx> {
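    /// Translates a single MIR basic block: each statement in order, then
    /// the block's terminator.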
    pub fn trans_block(&mut self, bb: mir::BasicBlock) {
        let mut bcx = self.get_builder(bb);
        let data = &self.mir[bb];

        debug!("trans_block({:?}={:?})", bb, data);

        for statement in &data.statements {
            bcx = self.trans_statement(bcx, statement);
        }

        self.trans_terminator(bcx, bb, data.terminator());
    }

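    /// Translates a MIR terminator into LLVM IR, wiring up cleanup funclets
    /// and landing pads for unwinding where needed.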
    fn trans_terminator(&mut self,
                        mut bcx: Builder<'a, 'tcx>,
                        bb: mir::BasicBlock,
                        terminator: &mir::Terminator<'tcx>)
    {
        debug!("trans_terminator: {:?}", terminator);

        // Create the cleanup bundle, if needed.
        let tcx = bcx.tcx();
        let span = terminator.source_info.span;
        let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);
        let funclet = funclet_bb.and_then(|funclet_bb| self.funclets[funclet_bb].as_ref());

        let cleanup_pad = funclet.map(|lp| lp.cleanuppad());
        let cleanup_bundle = funclet.map(|l| l.bundle());

        let lltarget = |this: &mut Self, target: mir::BasicBlock| {
            let lltarget = this.blocks[target];
            let target_funclet = this.cleanup_kinds[target].funclet_bb(target);
            match (funclet_bb, target_funclet) {
                (None, None) => (lltarget, false),
                (Some(f), Some(t_f))
                    if f == t_f || !base::wants_msvc_seh(tcx.sess)
                    => (lltarget, false),
                (None, Some(_)) => {
                    // jump *into* cleanup - need a landing pad if GNU
                    (this.landing_pad_to(target), false)
                }
                (Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", terminator),
                (Some(_), Some(_)) => {
                    (this.landing_pad_to(target), true)
                }
            }
        };

        let llblock = |this: &mut Self, target: mir::BasicBlock| {
            let (lltarget, is_cleanupret) = lltarget(this, target);
            if is_cleanupret {
                // MSVC cross-funclet jump - need a trampoline

                debug!("llblock: creating cleanup trampoline for {:?}", target);
                let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
                let trampoline = this.new_block(name);
                trampoline.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
                trampoline.llbb()
            } else {
                lltarget
            }
        };

        let funclet_br = |this: &mut Self, bcx: Builder, target: mir::BasicBlock| {
            let (lltarget, is_cleanupret) = lltarget(this, target);
            if is_cleanupret {
                // micro-optimization: generate a `ret` rather than a jump
                // to a trampoline.
                bcx.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
            } else {
                bcx.br(lltarget);
            }
        };

        let do_call = |
            this: &mut Self,
            bcx: Builder<'a, 'tcx>,
            fn_ty: FnType<'tcx>,
            fn_ptr: ValueRef,
            llargs: &[ValueRef],
            destination: Option<(ReturnDest<'tcx>, mir::BasicBlock)>,
            cleanup: Option<mir::BasicBlock>
        | {
            if let Some(cleanup) = cleanup {
                let ret_bcx = if let Some((_, target)) = destination {
                    this.blocks[target]
                } else {
                    this.unreachable_block()
                };
                let invokeret = bcx.invoke(fn_ptr,
                                           &llargs,
                                           ret_bcx,
                                           llblock(this, cleanup),
                                           cleanup_bundle);
                fn_ty.apply_attrs_callsite(invokeret);

                if let Some((ret_dest, target)) = destination {
                    let ret_bcx = this.get_builder(target);
                    this.set_debug_loc(&ret_bcx, terminator.source_info);
                    this.store_return(&ret_bcx, ret_dest, &fn_ty.ret, invokeret);
                }
            } else {
                let llret = bcx.call(fn_ptr, &llargs, cleanup_bundle);
                fn_ty.apply_attrs_callsite(llret);
                if this.mir[bb].is_cleanup {
                    // Cleanup is always the cold path. Don't inline
                    // drop glue. Also, when there is a deeply-nested
                    // struct, there are "symmetry" issues that cause
                    // exponential inlining - see issue #41696.
                    llvm::Attribute::NoInline.apply_callsite(llvm::AttributePlace::Function, llret);
                }

                if let Some((ret_dest, target)) = destination {
                    this.store_return(&bcx, ret_dest, &fn_ty.ret, llret);
                    funclet_br(this, bcx, target);
                } else {
                    bcx.unreachable();
                }
            }
        };

        self.set_debug_loc(&bcx, terminator.source_info);
        match terminator.kind {
            mir::TerminatorKind::Resume => {
                if let Some(cleanup_pad) = cleanup_pad {
                    bcx.cleanup_ret(cleanup_pad, None);
                } else {
                    let slot = self.get_personality_slot(&bcx);
                    let lp0 = slot.project_field(&bcx, 0).load(&bcx).immediate();
                    let lp1 = slot.project_field(&bcx, 1).load(&bcx).immediate();
                    slot.storage_dead(&bcx);

                    if !bcx.sess().target.target.options.custom_unwind_resume {
                        let mut lp = C_undef(self.landing_pad_type());
                        lp = bcx.insert_value(lp, lp0, 0);
                        lp = bcx.insert_value(lp, lp1, 1);
                        bcx.resume(lp);
                    } else {
                        bcx.call(bcx.ccx.eh_unwind_resume(), &[lp0], cleanup_bundle);
                        bcx.unreachable();
                    }
                }
            }

            mir::TerminatorKind::Goto { target } => {
                funclet_br(self, bcx, target);
            }

            mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref values, ref targets } => {
                let discr = self.trans_operand(&bcx, discr);
                if switch_ty == bcx.tcx().types.bool {
                    let lltrue = llblock(self, targets[0]);
                    let llfalse = llblock(self, targets[1]);
                    if let [ConstInt::U8(0)] = values[..] {
                        bcx.cond_br(discr.immediate(), llfalse, lltrue);
                    } else {
                        bcx.cond_br(discr.immediate(), lltrue, llfalse);
                    }
                } else {
                    let (otherwise, targets) = targets.split_last().unwrap();
                    let switch = bcx.switch(discr.immediate(),
                                            llblock(self, *otherwise), values.len());
                    for (value, target) in values.iter().zip(targets) {
                        let val = Const::from_constint(bcx.ccx, value);
                        let llbb = llblock(self, *target);
                        bcx.add_case(switch, val.llval, llbb)
                    }
                }
            }

            mir::TerminatorKind::Return => {
                let llval = match self.fn_ty.ret.mode {
                    PassMode::Ignore | PassMode::Indirect(_) => {
                        bcx.ret_void();
                        return;
                    }

                    PassMode::Direct(_) | PassMode::Pair(..) => {
                        let op = self.trans_consume(&bcx, &mir::Place::Local(mir::RETURN_PLACE));
                        if let Ref(llval, align) = op.val {
                            bcx.load(llval, align)
                        } else {
                            op.immediate_or_packed_pair(&bcx)
                        }
                    }

                    PassMode::Cast(cast_ty) => {
                        let op = match self.locals[mir::RETURN_PLACE] {
                            LocalRef::Operand(Some(op)) => op,
                            LocalRef::Operand(None) => bug!("use of return before def"),
                            LocalRef::Place(tr_place) => {
                                OperandRef {
                                    val: Ref(tr_place.llval, tr_place.align),
                                    layout: tr_place.layout
                                }
                            }
                        };
                        let llslot = match op.val {
                            Immediate(_) | Pair(..) => {
                                let scratch = PlaceRef::alloca(&bcx, self.fn_ty.ret.layout, "ret");
                                op.val.store(&bcx, scratch);
                                scratch.llval
                            }
                            Ref(llval, align) => {
                                assert_eq!(align.abi(), op.layout.align.abi(),
                                           "return place is unaligned!");
                                llval
                            }
                        };
                        bcx.load(
                            bcx.pointercast(llslot, cast_ty.llvm_type(bcx.ccx).ptr_to()),
                            self.fn_ty.ret.layout.align)
                    }
                };
                bcx.ret(llval);
            }

            mir::TerminatorKind::Unreachable => {
                bcx.unreachable();
            }

            mir::TerminatorKind::Drop { ref location, target, unwind } => {
                let ty = location.ty(self.mir, bcx.tcx()).to_ty(bcx.tcx());
                let ty = self.monomorphize(&ty);
                let drop_fn = monomorphize::resolve_drop_in_place(bcx.ccx.tcx(), ty);

                if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
                    // we don't actually need to drop anything.
                    funclet_br(self, bcx, target);
                    return
                }

                let place = self.trans_place(&bcx, location);
                let mut args: &[_] = &[place.llval, place.llextra];
                args = &args[..1 + place.has_extra() as usize];
                let (drop_fn, fn_ty) = match ty.sty {
                    ty::TyDynamic(..) => {
                        let fn_ty = drop_fn.ty(bcx.ccx.tcx());
                        let sig = common::ty_fn_sig(bcx.ccx, fn_ty);
                        let sig = bcx.tcx().erase_late_bound_regions_and_normalize(&sig);
                        let fn_ty = FnType::new_vtable(bcx.ccx, sig, &[]);
                        args = &args[..1];
                        (meth::DESTRUCTOR.get_fn(&bcx, place.llextra, &fn_ty), fn_ty)
                    }
                    _ => {
                        (callee::get_fn(bcx.ccx, drop_fn),
                         FnType::of_instance(bcx.ccx, &drop_fn))
                    }
                };
                do_call(self, bcx, fn_ty, drop_fn, args,
                        Some((ReturnDest::Nothing, target)),
                        unwind);
            }

            mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
                let cond = self.trans_operand(&bcx, cond).immediate();
                let mut const_cond = common::const_to_opt_u128(cond, false).map(|c| c == 1);

                // This case can currently arise only from functions marked
                // with #[rustc_inherit_overflow_checks] and inlined from
                // another crate (mostly core::num generic/#[inline] fns),
                // while the current crate doesn't use overflow checks.
                // NOTE: Unlike binops, negation doesn't have its own
                // checked operation, just a comparison with the minimum
                // value, so we have to check for the assert message.
                if !bcx.ccx.check_overflow() {
                    use rustc_const_math::ConstMathErr::Overflow;
                    use rustc_const_math::Op::Neg;

                    if let mir::AssertMessage::Math(Overflow(Neg)) = *msg {
                        const_cond = Some(expected);
                    }
                }

                // Don't translate the panic block if success is known.
                if const_cond == Some(expected) {
                    funclet_br(self, bcx, target);
                    return;
                }

                // Pass the condition through llvm.expect for branch hinting.
                let expect = bcx.ccx.get_intrinsic(&"llvm.expect.i1");
                let cond = bcx.call(expect, &[cond, C_bool(bcx.ccx, expected)], None);

                // Create the failure block and the conditional branch to it.
                let lltarget = llblock(self, target);
                let panic_block = self.new_block("panic");
                if expected {
                    bcx.cond_br(cond, lltarget, panic_block.llbb());
                } else {
                    bcx.cond_br(cond, panic_block.llbb(), lltarget);
                }

                // After this point, bcx is the block for the call to panic.
                bcx = panic_block;
                self.set_debug_loc(&bcx, terminator.source_info);

                // Get the location information.
                let loc = bcx.sess().codemap().lookup_char_pos(span.lo());
                let filename = Symbol::intern(&loc.file.name.to_string()).as_str();
                let filename = C_str_slice(bcx.ccx, filename);
                let line = C_u32(bcx.ccx, loc.line as u32);
                let col = C_u32(bcx.ccx, loc.col.to_usize() as u32 + 1);
                let align = tcx.data_layout.aggregate_align
                    .max(tcx.data_layout.i32_align)
                    .max(tcx.data_layout.pointer_align);

                // Put together the arguments to the panic entry point.
                let (lang_item, args, const_err) = match *msg {
                    mir::AssertMessage::BoundsCheck { ref len, ref index } => {
                        let len = self.trans_operand(&mut bcx, len).immediate();
                        let index = self.trans_operand(&mut bcx, index).immediate();

                        let const_err = common::const_to_opt_u128(len, false)
                            .and_then(|len| common::const_to_opt_u128(index, false)
                                .map(|index| ErrKind::IndexOutOfBounds {
                                    len: len as u64,
                                    index: index as u64
                                }));

                        let file_line_col = C_struct(bcx.ccx, &[filename, line, col], false);
                        let file_line_col = consts::addr_of(bcx.ccx,
                                                            file_line_col,
                                                            align,
                                                            "panic_bounds_check_loc");
                        (lang_items::PanicBoundsCheckFnLangItem,
                         vec![file_line_col, index, len],
                         const_err)
                    }
                    mir::AssertMessage::Math(ref err) => {
                        let msg_str = Symbol::intern(err.description()).as_str();
                        let msg_str = C_str_slice(bcx.ccx, msg_str);
                        let msg_file_line_col = C_struct(bcx.ccx,
                                                     &[msg_str, filename, line, col],
                                                     false);
                        let msg_file_line_col = consts::addr_of(bcx.ccx,
                                                                msg_file_line_col,
                                                                align,
                                                                "panic_loc");
                        (lang_items::PanicFnLangItem,
                         vec![msg_file_line_col],
                         Some(ErrKind::Math(err.clone())))
                    }
                    mir::AssertMessage::GeneratorResumedAfterReturn |
                    mir::AssertMessage::GeneratorResumedAfterPanic => {
                        let str = if let mir::AssertMessage::GeneratorResumedAfterReturn = *msg {
                            "generator resumed after completion"
                        } else {
                            "generator resumed after panicking"
                        };
                        let msg_str = Symbol::intern(str).as_str();
                        let msg_str = C_str_slice(bcx.ccx, msg_str);
                        let msg_file_line_col = C_struct(bcx.ccx,
                                                     &[msg_str, filename, line, col],
                                                     false);
                        let msg_file_line_col = consts::addr_of(bcx.ccx,
                                                                msg_file_line_col,
                                                                align,
                                                                "panic_loc");
                        (lang_items::PanicFnLangItem,
                         vec![msg_file_line_col],
                         None)
                    }
                };

                // If we know we always panic, and the error message
                // is also constant, then we can produce a warning.
                if const_cond == Some(!expected) {
                    if let Some(err) = const_err {
                        let err = ConstEvalErr{ span: span, kind: err };
                        let mut diag = bcx.tcx().sess.struct_span_warn(
                            span, "this expression will panic at run-time");
                        err.note(bcx.tcx(), span, "expression", &mut diag);
                        diag.emit();
                    }
                }

                // Obtain the panic entry point.
                let def_id = common::langcall(bcx.tcx(), Some(span), "", lang_item);
                let instance = ty::Instance::mono(bcx.tcx(), def_id);
                let fn_ty = FnType::of_instance(bcx.ccx, &instance);
                let llfn = callee::get_fn(bcx.ccx, instance);

                // Translate the actual panic invoke/call.
                do_call(self, bcx, fn_ty, llfn, &args, None, cleanup);
            }

            mir::TerminatorKind::DropAndReplace { .. } => {
                bug!("undesugared DropAndReplace in trans: {:?}", terminator);
            }

            mir::TerminatorKind::Call { ref func, ref args, ref destination, cleanup } => {
                // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
                let callee = self.trans_operand(&bcx, func);

                let (instance, mut llfn) = match callee.layout.ty.sty {
                    ty::TyFnDef(def_id, substs) => {
                        (Some(ty::Instance::resolve(bcx.ccx.tcx(),
                                                    ty::ParamEnv::empty(traits::Reveal::All),
                                                    def_id,
                                                    substs).unwrap()),
                         None)
                    }
                    ty::TyFnPtr(_) => {
                        (None, Some(callee.immediate()))
                    }
                    _ => bug!("{} is not callable", callee.layout.ty)
                };
                let def = instance.map(|i| i.def);
                let sig = callee.layout.ty.fn_sig(bcx.tcx());
                let sig = bcx.tcx().erase_late_bound_regions_and_normalize(&sig);
                let abi = sig.abi;

                // Handle, ourselves, the intrinsics that old trans wanted `Expr`s for.
                let intrinsic = match def {
                    Some(ty::InstanceDef::Intrinsic(def_id))
                        => Some(bcx.tcx().item_name(def_id)),
                    _ => None
                };
                let intrinsic = intrinsic.as_ref().map(|s| &s[..]);

                if intrinsic == Some("transmute") {
                    let &(ref dest, target) = destination.as_ref().unwrap();
                    self.trans_transmute(&bcx, &args[0], dest);
                    funclet_br(self, bcx, target);
                    return;
                }

                let extra_args = &args[sig.inputs().len()..];
                let extra_args = extra_args.iter().map(|op_arg| {
                    let op_ty = op_arg.ty(self.mir, bcx.tcx());
                    self.monomorphize(&op_ty)
                }).collect::<Vec<_>>();

                let fn_ty = match def {
                    Some(ty::InstanceDef::Virtual(..)) => {
                        FnType::new_vtable(bcx.ccx, sig, &extra_args)
                    }
                    Some(ty::InstanceDef::DropGlue(_, None)) => {
                        // empty drop glue - a nop.
                        let &(_, target) = destination.as_ref().unwrap();
                        funclet_br(self, bcx, target);
                        return;
                    }
                    _ => FnType::new(bcx.ccx, sig, &extra_args)
                };

                // The arguments we'll be passing. Plus one to account for outptr, if used.
                let arg_count = fn_ty.args.len() + fn_ty.ret.is_indirect() as usize;
                let mut llargs = Vec::with_capacity(arg_count);

                // Prepare the return value destination
                let ret_dest = if let Some((ref dest, _)) = *destination {
                    let is_intrinsic = intrinsic.is_some();
                    self.make_return_dest(&bcx, dest, &fn_ty.ret, &mut llargs,
                                          is_intrinsic)
                } else {
                    ReturnDest::Nothing
                };

                if intrinsic.is_some() && intrinsic != Some("drop_in_place") {
                    use intrinsic::trans_intrinsic_call;

                    let dest = match ret_dest {
                        _ if fn_ty.ret.is_indirect() => llargs[0],
                        ReturnDest::Nothing => {
                            C_undef(fn_ty.ret.memory_ty(bcx.ccx).ptr_to())
                        }
                        ReturnDest::IndirectOperand(dst, _) |
                        ReturnDest::Store(dst) => dst.llval,
                        ReturnDest::DirectOperand(_) =>
                            bug!("Cannot use direct operand with an intrinsic call")
                    };

                    let args: Vec<_> = args.iter().enumerate().map(|(i, arg)| {
                        // The indices passed to simd_shuffle* in the
                        // third argument must be constant. This is
                        // checked by const-qualification, which also
                        // promotes any complex rvalues to constants.
                        if i == 2 && intrinsic.unwrap().starts_with("simd_shuffle") {
                            match *arg {
                                mir::Operand::Copy(_) |
                                mir::Operand::Move(_) => {
                                    span_bug!(span, "shuffle indices must be constant");
                                }
                                mir::Operand::Constant(ref constant) => {
                                    let val = self.trans_constant(&bcx, constant);
                                    return OperandRef {
                                        val: Immediate(val.llval),
                                        layout: bcx.ccx.layout_of(val.ty)
                                    };
                                }
                            }
                        }

                        self.trans_operand(&bcx, arg)
                    }).collect();

                    let callee_ty = instance.as_ref().unwrap().ty(bcx.ccx.tcx());
                    trans_intrinsic_call(&bcx, callee_ty, &fn_ty, &args, dest,
                                         terminator.source_info.span);

                    if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
                        self.store_return(&bcx, ret_dest, &fn_ty.ret, dst.llval);
                    }

                    if let Some((_, target)) = *destination {
                        funclet_br(self, bcx, target);
                    } else {
                        bcx.unreachable();
                    }

                    return;
                }

554
555                 // Split the rust-call tupled arguments off.
556                 let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
557                     let (tup, args) = args.split_last().unwrap();
558                     (args, Some(tup))
559                 } else {
560                     (&args[..], None)
561                 };
562
563                 for (i, arg) in first_args.iter().enumerate() {
564                     let mut op = self.trans_operand(&bcx, arg);
565                     if let (0, Some(ty::InstanceDef::Virtual(_, idx))) = (i, def) {
566                         if let Pair(data_ptr, meta) = op.val {
567                             llfn = Some(meth::VirtualIndex::from_index(idx)
568                                 .get_fn(&bcx, meta, &fn_ty));
569                             llargs.push(data_ptr);
570                             continue;
571                         }
572                     }
573
574                     // The callee needs to own the argument memory if we pass it
575                     // by-ref, so make a local copy of non-immediate constants.
576                     match (arg, op.val) {
577                         (&mir::Operand::Copy(_), Ref(..)) |
578                         (&mir::Operand::Constant(_), Ref(..)) => {
579                             let tmp = PlaceRef::alloca(&bcx, op.layout, "const");
580                             op.val.store(&bcx, tmp);
581                             op.val = Ref(tmp.llval, tmp.align);
582                         }
583                         _ => {}
584                     }
585
586                     self.trans_argument(&bcx, op, &mut llargs, &fn_ty.args[i]);
587                 }
588                 if let Some(tup) = untuple {
589                     self.trans_arguments_untupled(&bcx, tup, &mut llargs,
590                         &fn_ty.args[first_args.len()..])
591                 }
592
593                 let fn_ptr = match (llfn, instance) {
594                     (Some(llfn), _) => llfn,
595                     (None, Some(instance)) => callee::get_fn(bcx.ccx, instance),
596                     _ => span_bug!(span, "no llfn for call"),
597                 };
598
599                 do_call(self, bcx, fn_ty, fn_ptr, &llargs,
600                         destination.as_ref().map(|&(_, target)| (ret_dest, target)),
601                         cleanup);
602             }
            mir::TerminatorKind::GeneratorDrop |
            mir::TerminatorKind::Yield { .. } |
            mir::TerminatorKind::FalseEdges { .. } => bug!("generator ops in trans"),
        }
    }

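    /// Lowers a single call argument into the form required by `arg`'s ABI
    /// pass mode, pushing the resulting LLVM value(s) onto `llargs`.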
    fn trans_argument(&mut self,
                      bcx: &Builder<'a, 'tcx>,
                      op: OperandRef<'tcx>,
                      llargs: &mut Vec<ValueRef>,
                      arg: &ArgType<'tcx>) {
        // Fill padding with undef value, where applicable.
        if let Some(ty) = arg.pad {
            llargs.push(C_undef(ty.llvm_type(bcx.ccx)));
        }

        if arg.is_ignore() {
            return;
        }

        if let PassMode::Pair(..) = arg.mode {
            match op.val {
                Pair(a, b) => {
                    llargs.push(a);
                    llargs.push(b);
                    return;
                }
                _ => bug!("trans_argument: {:?} invalid for pair argument", op)
            }
        }

        // Force by-ref if we have to load through a cast pointer.
        let (mut llval, align, by_ref) = match op.val {
            Immediate(_) | Pair(..) => {
                match arg.mode {
                    PassMode::Indirect(_) | PassMode::Cast(_) => {
                        let scratch = PlaceRef::alloca(bcx, arg.layout, "arg");
                        op.val.store(bcx, scratch);
                        (scratch.llval, scratch.align, true)
                    }
                    _ => {
                        (op.immediate_or_packed_pair(bcx), arg.layout.align, false)
                    }
                }
            }
            Ref(llval, align) => {
                if arg.is_indirect() && align.abi() < arg.layout.align.abi() {
                    // `foo(packed.large_field)`. We can't pass the (unaligned) field directly.
                    // At the moment (Rust 1.16) we believe only temporaries are passed this way,
                    // but we shouldn't leave scary latent bugs around, so copy to an aligned slot.

                    let scratch = PlaceRef::alloca(bcx, arg.layout, "arg");
                    base::memcpy_ty(bcx, scratch.llval, llval, op.layout, align);
                    (scratch.llval, scratch.align, true)
                } else {
                    (llval, align, true)
                }
            }
        };

        if by_ref && !arg.is_indirect() {
            // Have to load the argument, maybe while casting it.
            if let PassMode::Cast(ty) = arg.mode {
                llval = bcx.load(bcx.pointercast(llval, ty.llvm_type(bcx.ccx).ptr_to()),
                                 align.min(arg.layout.align));
            } else {
                // We can't use `PlaceRef::load` here because the argument
                // may have a type we don't treat as immediate, but the ABI
                // used for this call is passing it by-value. In that case,
                // the load would just produce `OperandValue::Ref` instead
                // of the `OperandValue::Immediate` we need for the call.
                llval = bcx.load(llval, align);
                if let layout::Abi::Scalar(ref scalar) = arg.layout.abi {
                    if scalar.is_bool() {
                        bcx.range_metadata(llval, 0..2);
                    }
                }
                // We store bools as i8 so we need to truncate to i1.
                llval = base::to_immediate(bcx, llval, arg.layout);
            }
        }

        llargs.push(llval);
    }

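    /// Expands the tupled arguments of a "rust-call" ABI function into
    /// individual call arguments, one per tuple field.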
    fn trans_arguments_untupled(&mut self,
                                bcx: &Builder<'a, 'tcx>,
                                operand: &mir::Operand<'tcx>,
                                llargs: &mut Vec<ValueRef>,
                                args: &[ArgType<'tcx>]) {
        let tuple = self.trans_operand(bcx, operand);

        // Handle both by-ref and immediate tuples.
        if let Ref(llval, align) = tuple.val {
            let tuple_ptr = PlaceRef::new_sized(llval, tuple.layout, align);
            for i in 0..tuple.layout.fields.count() {
                let field_ptr = tuple_ptr.project_field(bcx, i);
                self.trans_argument(bcx, field_ptr.load(bcx), llargs, &args[i]);
            }
        } else {
            // If the tuple is immediate, the elements are as well.
            for i in 0..tuple.layout.fields.count() {
                let op = tuple.extract_field(bcx, i);
                self.trans_argument(bcx, op, llargs, &args[i]);
            }
        }
    }

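    /// Returns (creating it on first use) the stack slot that holds the
    /// exception pointer and selector produced by the personality function.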
    fn get_personality_slot(&mut self, bcx: &Builder<'a, 'tcx>) -> PlaceRef<'tcx> {
        let ccx = bcx.ccx;
        if let Some(slot) = self.personality_slot {
            slot
        } else {
            let layout = ccx.layout_of(ccx.tcx().intern_tup(&[
                ccx.tcx().mk_mut_ptr(ccx.tcx().types.u8),
                ccx.tcx().types.i32
            ], false));
            let slot = PlaceRef::alloca(bcx, layout, "personalityslot");
            self.personality_slot = Some(slot);
            slot
        }
    }

    /// Return the landingpad wrapper around the given basic block
    ///
    /// No-op in MSVC SEH scheme.
    fn landing_pad_to(&mut self, target_bb: mir::BasicBlock) -> BasicBlockRef {
        if let Some(block) = self.landing_pads[target_bb] {
            return block;
        }

        let block = self.blocks[target_bb];
        let landing_pad = self.landing_pad_uncached(block);
        self.landing_pads[target_bb] = Some(landing_pad);
        landing_pad
    }

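    /// Builds a fresh `cleanup` landing pad block that saves the exception
    /// values into the personality slot and then branches to `target_bb`.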
    fn landing_pad_uncached(&mut self, target_bb: BasicBlockRef) -> BasicBlockRef {
        if base::wants_msvc_seh(self.ccx.sess()) {
            span_bug!(self.mir.span, "landing pad was not inserted?")
        }

        let bcx = self.new_block("cleanup");

        let llpersonality = self.ccx.eh_personality();
        let llretty = self.landing_pad_type();
        let lp = bcx.landing_pad(llretty, llpersonality, 1, self.llfn);
        bcx.set_cleanup(lp);

        let slot = self.get_personality_slot(&bcx);
        slot.storage_live(&bcx);
        Pair(bcx.extract_value(lp, 0), bcx.extract_value(lp, 1)).store(&bcx, slot);

        bcx.br(target_bb);
        bcx.llbb()
    }

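    /// The LLVM type produced by a landing pad: a pair of the exception
    /// pointer (`i8*`) and the selector (`i32`).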
    fn landing_pad_type(&self) -> Type {
        let ccx = self.ccx;
        Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)], false)
    }

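    /// Returns (creating it on first use) a block containing only an
    /// `unreachable` instruction, used as the normal destination of calls
    /// that never return.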
    fn unreachable_block(&mut self) -> BasicBlockRef {
        self.unreachable_block.unwrap_or_else(|| {
            let bl = self.new_block("unreachable");
            bl.unreachable();
            self.unreachable_block = Some(bl.llbb());
            bl.llbb()
        })
    }

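    /// Creates a fresh LLVM basic block in the current function and returns
    /// a builder positioned at its end.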
    pub fn new_block(&self, name: &str) -> Builder<'a, 'tcx> {
        Builder::new_block(self.ccx, self.llfn, name)
    }

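    /// Returns a builder positioned at the end of the pre-created LLVM block
    /// for the MIR basic block `bb`.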
    pub fn get_builder(&self, bb: mir::BasicBlock) -> Builder<'a, 'tcx> {
        let builder = Builder::with_ccx(self.ccx);
        builder.position_at_end(self.blocks[bb]);
        builder
    }

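    /// Decides where a call's return value should be written, pushing an
    /// out-pointer onto `llargs` when the ABI returns indirectly.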
    fn make_return_dest(&mut self, bcx: &Builder<'a, 'tcx>,
                        dest: &mir::Place<'tcx>, fn_ret: &ArgType<'tcx>,
                        llargs: &mut Vec<ValueRef>, is_intrinsic: bool)
                        -> ReturnDest<'tcx> {
        // If the return is ignored, we can just return a do-nothing ReturnDest
        if fn_ret.is_ignore() {
            return ReturnDest::Nothing;
        }
        let dest = if let mir::Place::Local(index) = *dest {
            match self.locals[index] {
                LocalRef::Place(dest) => dest,
                LocalRef::Operand(None) => {
                    // Handle temporary places, specifically Operand ones, as
                    // they don't have allocas
                    return if fn_ret.is_indirect() {
                        // Odd, but possible, case: we have an operand temporary,
                        // but the calling convention has an indirect return.
                        let tmp = PlaceRef::alloca(bcx, fn_ret.layout, "tmp_ret");
                        tmp.storage_live(bcx);
                        llargs.push(tmp.llval);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else if is_intrinsic {
                        // Currently, intrinsics always need a location to store
                        // the result, so we create a temporary alloca for the
                        // result.
                        let tmp = PlaceRef::alloca(bcx, fn_ret.layout, "tmp_ret");
                        tmp.storage_live(bcx);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else {
                        ReturnDest::DirectOperand(index)
                    };
                }
                LocalRef::Operand(Some(_)) => {
                    bug!("place local already assigned to");
                }
            }
        } else {
            self.trans_place(bcx, dest)
        };
        if fn_ret.is_indirect() {
            if dest.align.abi() < dest.layout.align.abi() {
                // Currently, MIR code generation does not create calls
                // that store directly to fields of packed structs (in
                // fact, the calls it creates write only to temps).
                //
                // If someone changes that, please update this code path
                // to create a temporary.
                span_bug!(self.mir.span, "can't directly store to unaligned value");
            }
            llargs.push(dest.llval);
            ReturnDest::Nothing
        } else {
            ReturnDest::Store(dest)
        }
    }

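    /// Translates a call to the `transmute` intrinsic by storing the source
    /// operand, reinterpreted, into the destination place.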
    fn trans_transmute(&mut self, bcx: &Builder<'a, 'tcx>,
                       src: &mir::Operand<'tcx>,
                       dst: &mir::Place<'tcx>) {
        if let mir::Place::Local(index) = *dst {
            match self.locals[index] {
                LocalRef::Place(place) => self.trans_transmute_into(bcx, src, place),
                LocalRef::Operand(None) => {
                    let dst_layout = bcx.ccx.layout_of(self.monomorphized_place_ty(dst));
                    assert!(!dst_layout.ty.has_erasable_regions());
                    let place = PlaceRef::alloca(bcx, dst_layout, "transmute_temp");
                    place.storage_live(bcx);
                    self.trans_transmute_into(bcx, src, place);
                    let op = place.load(bcx);
                    place.storage_dead(bcx);
                    self.locals[index] = LocalRef::Operand(Some(op));
                }
                LocalRef::Operand(Some(op)) => {
                    assert!(op.layout.is_zst(),
                            "assigning to initialized SSA temp");
                }
            }
        } else {
            let dst = self.trans_place(bcx, dst);
            self.trans_transmute_into(bcx, src, dst);
        }
    }

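    /// Stores `src` into `dst` through a pointer cast to `src`'s LLVM type,
    /// using the smaller of the two layouts' alignments.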
    fn trans_transmute_into(&mut self, bcx: &Builder<'a, 'tcx>,
                            src: &mir::Operand<'tcx>,
                            dst: PlaceRef<'tcx>) {
        let src = self.trans_operand(bcx, src);
        let llty = src.layout.llvm_type(bcx.ccx);
        let cast_ptr = bcx.pointercast(dst.llval, llty.ptr_to());
        let align = src.layout.align.min(dst.layout.align);
        src.val.store(bcx, PlaceRef::new_sized(cast_ptr, src.layout, align));
    }

    // Stores the return value of a function call into its final location.
    fn store_return(&mut self,
                    bcx: &Builder<'a, 'tcx>,
                    dest: ReturnDest<'tcx>,
                    ret_ty: &ArgType<'tcx>,
                    llval: ValueRef) {
        use self::ReturnDest::*;

        match dest {
            Nothing => (),
            Store(dst) => ret_ty.store(bcx, llval, dst),
            IndirectOperand(tmp, index) => {
                let op = tmp.load(bcx);
                tmp.storage_dead(bcx);
                self.locals[index] = LocalRef::Operand(Some(op));
            }
            DirectOperand(index) => {
                // If there is a cast, we have to store and reload.
                let op = if let PassMode::Cast(_) = ret_ty.mode {
                    let tmp = PlaceRef::alloca(bcx, ret_ty.layout, "tmp_ret");
                    tmp.storage_live(bcx);
                    ret_ty.store(bcx, llval, tmp);
                    let op = tmp.load(bcx);
                    tmp.storage_dead(bcx);
                    op
                } else {
                    OperandRef::from_immediate_or_packed_pair(bcx, llval, ret_ty.layout)
                };
                self.locals[index] = LocalRef::Operand(Some(op));
            }
        }
    }
}

enum ReturnDest<'tcx> {
    // Do nothing, the return value is indirect or ignored
    Nothing,
    // Store the return value to the pointer
    Store(PlaceRef<'tcx>),
    // Stores an indirect return value to an operand local place
    IndirectOperand(PlaceRef<'tcx>, mir::Local),
    // Stores a direct return value to an operand local place
    DirectOperand(mir::Local)
}