src/librustc_trans/mir/block.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use llvm::{self, ValueRef, BasicBlockRef};
use rustc::middle::lang_items;
use rustc::ty::{self, TypeFoldable};
use rustc::ty::layout::{self, LayoutOf};
use rustc::traits;
use rustc::mir;
use abi::{Abi, FnType, ArgType, PassMode};
use base;
use callee;
use builder::Builder;
use common::{self, C_bool, C_str_slice, C_struct, C_u32, C_uint_big, C_undef};
use consts;
use meth;
use monomorphize;
use type_of::LayoutLlvmExt;
use type_::Type;

use syntax::symbol::Symbol;
use syntax_pos::Pos;

use super::{FunctionCx, LocalRef};
use super::place::PlaceRef;
use super::operand::OperandRef;
use super::operand::OperandValue::{Pair, Ref, Immediate};

impl<'a, 'tcx> FunctionCx<'a, 'tcx> {
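    /// Translates a single MIR basic block: lowers each statement in order,
    /// then its terminator, into the block's LLVM builder.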
    pub fn trans_block(&mut self, bb: mir::BasicBlock) {
        let mut bx = self.build_block(bb);
        let data = &self.mir[bb];

        debug!("trans_block({:?}={:?})", bb, data);

        for statement in &data.statements {
            bx = self.trans_statement(bx, statement);
        }

        self.trans_terminator(bx, bb, data.terminator());
    }

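    /// Lowers a MIR terminator into LLVM control flow, taking care of
    /// cleanup funclets, landing pads and unwind edges along the way.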
    fn trans_terminator(&mut self,
                        mut bx: Builder<'a, 'tcx>,
                        bb: mir::BasicBlock,
                        terminator: &mir::Terminator<'tcx>)
    {
        debug!("trans_terminator: {:?}", terminator);

        // Create the cleanup bundle, if needed.
        let tcx = bx.tcx();
        let span = terminator.source_info.span;
        let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);
        let funclet = funclet_bb.and_then(|funclet_bb| self.funclets[funclet_bb].as_ref());

        let cleanup_pad = funclet.map(|lp| lp.cleanuppad());
        let cleanup_bundle = funclet.map(|l| l.bundle());

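        // Resolve the LLVM block for a MIR target, inserting a GNU landing pad
        // when jumping into cleanup from normal code; the second element of the
        // returned pair is true when an MSVC cross-funclet `cleanupret` is needed.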
        let lltarget = |this: &mut Self, target: mir::BasicBlock| {
            let lltarget = this.blocks[target];
            let target_funclet = this.cleanup_kinds[target].funclet_bb(target);
            match (funclet_bb, target_funclet) {
                (None, None) => (lltarget, false),
                (Some(f), Some(t_f))
                    if f == t_f || !base::wants_msvc_seh(tcx.sess)
                    => (lltarget, false),
                (None, Some(_)) => {
                    // jump *into* cleanup - need a landing pad if GNU
                    (this.landing_pad_to(target), false)
                }
                (Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", terminator),
                (Some(_), Some(_)) => {
                    (this.landing_pad_to(target), true)
                }
            }
        };

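        // Like `lltarget`, but materializes a trampoline block that performs the
        // `cleanupret` when the jump would cross MSVC funclets.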
        let llblock = |this: &mut Self, target: mir::BasicBlock| {
            let (lltarget, is_cleanupret) = lltarget(this, target);
            if is_cleanupret {
                // MSVC cross-funclet jump - need a trampoline

                debug!("llblock: creating cleanup trampoline for {:?}", target);
                let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
                let trampoline = this.new_block(name);
                trampoline.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
                trampoline.llbb()
            } else {
                lltarget
            }
        };

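        // Branch to `target`, emitting a `cleanupret` directly instead of a
        // trampoline when we are leaving an MSVC cleanup funclet.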
        let funclet_br = |this: &mut Self, bx: Builder, target: mir::BasicBlock| {
            let (lltarget, is_cleanupret) = lltarget(this, target);
            if is_cleanupret {
                // micro-optimization: generate a `ret` rather than a jump
                // to a trampoline.
                bx.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
            } else {
                bx.br(lltarget);
            }
        };

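        // Emit the call itself: an `invoke` when there is an unwind edge
        // (branching to the cleanup landing pad on panic), otherwise a plain
        // `call`; then apply call-site attributes and store the return value.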
        let do_call = |
            this: &mut Self,
            bx: Builder<'a, 'tcx>,
            fn_ty: FnType<'tcx>,
            fn_ptr: ValueRef,
            llargs: &[ValueRef],
            destination: Option<(ReturnDest<'tcx>, mir::BasicBlock)>,
            cleanup: Option<mir::BasicBlock>
        | {
            if let Some(cleanup) = cleanup {
                let ret_bx = if let Some((_, target)) = destination {
                    this.blocks[target]
                } else {
                    this.unreachable_block()
                };
                let invokeret = bx.invoke(fn_ptr,
                                          &llargs,
                                          ret_bx,
                                          llblock(this, cleanup),
                                          cleanup_bundle);
                fn_ty.apply_attrs_callsite(invokeret);

                if let Some((ret_dest, target)) = destination {
                    let ret_bx = this.build_block(target);
                    this.set_debug_loc(&ret_bx, terminator.source_info);
                    this.store_return(&ret_bx, ret_dest, &fn_ty.ret, invokeret);
                }
            } else {
                let llret = bx.call(fn_ptr, &llargs, cleanup_bundle);
                fn_ty.apply_attrs_callsite(llret);
                if this.mir[bb].is_cleanup {
                    // Cleanup is always the cold path. Don't inline
                    // drop glue. Also, when there is a deeply-nested
                    // struct, there are "symmetry" issues that cause
                    // exponential inlining - see issue #41696.
                    llvm::Attribute::NoInline.apply_callsite(llvm::AttributePlace::Function, llret);
                }

                if let Some((ret_dest, target)) = destination {
                    this.store_return(&bx, ret_dest, &fn_ty.ret, llret);
                    funclet_br(this, bx, target);
                } else {
                    bx.unreachable();
                }
            }
        };

        self.set_debug_loc(&bx, terminator.source_info);
        match terminator.kind {
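            // Resume unwinding: `cleanupret` out of the MSVC funclet, or reload
            // the exception pair from the personality slot and `resume` (or call
            // `eh_unwind_resume` on targets with a custom unwind resume).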
            mir::TerminatorKind::Resume => {
                if let Some(cleanup_pad) = cleanup_pad {
                    bx.cleanup_ret(cleanup_pad, None);
                } else {
                    let slot = self.get_personality_slot(&bx);
                    let lp0 = slot.project_field(&bx, 0).load(&bx).immediate();
                    let lp1 = slot.project_field(&bx, 1).load(&bx).immediate();
                    slot.storage_dead(&bx);

                    if !bx.sess().target.target.options.custom_unwind_resume {
                        let mut lp = C_undef(self.landing_pad_type());
                        lp = bx.insert_value(lp, lp0, 0);
                        lp = bx.insert_value(lp, lp1, 1);
                        bx.resume(lp);
                    } else {
                        bx.call(bx.cx.eh_unwind_resume(), &[lp0], cleanup_bundle);
                        bx.unreachable();
                    }
                }
            }

            mir::TerminatorKind::Abort => {
                // Call core::intrinsics::abort()
                let fnname = bx.cx.get_intrinsic(&("llvm.trap"));
                bx.call(fnname, &[], None);
                bx.unreachable();
            }

            mir::TerminatorKind::Goto { target } => {
                funclet_br(self, bx, target);
            }

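            // `SwitchInt` on a `bool` becomes a conditional branch; otherwise an
            // LLVM `switch` whose default is the last (otherwise) target.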
            mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref values, ref targets } => {
                let discr = self.trans_operand(&bx, discr);
                if switch_ty == bx.tcx().types.bool {
                    let lltrue = llblock(self, targets[0]);
                    let llfalse = llblock(self, targets[1]);
                    if let [0] = values[..] {
                        bx.cond_br(discr.immediate(), llfalse, lltrue);
                    } else {
                        assert_eq!(&values[..], &[1]);
                        bx.cond_br(discr.immediate(), lltrue, llfalse);
                    }
                } else {
                    let (otherwise, targets) = targets.split_last().unwrap();
                    let switch = bx.switch(discr.immediate(),
                                           llblock(self, *otherwise), values.len());
                    let switch_llty = bx.cx.layout_of(switch_ty).immediate_llvm_type(bx.cx);
                    for (&value, target) in values.iter().zip(targets) {
                        let llval = C_uint_big(switch_llty, value);
                        let llbb = llblock(self, *target);
                        bx.add_case(switch, llval, llbb)
                    }
                }
            }

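            // Lower `Return` according to the return value's pass mode: nothing
            // to return for ignored/indirect returns, an immediate or scalar pair
            // for direct returns, and a load through a cast pointer for
            // `PassMode::Cast`.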
            mir::TerminatorKind::Return => {
                let llval = match self.fn_ty.ret.mode {
                    PassMode::Ignore | PassMode::Indirect(_) => {
                        bx.ret_void();
                        return;
                    }

                    PassMode::Direct(_) | PassMode::Pair(..) => {
                        let op = self.trans_consume(&bx, &mir::Place::Local(mir::RETURN_PLACE));
                        if let Ref(llval, align) = op.val {
                            bx.load(llval, align)
                        } else {
                            op.immediate_or_packed_pair(&bx)
                        }
                    }

                    PassMode::Cast(cast_ty) => {
                        let op = match self.locals[mir::RETURN_PLACE] {
                            LocalRef::Operand(Some(op)) => op,
                            LocalRef::Operand(None) => bug!("use of return before def"),
                            LocalRef::Place(tr_place) => {
                                OperandRef {
                                    val: Ref(tr_place.llval, tr_place.align),
                                    layout: tr_place.layout
                                }
                            }
                        };
                        let llslot = match op.val {
                            Immediate(_) | Pair(..) => {
                                let scratch = PlaceRef::alloca(&bx, self.fn_ty.ret.layout, "ret");
                                op.val.store(&bx, scratch);
                                scratch.llval
                            }
                            Ref(llval, align) => {
                                assert_eq!(align.abi(), op.layout.align.abi(),
                                           "return place is unaligned!");
                                llval
                            }
                        };
                        bx.load(
                            bx.pointercast(llslot, cast_ty.llvm_type(bx.cx).ptr_to()),
                            self.fn_ty.ret.layout.align)
                    }
                };
                bx.ret(llval);
            }

            mir::TerminatorKind::Unreachable => {
                bx.unreachable();
            }

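            // Resolve the drop glue for the dropped type (or the destructor from
            // the vtable for trait objects) and call it; empty drop glue becomes
            // a plain branch to the target.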
            mir::TerminatorKind::Drop { ref location, target, unwind } => {
                let ty = location.ty(self.mir, bx.tcx()).to_ty(bx.tcx());
                let ty = self.monomorphize(&ty);
                let drop_fn = monomorphize::resolve_drop_in_place(bx.cx.tcx, ty);

                if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
                    // we don't actually need to drop anything.
                    funclet_br(self, bx, target);
                    return
                }

                let place = self.trans_place(&bx, location);
                let mut args: &[_] = &[place.llval, place.llextra];
                args = &args[..1 + place.has_extra() as usize];
                let (drop_fn, fn_ty) = match ty.sty {
                    ty::TyDynamic(..) => {
                        let fn_ty = drop_fn.ty(bx.cx.tcx);
                        let sig = common::ty_fn_sig(bx.cx, fn_ty);
                        let sig = bx.tcx().erase_late_bound_regions_and_normalize(&sig);
                        let fn_ty = FnType::new_vtable(bx.cx, sig, &[]);
                        args = &args[..1];
                        (meth::DESTRUCTOR.get_fn(&bx, place.llextra, &fn_ty), fn_ty)
                    }
                    _ => {
                        (callee::get_fn(bx.cx, drop_fn),
                         FnType::of_instance(bx.cx, &drop_fn))
                    }
                };
                do_call(self, bx, fn_ty, drop_fn, args,
                        Some((ReturnDest::Nothing, target)),
                        unwind);
            }

            mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
                let cond = self.trans_operand(&bx, cond).immediate();
                let mut const_cond = common::const_to_opt_u128(cond, false).map(|c| c == 1);

                // This case can currently arise only from functions marked
                // with #[rustc_inherit_overflow_checks] and inlined from
                // another crate (mostly core::num generic/#[inline] fns),
                // while the current crate doesn't use overflow checks.
                // NOTE: Unlike binops, negation doesn't have its own
                // checked operation, just a comparison with the minimum
                // value, so we have to check for the assert message.
                if !bx.cx.check_overflow {
                    use rustc_const_math::ConstMathErr::Overflow;
                    use rustc_const_math::Op::Neg;

                    if let mir::AssertMessage::Math(Overflow(Neg)) = *msg {
                        const_cond = Some(expected);
                    }
                }

                // Don't translate the panic block if success is known.
                if const_cond == Some(expected) {
                    funclet_br(self, bx, target);
                    return;
                }

                // Pass the condition through llvm.expect for branch hinting.
                let expect = bx.cx.get_intrinsic(&"llvm.expect.i1");
                let cond = bx.call(expect, &[cond, C_bool(bx.cx, expected)], None);

                // Create the failure block and the conditional branch to it.
                let lltarget = llblock(self, target);
                let panic_block = self.new_block("panic");
                if expected {
                    bx.cond_br(cond, lltarget, panic_block.llbb());
                } else {
                    bx.cond_br(cond, panic_block.llbb(), lltarget);
                }

                // After this point, bx is the block for the call to panic.
                bx = panic_block;
                self.set_debug_loc(&bx, terminator.source_info);

                // Get the location information.
                let loc = bx.sess().codemap().lookup_char_pos(span.lo());
                let filename = Symbol::intern(&loc.file.name.to_string()).as_str();
                let filename = C_str_slice(bx.cx, filename);
                let line = C_u32(bx.cx, loc.line as u32);
                let col = C_u32(bx.cx, loc.col.to_usize() as u32 + 1);
                let align = tcx.data_layout.aggregate_align
                    .max(tcx.data_layout.i32_align)
                    .max(tcx.data_layout.pointer_align);

                // Put together the arguments to the panic entry point.
                let (lang_item, args) = match *msg {
                    mir::AssertMessage::BoundsCheck { ref len, ref index } => {
                        let len = self.trans_operand(&mut bx, len).immediate();
                        let index = self.trans_operand(&mut bx, index).immediate();

                        let file_line_col = C_struct(bx.cx, &[filename, line, col], false);
                        let file_line_col = consts::addr_of(bx.cx,
                                                            file_line_col,
                                                            align,
                                                            "panic_bounds_check_loc");
                        (lang_items::PanicBoundsCheckFnLangItem,
                         vec![file_line_col, index, len])
                    }
                    mir::AssertMessage::Math(ref err) => {
                        let msg_str = Symbol::intern(err.description()).as_str();
                        let msg_str = C_str_slice(bx.cx, msg_str);
                        let msg_file_line_col = C_struct(bx.cx,
                                                         &[msg_str, filename, line, col],
                                                         false);
                        let msg_file_line_col = consts::addr_of(bx.cx,
                                                                msg_file_line_col,
                                                                align,
                                                                "panic_loc");
                        (lang_items::PanicFnLangItem,
                         vec![msg_file_line_col])
                    }
                    mir::AssertMessage::GeneratorResumedAfterReturn |
                    mir::AssertMessage::GeneratorResumedAfterPanic => {
                        let str = if let mir::AssertMessage::GeneratorResumedAfterReturn = *msg {
                            "generator resumed after completion"
                        } else {
                            "generator resumed after panicking"
                        };
                        let msg_str = Symbol::intern(str).as_str();
                        let msg_str = C_str_slice(bx.cx, msg_str);
                        let msg_file_line_col = C_struct(bx.cx,
                                                         &[msg_str, filename, line, col],
                                                         false);
                        let msg_file_line_col = consts::addr_of(bx.cx,
                                                                msg_file_line_col,
                                                                align,
                                                                "panic_loc");
                        (lang_items::PanicFnLangItem,
                         vec![msg_file_line_col])
                    }
                };

                // Obtain the panic entry point.
                let def_id = common::langcall(bx.tcx(), Some(span), "", lang_item);
                let instance = ty::Instance::mono(bx.tcx(), def_id);
                let fn_ty = FnType::of_instance(bx.cx, &instance);
                let llfn = callee::get_fn(bx.cx, instance);

                // Translate the actual panic invoke/call.
                do_call(self, bx, fn_ty, llfn, &args, None, cleanup);
            }

            mir::TerminatorKind::DropAndReplace { .. } => {
                bug!("undesugared DropAndReplace in trans: {:?}", terminator);
            }

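            // Function calls: resolve the callee and its FnType, special-case
            // transmute, intrinsics, virtual calls and empty drop glue, translate
            // the arguments, then emit the call through `do_call` above.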
            mir::TerminatorKind::Call { ref func, ref args, ref destination, cleanup } => {
                // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
                let callee = self.trans_operand(&bx, func);

                let (instance, mut llfn) = match callee.layout.ty.sty {
                    ty::TyFnDef(def_id, substs) => {
                        (Some(ty::Instance::resolve(bx.cx.tcx,
                                                    ty::ParamEnv::empty(traits::Reveal::All),
                                                    def_id,
                                                    substs).unwrap()),
                         None)
                    }
                    ty::TyFnPtr(_) => {
                        (None, Some(callee.immediate()))
                    }
                    _ => bug!("{} is not callable", callee.layout.ty)
                };
                let def = instance.map(|i| i.def);
                let sig = callee.layout.ty.fn_sig(bx.tcx());
                let sig = bx.tcx().erase_late_bound_regions_and_normalize(&sig);
                let abi = sig.abi;

                // Handle, ourselves, the intrinsics for which old trans wanted Exprs.
                let intrinsic = match def {
                    Some(ty::InstanceDef::Intrinsic(def_id))
                        => Some(bx.tcx().item_name(def_id)),
                    _ => None
                };
                let intrinsic = intrinsic.as_ref().map(|s| &s[..]);

                if intrinsic == Some("transmute") {
                    let &(ref dest, target) = destination.as_ref().unwrap();
                    self.trans_transmute(&bx, &args[0], dest);
                    funclet_br(self, bx, target);
                    return;
                }

                let extra_args = &args[sig.inputs().len()..];
                let extra_args = extra_args.iter().map(|op_arg| {
                    let op_ty = op_arg.ty(self.mir, bx.tcx());
                    self.monomorphize(&op_ty)
                }).collect::<Vec<_>>();

                let fn_ty = match def {
                    Some(ty::InstanceDef::Virtual(..)) => {
                        FnType::new_vtable(bx.cx, sig, &extra_args)
                    }
                    Some(ty::InstanceDef::DropGlue(_, None)) => {
                        // empty drop glue - a nop.
                        let &(_, target) = destination.as_ref().unwrap();
                        funclet_br(self, bx, target);
                        return;
                    }
                    _ => FnType::new(bx.cx, sig, &extra_args)
                };

                // The arguments we'll be passing. Plus one to account for outptr, if used.
                let arg_count = fn_ty.args.len() + fn_ty.ret.is_indirect() as usize;
                let mut llargs = Vec::with_capacity(arg_count);

                // Prepare the return value destination
                let ret_dest = if let Some((ref dest, _)) = *destination {
                    let is_intrinsic = intrinsic.is_some();
                    self.make_return_dest(&bx, dest, &fn_ty.ret, &mut llargs,
                                          is_intrinsic)
                } else {
                    ReturnDest::Nothing
                };

                if intrinsic.is_some() && intrinsic != Some("drop_in_place") {
                    use intrinsic::trans_intrinsic_call;

                    let dest = match ret_dest {
                        _ if fn_ty.ret.is_indirect() => llargs[0],
                        ReturnDest::Nothing => {
                            C_undef(fn_ty.ret.memory_ty(bx.cx).ptr_to())
                        }
                        ReturnDest::IndirectOperand(dst, _) |
                        ReturnDest::Store(dst) => dst.llval,
                        ReturnDest::DirectOperand(_) =>
                            bug!("Cannot use direct operand with an intrinsic call")
                    };

                    let args: Vec<_> = args.iter().enumerate().map(|(i, arg)| {
                        // The indices passed to simd_shuffle* in the
                        // third argument must be constant. This is
                        // checked by const-qualification, which also
                        // promotes any complex rvalues to constants.
                        if i == 2 && intrinsic.unwrap().starts_with("simd_shuffle") {
                            match *arg {
                                mir::Operand::Copy(_) |
                                mir::Operand::Move(_) => {
                                    span_bug!(span, "shuffle indices must be constant");
                                }
                                mir::Operand::Constant(ref constant) => {
                                    let (llval, ty) = self.simd_shuffle_indices(
                                        &bx,
                                        constant,
                                    );
                                    return OperandRef {
                                        val: Immediate(llval),
                                        layout: bx.cx.layout_of(ty)
                                    };
                                }
                            }
                        }

                        self.trans_operand(&bx, arg)
                    }).collect();


                    let callee_ty = instance.as_ref().unwrap().ty(bx.cx.tcx);
                    trans_intrinsic_call(&bx, callee_ty, &fn_ty, &args, dest,
                                         terminator.source_info.span);

                    if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
                        self.store_return(&bx, ret_dest, &fn_ty.ret, dst.llval);
                    }

                    if let Some((_, target)) = *destination {
                        funclet_br(self, bx, target);
                    } else {
                        bx.unreachable();
                    }

                    return;
                }

                // Split the rust-call tupled arguments off.
                let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
                    let (tup, args) = args.split_last().unwrap();
                    (args, Some(tup))
                } else {
                    (&args[..], None)
                };

                for (i, arg) in first_args.iter().enumerate() {
                    let mut op = self.trans_operand(&bx, arg);
                    if let (0, Some(ty::InstanceDef::Virtual(_, idx))) = (i, def) {
                        if let Pair(data_ptr, meta) = op.val {
                            llfn = Some(meth::VirtualIndex::from_index(idx)
                                .get_fn(&bx, meta, &fn_ty));
                            llargs.push(data_ptr);
                            continue;
                        }
                    }

                    // The callee needs to own the argument memory if we pass it
                    // by-ref, so make a local copy of non-immediate constants.
                    match (arg, op.val) {
                        (&mir::Operand::Copy(_), Ref(..)) |
                        (&mir::Operand::Constant(_), Ref(..)) => {
                            let tmp = PlaceRef::alloca(&bx, op.layout, "const");
                            op.val.store(&bx, tmp);
                            op.val = Ref(tmp.llval, tmp.align);
                        }
                        _ => {}
                    }

                    self.trans_argument(&bx, op, &mut llargs, &fn_ty.args[i]);
                }
                if let Some(tup) = untuple {
                    self.trans_arguments_untupled(&bx, tup, &mut llargs,
                        &fn_ty.args[first_args.len()..])
                }

                let fn_ptr = match (llfn, instance) {
                    (Some(llfn), _) => llfn,
                    (None, Some(instance)) => callee::get_fn(bx.cx, instance),
                    _ => span_bug!(span, "no llfn for call"),
                };

                do_call(self, bx, fn_ty, fn_ptr, &llargs,
                        destination.as_ref().map(|&(_, target)| (ret_dest, target)),
                        cleanup);
            }
            mir::TerminatorKind::GeneratorDrop |
            mir::TerminatorKind::Yield { .. } => bug!("generator ops in trans"),
            mir::TerminatorKind::FalseEdges { .. } |
            mir::TerminatorKind::FalseUnwind { .. } => bug!("borrowck false edges in trans"),
        }
    }

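    /// Translates a single call argument into `llargs`, following its ABI pass
    /// mode (padding, ignored, scalar pair, by-ref or cast), spilling to a
    /// temporary alloca when needed.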
    fn trans_argument(&mut self,
                      bx: &Builder<'a, 'tcx>,
                      op: OperandRef<'tcx>,
                      llargs: &mut Vec<ValueRef>,
                      arg: &ArgType<'tcx>) {
        // Fill padding with undef value, where applicable.
        if let Some(ty) = arg.pad {
            llargs.push(C_undef(ty.llvm_type(bx.cx)));
        }

        if arg.is_ignore() {
            return;
        }

        if let PassMode::Pair(..) = arg.mode {
            match op.val {
                Pair(a, b) => {
                    llargs.push(a);
                    llargs.push(b);
                    return;
                }
                _ => bug!("trans_argument: {:?} invalid for pair argument", op)
            }
        }

        // Force by-ref if we have to load through a cast pointer.
        let (mut llval, align, by_ref) = match op.val {
            Immediate(_) | Pair(..) => {
                match arg.mode {
                    PassMode::Indirect(_) | PassMode::Cast(_) => {
                        let scratch = PlaceRef::alloca(bx, arg.layout, "arg");
                        op.val.store(bx, scratch);
                        (scratch.llval, scratch.align, true)
                    }
                    _ => {
                        (op.immediate_or_packed_pair(bx), arg.layout.align, false)
                    }
                }
            }
            Ref(llval, align) => {
                if arg.is_indirect() && align.abi() < arg.layout.align.abi() {
                    // `foo(packed.large_field)`. We can't pass the (unaligned) field directly.
                    // We think that as of Rust 1.16 only temporaries are passed here, but we
                    // don't want to leave scary latent bugs around, so copy to an aligned slot.

                    let scratch = PlaceRef::alloca(bx, arg.layout, "arg");
                    base::memcpy_ty(bx, scratch.llval, llval, op.layout, align);
                    (scratch.llval, scratch.align, true)
                } else {
                    (llval, align, true)
                }
            }
        };

        if by_ref && !arg.is_indirect() {
            // Have to load the argument, maybe while casting it.
            if let PassMode::Cast(ty) = arg.mode {
                llval = bx.load(bx.pointercast(llval, ty.llvm_type(bx.cx).ptr_to()),
                                align.min(arg.layout.align));
            } else {
                // We can't use `PlaceRef::load` here because the argument
                // may have a type we don't treat as immediate, but the ABI
                // used for this call is passing it by-value. In that case,
                // the load would just produce `OperandValue::Ref` instead
                // of the `OperandValue::Immediate` we need for the call.
                llval = bx.load(llval, align);
                if let layout::Abi::Scalar(ref scalar) = arg.layout.abi {
                    if scalar.is_bool() {
                        bx.range_metadata(llval, 0..2);
                    }
                }
                // We store bools as i8 so we need to truncate to i1.
                llval = base::to_immediate(bx, llval, arg.layout);
            }
        }

        llargs.push(llval);
    }

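    /// Expands the tupled arguments of a rust-call ABI function into
    /// individual call arguments, whether the tuple is by-ref or immediate.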
    fn trans_arguments_untupled(&mut self,
                                bx: &Builder<'a, 'tcx>,
                                operand: &mir::Operand<'tcx>,
                                llargs: &mut Vec<ValueRef>,
                                args: &[ArgType<'tcx>]) {
        let tuple = self.trans_operand(bx, operand);

        // Handle both by-ref and immediate tuples.
        if let Ref(llval, align) = tuple.val {
            let tuple_ptr = PlaceRef::new_sized(llval, tuple.layout, align);
            for i in 0..tuple.layout.fields.count() {
                let field_ptr = tuple_ptr.project_field(bx, i);
                self.trans_argument(bx, field_ptr.load(bx), llargs, &args[i]);
            }
        } else {
            // If the tuple is immediate, the elements are as well.
            for i in 0..tuple.layout.fields.count() {
                let op = tuple.extract_field(bx, i);
                self.trans_argument(bx, op, llargs, &args[i]);
            }
        }
    }

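    /// Returns the place used to stash the landing pad's two values (exception
    /// pointer and selector) across cleanup blocks, creating it on first use.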
    fn get_personality_slot(&mut self, bx: &Builder<'a, 'tcx>) -> PlaceRef<'tcx> {
        let cx = bx.cx;
        if let Some(slot) = self.personality_slot {
            slot
        } else {
            let layout = cx.layout_of(cx.tcx.intern_tup(&[
                cx.tcx.mk_mut_ptr(cx.tcx.types.u8),
                cx.tcx.types.i32
            ], false));
            let slot = PlaceRef::alloca(bx, layout, "personalityslot");
            self.personality_slot = Some(slot);
            slot
        }
    }

    /// Return the landingpad wrapper around the given basic block
    ///
    /// No-op in MSVC SEH scheme.
    fn landing_pad_to(&mut self, target_bb: mir::BasicBlock) -> BasicBlockRef {
        if let Some(block) = self.landing_pads[target_bb] {
            return block;
        }

        let block = self.blocks[target_bb];
        let landing_pad = self.landing_pad_uncached(block);
        self.landing_pads[target_bb] = Some(landing_pad);
        landing_pad
    }

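    /// Creates a fresh GNU-style landing pad block that saves the exception
    /// pair into the personality slot and then branches on to `target_bb`.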
    fn landing_pad_uncached(&mut self, target_bb: BasicBlockRef) -> BasicBlockRef {
        if base::wants_msvc_seh(self.cx.sess()) {
            span_bug!(self.mir.span, "landing pad was not inserted?")
        }

        let bx = self.new_block("cleanup");

        let llpersonality = self.cx.eh_personality();
        let llretty = self.landing_pad_type();
        let lp = bx.landing_pad(llretty, llpersonality, 1);
        bx.set_cleanup(lp);

        let slot = self.get_personality_slot(&bx);
        slot.storage_live(&bx);
        Pair(bx.extract_value(lp, 0), bx.extract_value(lp, 1)).store(&bx, slot);

        bx.br(target_bb);
        bx.llbb()
    }

    fn landing_pad_type(&self) -> Type {
        let cx = self.cx;
        Type::struct_(cx, &[Type::i8p(cx), Type::i32(cx)], false)
    }

    fn unreachable_block(&mut self) -> BasicBlockRef {
        self.unreachable_block.unwrap_or_else(|| {
            let bl = self.new_block("unreachable");
            bl.unreachable();
            self.unreachable_block = Some(bl.llbb());
            bl.llbb()
        })
    }

    pub fn new_block(&self, name: &str) -> Builder<'a, 'tcx> {
        Builder::new_block(self.cx, self.llfn, name)
    }

    pub fn build_block(&self, bb: mir::BasicBlock) -> Builder<'a, 'tcx> {
        let bx = Builder::with_cx(self.cx);
        bx.position_at_end(self.blocks[bb]);
        bx
    }

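    /// Decides where a call's return value should be written: directly into
    /// the destination place, into a temporary (for indirect returns and
    /// intrinsics), or straight into an operand local.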
    fn make_return_dest(&mut self, bx: &Builder<'a, 'tcx>,
                        dest: &mir::Place<'tcx>, fn_ret: &ArgType<'tcx>,
                        llargs: &mut Vec<ValueRef>, is_intrinsic: bool)
                        -> ReturnDest<'tcx> {
        // If the return is ignored, we can just return a do-nothing ReturnDest
        if fn_ret.is_ignore() {
            return ReturnDest::Nothing;
        }
        let dest = if let mir::Place::Local(index) = *dest {
            match self.locals[index] {
                LocalRef::Place(dest) => dest,
                LocalRef::Operand(None) => {
                    // Handle temporary places, specifically Operand ones, as
                    // they don't have allocas
                    return if fn_ret.is_indirect() {
                        // Odd, but possible, case: we have an operand temporary,
                        // but the calling convention has an indirect return.
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout, "tmp_ret");
                        tmp.storage_live(bx);
                        llargs.push(tmp.llval);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else if is_intrinsic {
                        // Currently, intrinsics always need a location to store
                        // the result, so we create a temporary alloca for the
                        // result.
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout, "tmp_ret");
                        tmp.storage_live(bx);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else {
                        ReturnDest::DirectOperand(index)
                    };
                }
                LocalRef::Operand(Some(_)) => {
                    bug!("place local already assigned to");
                }
            }
        } else {
            self.trans_place(bx, dest)
        };
        if fn_ret.is_indirect() {
            if dest.align.abi() < dest.layout.align.abi() {
                // Currently, MIR code generation does not create calls
                // that store directly to fields of packed structs (in
                // fact, the calls it creates write only to temps).
                //
                // If someone changes that, please update this code path
                // to create a temporary.
                span_bug!(self.mir.span, "can't directly store to unaligned value");
            }
            llargs.push(dest.llval);
            ReturnDest::Nothing
        } else {
            ReturnDest::Store(dest)
        }
    }

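    /// Translates a `transmute` call whose destination is either a place or an
    /// operand local; operand locals get a temporary alloca that is loaded back
    /// into SSA form.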
    fn trans_transmute(&mut self, bx: &Builder<'a, 'tcx>,
                       src: &mir::Operand<'tcx>,
                       dst: &mir::Place<'tcx>) {
        if let mir::Place::Local(index) = *dst {
            match self.locals[index] {
                LocalRef::Place(place) => self.trans_transmute_into(bx, src, place),
                LocalRef::Operand(None) => {
                    let dst_layout = bx.cx.layout_of(self.monomorphized_place_ty(dst));
                    assert!(!dst_layout.ty.has_erasable_regions());
                    let place = PlaceRef::alloca(bx, dst_layout, "transmute_temp");
                    place.storage_live(bx);
                    self.trans_transmute_into(bx, src, place);
                    let op = place.load(bx);
                    place.storage_dead(bx);
                    self.locals[index] = LocalRef::Operand(Some(op));
                }
                LocalRef::Operand(Some(op)) => {
                    assert!(op.layout.is_zst(),
                            "assigning to initialized SSA temp");
                }
            }
        } else {
            let dst = self.trans_place(bx, dst);
            self.trans_transmute_into(bx, src, dst);
        }
    }

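    /// Stores `src` into `dst` reinterpreted at the source type, using the
    /// smaller of the two alignments.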
    fn trans_transmute_into(&mut self, bx: &Builder<'a, 'tcx>,
                            src: &mir::Operand<'tcx>,
                            dst: PlaceRef<'tcx>) {
        let src = self.trans_operand(bx, src);
        let llty = src.layout.llvm_type(bx.cx);
        let cast_ptr = bx.pointercast(dst.llval, llty.ptr_to());
        let align = src.layout.align.min(dst.layout.align);
        src.val.store(bx, PlaceRef::new_sized(cast_ptr, src.layout, align));
    }


    // Stores the return value of a function call into its final location.
    fn store_return(&mut self,
                    bx: &Builder<'a, 'tcx>,
                    dest: ReturnDest<'tcx>,
                    ret_ty: &ArgType<'tcx>,
                    llval: ValueRef) {
        use self::ReturnDest::*;

        match dest {
            Nothing => (),
            Store(dst) => ret_ty.store(bx, llval, dst),
            IndirectOperand(tmp, index) => {
                let op = tmp.load(bx);
                tmp.storage_dead(bx);
                self.locals[index] = LocalRef::Operand(Some(op));
            }
            DirectOperand(index) => {
                // If there is a cast, we have to store and reload.
                let op = if let PassMode::Cast(_) = ret_ty.mode {
                    let tmp = PlaceRef::alloca(bx, ret_ty.layout, "tmp_ret");
                    tmp.storage_live(bx);
                    ret_ty.store(bx, llval, tmp);
                    let op = tmp.load(bx);
                    tmp.storage_dead(bx);
                    op
                } else {
                    OperandRef::from_immediate_or_packed_pair(bx, llval, ret_ty.layout)
                };
                self.locals[index] = LocalRef::Operand(Some(op));
            }
        }
    }
}

enum ReturnDest<'tcx> {
    // Do nothing, the return value is indirect or ignored
    Nothing,
    // Store the return value to the pointer
    Store(PlaceRef<'tcx>),
    // Stores an indirect return value to an operand local place
    IndirectOperand(PlaceRef<'tcx>, mir::Local),
    // Stores a direct return value to an operand local place
    DirectOperand(mir::Local)
}