rust.git: src/librustc_trans/mir/block.rs (commit 724ff2f21344f35c31b4ad4c7ec7a7600268e3c1)
1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 use llvm::{self, ValueRef, BasicBlockRef};
12 use rustc::middle::lang_items;
13 use rustc::middle::const_val::{ConstEvalErr, ConstInt, ErrKind};
14 use rustc::ty::{self, TypeFoldable};
15 use rustc::ty::layout::{self, LayoutTyper};
16 use rustc::mir;
17 use abi::{Abi, FnType, ArgType};
18 use adt;
19 use base::{self, Lifetime};
20 use callee;
21 use builder::Builder;
22 use common::{self, Funclet};
23 use common::{C_bool, C_str_slice, C_struct, C_u32, C_undef};
24 use consts;
25 use machine::llalign_of_min;
26 use meth;
27 use monomorphize;
28 use type_of;
29 use type_::Type;
30
31 use rustc_data_structures::indexed_vec::IndexVec;
32 use syntax::symbol::Symbol;
33
34 use std::cmp;
35
36 use super::{MirContext, LocalRef};
37 use super::analyze::CleanupKind;
38 use super::constant::Const;
39 use super::lvalue::{Alignment, LvalueRef};
40 use super::operand::OperandRef;
41 use super::operand::OperandValue::{Pair, Ref, Immediate};
42
43 impl<'a, 'tcx> MirContext<'a, 'tcx> {
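    /// Lowers one MIR basic block into LLVM IR: each of the block's
    /// statements is translated in order, followed by its terminator.
    /// Schematically, a MIR block such as
    ///
    /// ```text
    /// bb3: {
    ///     _3 = &_4;                                    // statement
    ///     switchInt(_2) -> [0u8: bb5, otherwise: bb4]; // terminator
    /// }
    /// ```
    ///
    /// (illustrative syntax only) becomes one or more LLVM basic blocks,
    /// with extra blocks created for panics, cleanup trampolines, etc.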
44     pub fn trans_block(&mut self, bb: mir::BasicBlock,
45         funclets: &IndexVec<mir::BasicBlock, Option<Funclet>>) {
46         let mut bcx = self.get_builder(bb);
47         let data = &self.mir[bb];
48
49         debug!("trans_block({:?}={:?})", bb, data);
50
51         let funclet = match self.cleanup_kinds[bb] {
52             CleanupKind::Internal { funclet } => funclets[funclet].as_ref(),
53             _ => funclets[bb].as_ref(),
54         };
55
56         // Create the cleanup bundle, if needed.
57         let cleanup_pad = funclet.map(|lp| lp.cleanuppad());
58         let cleanup_bundle = funclet.map(|l| l.bundle());
59
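        // Branch from this (possibly cleanup) block to `bb`. When we are
        // inside an MSVC-style funclet, the jump has to be a `cleanupret`
        // rather than a plain `br`.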
60         let funclet_br = |this: &Self, bcx: Builder, bb: mir::BasicBlock| {
61             let lltarget = this.blocks[bb];
62             if let Some(cp) = cleanup_pad {
63                 match this.cleanup_kinds[bb] {
64                     CleanupKind::Funclet => {
65                         // micro-optimization: generate a `ret` rather than a jump
66                         // to a return block
67                         bcx.cleanup_ret(cp, Some(lltarget));
68                     }
69                     CleanupKind::Internal { .. } => bcx.br(lltarget),
70                     CleanupKind::NotCleanup => bug!("jump from cleanup bb to bb {:?}", bb)
71                 }
72             } else {
73                 bcx.br(lltarget);
74             }
75         };
76
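        // Resolve the LLVM basic block to jump to for `target`. Edges that
        // cross a cleanup boundary need either a `cleanupret` trampoline
        // (MSVC funclets) or a landing pad (GNU-style unwinding).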
77         let llblock = |this: &mut Self, target: mir::BasicBlock| {
78             let lltarget = this.blocks[target];
79
80             if let Some(cp) = cleanup_pad {
81                 match this.cleanup_kinds[target] {
82                     CleanupKind::Funclet => {
83                         // MSVC cross-funclet jump - need a trampoline
84
85                         debug!("llblock: creating cleanup trampoline for {:?}", target);
86                         let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
87                         let trampoline = this.new_block(name);
88                         trampoline.cleanup_ret(cp, Some(lltarget));
89                         trampoline.llbb()
90                     }
91                     CleanupKind::Internal { .. } => lltarget,
92                     CleanupKind::NotCleanup =>
93                         bug!("jump from cleanup bb {:?} to bb {:?}", bb, target)
94                 }
95             } else {
96                 if let (CleanupKind::NotCleanup, CleanupKind::Funclet) =
97                     (this.cleanup_kinds[bb], this.cleanup_kinds[target])
98                 {
99                     // jump *into* cleanup - need a landing pad on GNU-style (non-MSVC) unwinding
100                     this.landing_pad_to(target)
101                 } else {
102                     lltarget
103                 }
104             }
105         };
106
107         for statement in &data.statements {
108             bcx = self.trans_statement(bcx, statement);
109         }
110
111         let terminator = data.terminator();
112         debug!("trans_block: terminator: {:?}", terminator);
113
114         let span = terminator.source_info.span;
115         self.set_debug_loc(&bcx, terminator.source_info);
116         match terminator.kind {
117             mir::TerminatorKind::Resume => {
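                // With funclet-based EH the resume is just a `cleanupret`;
                // otherwise reload the exception value from the personality
                // slot and resume unwinding (or call `eh_unwind_resume` on
                // targets with a custom resume path).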
118                 if let Some(cleanup_pad) = cleanup_pad {
119                     bcx.cleanup_ret(cleanup_pad, None);
120                 } else {
121                     let ps = self.get_personality_slot(&bcx);
122                     let lp = bcx.load(ps, None);
123                     Lifetime::End.call(&bcx, ps);
124                     if !bcx.sess().target.target.options.custom_unwind_resume {
125                         bcx.resume(lp);
126                     } else {
127                         let exc_ptr = bcx.extract_value(lp, 0);
128                         bcx.call(bcx.ccx.eh_unwind_resume(), &[exc_ptr], cleanup_bundle);
129                         bcx.unreachable();
130                     }
131                 }
132             }
133
134             mir::TerminatorKind::Goto { target } => {
135                 funclet_br(self, bcx, target);
136             }
137
138             mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref values, ref targets } => {
139                 let discr = self.trans_operand(&bcx, discr);
140                 if switch_ty == bcx.tcx().types.bool {
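                    // A `SwitchInt` on a `bool` becomes a conditional branch.
                    // If the single tested value is 0, `targets[0]` is the
                    // `false` edge, so the two successors must be swapped.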
141                     let lltrue = llblock(self, targets[0]);
142                     let llfalse = llblock(self, targets[1]);
143                     if let [ConstInt::U8(0)] = values[..] {
144                         bcx.cond_br(discr.immediate(), llfalse, lltrue);
145                     } else {
146                         bcx.cond_br(discr.immediate(), lltrue, llfalse);
147                     }
148                 } else {
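                    // General case: emit an LLVM `switch`, with the last MIR
                    // target acting as the `otherwise` block.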
149                     let (otherwise, targets) = targets.split_last().unwrap();
150                     let switch = bcx.switch(discr.immediate(),
151                                             llblock(self, *otherwise), values.len());
152                     for (value, target) in values.iter().zip(targets) {
153                         let val = Const::from_constint(bcx.ccx, value);
154                         let llbb = llblock(self, *target);
155                         bcx.add_case(switch, val.llval, llbb)
156                     }
157                 }
158             }
159
160             mir::TerminatorKind::Return => {
161                 let ret = self.fn_ty.ret;
162                 if ret.is_ignore() || ret.is_indirect() {
163                     bcx.ret_void();
164                     return;
165                 }
166
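                // If the ABI returns the value at a different ("cast") type,
                // make sure the value is in memory (spilling immediates to a
                // scratch slot), then load it back at the cast type.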
167                 let llval = if let Some(cast_ty) = ret.cast {
168                     let op = match self.locals[mir::RETURN_POINTER] {
169                         LocalRef::Operand(Some(op)) => op,
170                         LocalRef::Operand(None) => bug!("use of return before def"),
171                         LocalRef::Lvalue(tr_lvalue) => {
172                             OperandRef {
173                                 val: Ref(tr_lvalue.llval, tr_lvalue.alignment),
174                                 ty: tr_lvalue.ty.to_ty(bcx.tcx())
175                             }
176                         }
177                     };
178                     let llslot = match op.val {
179                         Immediate(_) | Pair(..) => {
180                             let llscratch = bcx.alloca(ret.memory_ty(bcx.ccx), "ret", None);
181                             self.store_operand(&bcx, llscratch, None, op);
182                             llscratch
183                         }
184                         Ref(llval, align) => {
185                             assert_eq!(align, Alignment::AbiAligned,
186                                        "return pointer is unaligned!");
187                             llval
188                         }
189                     };
190                     let load = bcx.load(
191                         bcx.pointercast(llslot, cast_ty.ptr_to()),
192                         Some(ret.layout.align(bcx.ccx).abi() as u32));
193                     load
194                 } else {
195                     let op = self.trans_consume(&bcx, &mir::Lvalue::Local(mir::RETURN_POINTER));
196                     if let Ref(llval, align) = op.val {
197                         base::load_ty(&bcx, llval, align, op.ty)
198                     } else {
199                         op.pack_if_pair(&bcx).immediate()
200                     }
201                 };
202                 bcx.ret(llval);
203             }
204
205             mir::TerminatorKind::Unreachable => {
206                 bcx.unreachable();
207             }
208
209             mir::TerminatorKind::Drop { ref location, target, unwind } => {
210                 let ty = location.ty(&self.mir, bcx.tcx()).to_ty(bcx.tcx());
211                 let ty = self.monomorphize(&ty);
212                 let drop_fn = monomorphize::resolve_drop_in_place(bcx.ccx.shared(), ty);
213
214                 if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
215                     // we don't actually need to drop anything.
216                     funclet_br(self, bcx, target);
217                     return
218                 }
219
220                 let lvalue = self.trans_lvalue(&bcx, location);
221                 let (drop_fn, need_extra) = match ty.sty {
222                     ty::TyDynamic(..) => (meth::DESTRUCTOR.get_fn(&bcx, lvalue.llextra),
223                                           false),
224                     _ => (callee::get_fn(bcx.ccx, drop_fn), lvalue.has_extra())
225                 };
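                // `llextra` (vtable pointer or slice length) is passed as a
                // second argument only when the dropped value is unsized.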
226                 let args = &[lvalue.llval, lvalue.llextra][..1 + need_extra as usize];
227                 if let Some(unwind) = unwind {
228                     bcx.invoke(
229                         drop_fn,
230                         args,
231                         self.blocks[target],
232                         llblock(self, unwind),
233                         cleanup_bundle
234                     );
235                 } else {
236                     bcx.call(drop_fn, args, cleanup_bundle);
237                     funclet_br(self, bcx, target);
238                 }
239             }
240
241             mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
242                 let cond = self.trans_operand(&bcx, cond).immediate();
243                 let mut const_cond = common::const_to_opt_u128(cond, false).map(|c| c == 1);
244
245                 // This case can currently arise only from functions marked
246                 // with #[rustc_inherit_overflow_checks] and inlined from
247                 // another crate (mostly core::num generic/#[inline] fns),
248                 // while the current crate doesn't use overflow checks.
249                 // NOTE: Unlike binops, negation doesn't have its own
250                 // checked operation, just a comparison with the minimum
251                 // value, so we have to check for the assert message.
252                 if !bcx.ccx.check_overflow() {
253                     use rustc_const_math::ConstMathErr::Overflow;
254                     use rustc_const_math::Op::Neg;
255
256                     if let mir::AssertMessage::Math(Overflow(Neg)) = *msg {
257                         const_cond = Some(expected);
258                     }
259                 }
260
261                 // Don't translate the panic block if success is known.
262                 if const_cond == Some(expected) {
263                     funclet_br(self, bcx, target);
264                     return;
265                 }
266
267                 // Pass the condition through llvm.expect for branch hinting.
268                 let expect = bcx.ccx.get_intrinsic(&"llvm.expect.i1");
269                 let cond = bcx.call(expect, &[cond, C_bool(bcx.ccx, expected)], None);
270
271                 // Create the failure block and the conditional branch to it.
272                 let lltarget = llblock(self, target);
273                 let panic_block = self.new_block("panic");
274                 if expected {
275                     bcx.cond_br(cond, lltarget, panic_block.llbb());
276                 } else {
277                     bcx.cond_br(cond, panic_block.llbb(), lltarget);
278                 }
279
280                 // After this point, bcx is the block for the call to panic.
281                 bcx = panic_block;
282                 self.set_debug_loc(&bcx, terminator.source_info);
283
284                 // Get the location information.
285                 let loc = bcx.sess().codemap().lookup_char_pos(span.lo);
286                 let filename = Symbol::intern(&loc.file.name).as_str();
287                 let filename = C_str_slice(bcx.ccx, filename);
288                 let line = C_u32(bcx.ccx, loc.line as u32);
289
290                 // Put together the arguments to the panic entry point.
291                 let (lang_item, args, const_err) = match *msg {
292                     mir::AssertMessage::BoundsCheck { ref len, ref index } => {
293                         let len = self.trans_operand(&mut bcx, len).immediate();
294                         let index = self.trans_operand(&mut bcx, index).immediate();
295
296                         let const_err = common::const_to_opt_u128(len, false)
297                             .and_then(|len| common::const_to_opt_u128(index, false)
298                                 .map(|index| ErrKind::IndexOutOfBounds {
299                                     len: len as u64,
300                                     index: index as u64
301                                 }));
302
303                         let file_line = C_struct(bcx.ccx, &[filename, line], false);
304                         let align = llalign_of_min(bcx.ccx, common::val_ty(file_line));
305                         let file_line = consts::addr_of(bcx.ccx,
306                                                         file_line,
307                                                         align,
308                                                         "panic_bounds_check_loc");
309                         (lang_items::PanicBoundsCheckFnLangItem,
310                          vec![file_line, index, len],
311                          const_err)
312                     }
313                     mir::AssertMessage::Math(ref err) => {
314                         let msg_str = Symbol::intern(err.description()).as_str();
315                         let msg_str = C_str_slice(bcx.ccx, msg_str);
316                         let msg_file_line = C_struct(bcx.ccx,
317                                                      &[msg_str, filename, line],
318                                                      false);
319                         let align = llalign_of_min(bcx.ccx, common::val_ty(msg_file_line));
320                         let msg_file_line = consts::addr_of(bcx.ccx,
321                                                             msg_file_line,
322                                                             align,
323                                                             "panic_loc");
324                         (lang_items::PanicFnLangItem,
325                          vec![msg_file_line],
326                          Some(ErrKind::Math(err.clone())))
327                     }
328                 };
329
330                 // If we know we always panic, and the error message
331                 // is also constant, then we can produce a warning.
332                 if const_cond == Some(!expected) {
333                     if let Some(err) = const_err {
334                         let err = ConstEvalErr{ span: span, kind: err };
335                         let mut diag = bcx.tcx().sess.struct_span_warn(
336                             span, "this expression will panic at run-time");
337                         err.note(bcx.tcx(), span, "expression", &mut diag);
338                         diag.emit();
339                     }
340                 }
341
342                 // Obtain the panic entry point.
343                 let def_id = common::langcall(bcx.tcx(), Some(span), "", lang_item);
344                 let instance = ty::Instance::mono(bcx.tcx(), def_id);
345                 let llfn = callee::get_fn(bcx.ccx, instance);
346
347                 // Translate the actual panic invoke/call.
348                 if let Some(unwind) = cleanup {
349                     bcx.invoke(llfn,
350                                &args,
351                                self.unreachable_block(),
352                                llblock(self, unwind),
353                                cleanup_bundle);
354                 } else {
355                     bcx.call(llfn, &args, cleanup_bundle);
356                     bcx.unreachable();
357                 }
358             }
359
360             mir::TerminatorKind::DropAndReplace { .. } => {
361                 bug!("undesugared DropAndReplace in trans: {:?}", data);
362             }
363
364             mir::TerminatorKind::Call { ref func, ref args, ref destination, ref cleanup } => {
365                 // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
366                 let callee = self.trans_operand(&bcx, func);
367
368                 let (instance, mut llfn, sig) = match callee.ty.sty {
369                     ty::TyFnDef(def_id, substs, sig) => {
370                         (Some(monomorphize::resolve(bcx.ccx.shared(), def_id, substs)),
371                          None,
372                          sig)
373                     }
374                     ty::TyFnPtr(sig) => {
375                         (None,
376                          Some(callee.immediate()),
377                          sig)
378                     }
379                     _ => bug!("{} is not callable", callee.ty)
380                 };
381                 let def = instance.map(|i| i.def);
382                 let sig = bcx.tcx().erase_late_bound_regions_and_normalize(&sig);
383                 let abi = sig.abi;
384
385                 // Handle, ourselves, the intrinsics that old trans wants Expr's for.
386                 let intrinsic = match def {
387                     Some(ty::InstanceDef::Intrinsic(def_id))
388                         => Some(bcx.tcx().item_name(def_id).as_str()),
389                     _ => None
390                 };
391                 let intrinsic = intrinsic.as_ref().map(|s| &s[..]);
392
393                 if intrinsic == Some("transmute") {
394                     let &(ref dest, target) = destination.as_ref().unwrap();
395                     self.trans_transmute(&bcx, &args[0], dest);
396                     funclet_br(self, bcx, target);
397                     return;
398                 }
399
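                // Arguments past the signature's declared inputs (e.g. for
                // C-variadic calls) still need ABI information, so collect
                // their monomorphized types for the `FnType` below.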
400                 let extra_args = &args[sig.inputs().len()..];
401                 let extra_args = extra_args.iter().map(|op_arg| {
402                     let op_ty = op_arg.ty(&self.mir, bcx.tcx());
403                     self.monomorphize(&op_ty)
404                 }).collect::<Vec<_>>();
405
406                 let fn_ty = match def {
407                     Some(ty::InstanceDef::Virtual(..)) => {
408                         FnType::new_vtable(bcx.ccx, sig, &extra_args)
409                     }
410                     Some(ty::InstanceDef::DropGlue(_, None)) => {
411                         // empty drop glue - a nop.
412                         let &(_, target) = destination.as_ref().unwrap();
413                         funclet_br(self, bcx, target);
414                         return;
415                     }
416                     _ => FnType::new(bcx.ccx, sig, &extra_args)
417                 };
418
419                 // The arguments we'll be passing. Plus one to account for outptr, if used.
420                 let arg_count = fn_ty.args.len() + fn_ty.ret.is_indirect() as usize;
421                 let mut llargs = Vec::with_capacity(arg_count);
422
423                 // Prepare the return value destination
424                 let ret_dest = if let Some((ref dest, _)) = *destination {
425                     let is_intrinsic = intrinsic.is_some();
426                     self.make_return_dest(&bcx, dest, &fn_ty.ret, &mut llargs,
427                                           is_intrinsic)
428                 } else {
429                     ReturnDest::Nothing
430                 };
431
432                 // Split the rust-call tupled arguments off.
433                 let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
434                     let (tup, args) = args.split_last().unwrap();
435                     (args, Some(tup))
436                 } else {
437                     (&args[..], None)
438                 };
439
440                 let is_shuffle = intrinsic.map_or(false, |name| {
441                     name.starts_with("simd_shuffle")
442                 });
443                 let mut idx = 0;
444                 for arg in first_args {
445                     // The indices passed to simd_shuffle* in the
446                     // third argument must be constant. This is
447                     // checked by const-qualification, which also
448                     // promotes any complex rvalues to constants.
449                     if is_shuffle && idx == 2 {
450                         match *arg {
451                             mir::Operand::Consume(_) => {
452                                 span_bug!(span, "shuffle indices must be constant");
453                             }
454                             mir::Operand::Constant(ref constant) => {
455                                 let val = self.trans_constant(&bcx, constant);
456                                 llargs.push(val.llval);
457                                 idx += 1;
458                                 continue;
459                             }
460                         }
461                     }
462
463                     let op = self.trans_operand(&bcx, arg);
464                     self.trans_argument(&bcx, op, &mut llargs, &fn_ty,
465                                         &mut idx, &mut llfn, &def);
466                 }
467                 if let Some(tup) = untuple {
468                     self.trans_arguments_untupled(&bcx, tup, &mut llargs, &fn_ty,
469                                                   &mut idx, &mut llfn, &def)
470                 }
471
472                 if intrinsic.is_some() && intrinsic != Some("drop_in_place") {
473                     use intrinsic::trans_intrinsic_call;
474
475                     let (dest, llargs) = match ret_dest {
476                         _ if fn_ty.ret.is_indirect() => {
477                             (llargs[0], &llargs[1..])
478                         }
479                         ReturnDest::Nothing => {
480                             (C_undef(fn_ty.ret.memory_ty(bcx.ccx).ptr_to()), &llargs[..])
481                         }
482                         ReturnDest::IndirectOperand(dst, _) |
483                         ReturnDest::Store(dst) => (dst, &llargs[..]),
484                         ReturnDest::DirectOperand(_) =>
485                             bug!("Cannot use direct operand with an intrinsic call")
486                     };
487
488                     let callee_ty = common::instance_ty(
489                         bcx.ccx.shared(), instance.as_ref().unwrap());
490                     trans_intrinsic_call(&bcx, callee_ty, &fn_ty, &llargs, dest,
491                                          terminator.source_info.span);
492
493                     if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
494                         // Make a fake operand for store_return
495                         let op = OperandRef {
496                             val: Ref(dst, Alignment::AbiAligned),
497                             ty: sig.output(),
498                         };
499                         self.store_return(&bcx, ret_dest, &fn_ty.ret, op);
500                     }
501
502                     if let Some((_, target)) = *destination {
503                         funclet_br(self, bcx, target);
504                     } else {
505                         bcx.unreachable();
506                     }
507
508                     return;
509                 }
510
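                // Resolve the actual function pointer: either the fn-pointer
                // operand we already have, or the monomorphized instance
                // (possibly replaced by a vtable load in trans_argument).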
511                 let fn_ptr = match (llfn, instance) {
512                     (Some(llfn), _) => llfn,
513                     (None, Some(instance)) => callee::get_fn(bcx.ccx, instance),
514                     _ => span_bug!(span, "no llfn for call"),
515                 };
516
517                 // The many different ways to call a function are handled here.
518                 if let &Some(cleanup) = cleanup {
519                     let ret_bcx = if let Some((_, target)) = *destination {
520                         self.blocks[target]
521                     } else {
522                         self.unreachable_block()
523                     };
524                     let invokeret = bcx.invoke(fn_ptr,
525                                                &llargs,
526                                                ret_bcx,
527                                                llblock(self, cleanup),
528                                                cleanup_bundle);
529                     fn_ty.apply_attrs_callsite(invokeret);
530
531                     if let Some((_, target)) = *destination {
532                         let ret_bcx = self.get_builder(target);
533                         self.set_debug_loc(&ret_bcx, terminator.source_info);
534                         let op = OperandRef {
535                             val: Immediate(invokeret),
536                             ty: sig.output(),
537                         };
538                         self.store_return(&ret_bcx, ret_dest, &fn_ty.ret, op);
539                     }
540                 } else {
541                     let llret = bcx.call(fn_ptr, &llargs, cleanup_bundle);
542                     fn_ty.apply_attrs_callsite(llret);
543                     if let Some((_, target)) = *destination {
544                         let op = OperandRef {
545                             val: Immediate(llret),
546                             ty: sig.output(),
547                         };
548                         self.store_return(&bcx, ret_dest, &fn_ty.ret, op);
549                         funclet_br(self, bcx, target);
550                     } else {
551                         bcx.unreachable();
552                     }
553                 }
554             }
555         }
556     }
557
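    /// Lowers a single call argument into `llargs`. Fat pointers are split
    /// into their data and metadata words, by-value arguments are loaded
    /// from memory when needed, and for virtual calls the callee is fetched
    /// from the vtable carried in the first (self) fat pointer.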
558     fn trans_argument(&mut self,
559                       bcx: &Builder<'a, 'tcx>,
560                       op: OperandRef<'tcx>,
561                       llargs: &mut Vec<ValueRef>,
562                       fn_ty: &FnType<'tcx>,
563                       next_idx: &mut usize,
564                       llfn: &mut Option<ValueRef>,
565                       def: &Option<ty::InstanceDef<'tcx>>) {
566         if let Pair(a, b) = op.val {
567             // Treat the values in a fat pointer separately.
568             if common::type_is_fat_ptr(bcx.ccx, op.ty) {
569                 let (ptr, meta) = (a, b);
570                 if *next_idx == 0 {
571                     if let Some(ty::InstanceDef::Virtual(_, idx)) = *def {
572                         let llmeth = meth::VirtualIndex::from_index(idx).get_fn(bcx, meta);
573                         let llty = fn_ty.llvm_type(bcx.ccx).ptr_to();
574                         *llfn = Some(bcx.pointercast(llmeth, llty));
575                     }
576                 }
577
578                 let imm_op = |x| OperandRef {
579                     val: Immediate(x),
580                     // We won't be checking the type again.
581                     ty: bcx.tcx().types.err
582                 };
583                 self.trans_argument(bcx, imm_op(ptr), llargs, fn_ty, next_idx, llfn, def);
584                 self.trans_argument(bcx, imm_op(meta), llargs, fn_ty, next_idx, llfn, def);
585                 return;
586             }
587         }
588
589         let arg = &fn_ty.args[*next_idx];
590         *next_idx += 1;
591
592         // Fill padding with undef value, where applicable.
593         if let Some(ty) = arg.pad {
594             llargs.push(C_undef(ty));
595         }
596
597         if arg.is_ignore() {
598             return;
599         }
600
601         // Force by-ref if we have to load through a cast pointer.
602         let (mut llval, align, by_ref) = match op.val {
603             Immediate(_) | Pair(..) => {
604                 if arg.is_indirect() || arg.cast.is_some() {
605                     let llscratch = bcx.alloca(arg.memory_ty(bcx.ccx), "arg", None);
606                     self.store_operand(bcx, llscratch, None, op);
607                     (llscratch, Alignment::AbiAligned, true)
608                 } else {
609                     (op.pack_if_pair(bcx).immediate(), Alignment::AbiAligned, false)
610                 }
611             }
612             Ref(llval, Alignment::Packed) if arg.is_indirect() => {
613                 // `foo(packed.large_field)`. We can't pass the (unaligned) field directly.
614                 // We think that, as of Rust 1.16, only temporaries are passed here, but copy
615                 // to an aligned scratch slot anyway so we don't leave scary latent bugs around.
616
617                 let llscratch = bcx.alloca(arg.memory_ty(bcx.ccx), "arg", None);
618                 base::memcpy_ty(bcx, llscratch, llval, op.ty, Some(1));
619                 (llscratch, Alignment::AbiAligned, true)
620             }
621             Ref(llval, align) => (llval, align, true)
622         };
623
624         if by_ref && !arg.is_indirect() {
625             // Have to load the argument, maybe while casting it.
626             if arg.layout.ty == bcx.tcx().types.bool {
627                 // We store bools as i8 so we need to truncate to i1.
628                 llval = bcx.load_range_assert(llval, 0, 2, llvm::False, None);
629                 llval = bcx.trunc(llval, Type::i1(bcx.ccx));
630             } else if let Some(ty) = arg.cast {
631                 llval = bcx.load(bcx.pointercast(llval, ty.ptr_to()),
632                                  align.min_with(arg.layout.align(bcx.ccx).abi() as u32));
633             } else {
634                 llval = bcx.load(llval, align.to_align());
635             }
636         }
637
638         llargs.push(llval);
639     }
640
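    /// Expands the tupled trailing argument of a "rust-call" function into
    /// individual arguments, handling by-ref, immediate, and pair tuples.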
641     fn trans_arguments_untupled(&mut self,
642                                 bcx: &Builder<'a, 'tcx>,
643                                 operand: &mir::Operand<'tcx>,
644                                 llargs: &mut Vec<ValueRef>,
645                                 fn_ty: &FnType<'tcx>,
646                                 next_idx: &mut usize,
647                                 llfn: &mut Option<ValueRef>,
648                                 def: &Option<ty::InstanceDef<'tcx>>) {
649         let tuple = self.trans_operand(bcx, operand);
650
651         let arg_types = match tuple.ty.sty {
652             ty::TyTuple(ref tys, _) => tys,
653             _ => span_bug!(self.mir.span,
654                            "bad final argument to \"rust-call\" fn {:?}", tuple.ty)
655         };
656
657         // Handle both by-ref and immediate tuples.
658         match tuple.val {
659             Ref(llval, align) => {
660                 for (n, &ty) in arg_types.iter().enumerate() {
661                     let ptr = LvalueRef::new_sized_ty(llval, tuple.ty, align);
662                     let (ptr, align) = ptr.trans_field_ptr(bcx, n);
663                     let val = if common::type_is_fat_ptr(bcx.ccx, ty) {
664                         let (lldata, llextra) = base::load_fat_ptr(bcx, ptr, align, ty);
665                         Pair(lldata, llextra)
666                     } else {
667                         // trans_argument will load this if it needs to
668                         Ref(ptr, align)
669                     };
670                     let op = OperandRef {
671                         val: val,
672                         ty: ty
673                     };
674                     self.trans_argument(bcx, op, llargs, fn_ty, next_idx, llfn, def);
675                 }
676
677             }
678             Immediate(llval) => {
679                 let l = bcx.ccx.layout_of(tuple.ty);
680                 let v = if let layout::Univariant { ref variant, .. } = *l {
681                     variant
682                 } else {
683                     bug!("Not a tuple.");
684                 };
685                 for (n, &ty) in arg_types.iter().enumerate() {
686                     let mut elem = bcx.extract_value(
687                         llval, adt::struct_llfields_index(v, n));
688                     // Truncate bools to i1, if needed
689                     if ty.is_bool() && common::val_ty(elem) != Type::i1(bcx.ccx) {
690                         elem = bcx.trunc(elem, Type::i1(bcx.ccx));
691                     }
692                     // If the tuple is immediate, the elements are as well
693                     let op = OperandRef {
694                         val: Immediate(elem),
695                         ty: ty
696                     };
697                     self.trans_argument(bcx, op, llargs, fn_ty, next_idx, llfn, def);
698                 }
699             }
700             Pair(a, b) => {
701                 let elems = [a, b];
702                 for (n, &ty) in arg_types.iter().enumerate() {
703                     let mut elem = elems[n];
704                     // Truncate bools to i1, if needed
705                     if ty.is_bool() && common::val_ty(elem) != Type::i1(bcx.ccx) {
706                         elem = bcx.trunc(elem, Type::i1(bcx.ccx));
707                     }
708                     // Pair is always made up of immediates
709                     let op = OperandRef {
710                         val: Immediate(elem),
711                         ty: ty
712                     };
713                     self.trans_argument(bcx, op, llargs, fn_ty, next_idx, llfn, def);
714                 }
715             }
716         }
717
718     }
719
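    /// Returns the stack slot holding the landing pad's `{ i8*, i32 }`
    /// personality value, creating it on first use.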
720     fn get_personality_slot(&mut self, bcx: &Builder<'a, 'tcx>) -> ValueRef {
721         let ccx = bcx.ccx;
722         if let Some(slot) = self.llpersonalityslot {
723             slot
724         } else {
725             let llretty = Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)], false);
726             let slot = bcx.alloca(llretty, "personalityslot", None);
727             self.llpersonalityslot = Some(slot);
728             slot
729         }
730     }
731
732     /// Returns the landing-pad wrapper around the given basic block.
733     ///
734     /// This is a no-op under the MSVC SEH scheme.
735     fn landing_pad_to(&mut self, target_bb: mir::BasicBlock) -> BasicBlockRef {
736         if let Some(block) = self.landing_pads[target_bb] {
737             return block;
738         }
739
740         let block = self.blocks[target_bb];
741         let landing_pad = self.landing_pad_uncached(block);
742         self.landing_pads[target_bb] = Some(landing_pad);
743         landing_pad
744     }
745
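    /// Builds a GNU-style landing pad that stores the exception value into
    /// the personality slot and then branches to `target_bb`. Under MSVC
    /// SEH the target block is simply returned unchanged.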
746     fn landing_pad_uncached(&mut self, target_bb: BasicBlockRef) -> BasicBlockRef {
747         if base::wants_msvc_seh(self.ccx.sess()) {
748             return target_bb;
749         }
750
751         let bcx = self.new_block("cleanup");
752
753         let ccx = bcx.ccx;
754         let llpersonality = self.ccx.eh_personality();
755         let llretty = Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)], false);
756         let llretval = bcx.landing_pad(llretty, llpersonality, 1, self.llfn);
757         bcx.set_cleanup(llretval);
758         let slot = self.get_personality_slot(&bcx);
759         Lifetime::Start.call(&bcx, slot);
760         bcx.store(llretval, slot, None);
761         bcx.br(target_bb);
762         bcx.llbb()
763     }
764
765     fn unreachable_block(&mut self) -> BasicBlockRef {
766         self.unreachable_block.unwrap_or_else(|| {
767             let bl = self.new_block("unreachable");
768             bl.unreachable();
769             self.unreachable_block = Some(bl.llbb());
770             bl.llbb()
771         })
772     }
773
774     pub fn new_block(&self, name: &str) -> Builder<'a, 'tcx> {
775         Builder::new_block(self.ccx, self.llfn, name)
776     }
777
778     pub fn get_builder(&self, bb: mir::BasicBlock) -> Builder<'a, 'tcx> {
779         let builder = Builder::with_ccx(self.ccx);
780         builder.position_at_end(self.blocks[bb]);
781         builder
782     }
783
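    /// Decides where a call's return value should be written. If the ABI
    /// returns indirectly, the out-pointer is pushed onto `llargs` here.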
784     fn make_return_dest(&mut self, bcx: &Builder<'a, 'tcx>,
785                         dest: &mir::Lvalue<'tcx>, fn_ret_ty: &ArgType,
786                         llargs: &mut Vec<ValueRef>, is_intrinsic: bool) -> ReturnDest {
787         // If the return is ignored, we can just return a do-nothing ReturnDest
788         if fn_ret_ty.is_ignore() {
789             return ReturnDest::Nothing;
790         }
791         let dest = if let mir::Lvalue::Local(index) = *dest {
792             let ret_ty = self.monomorphized_lvalue_ty(dest);
793             match self.locals[index] {
794                 LocalRef::Lvalue(dest) => dest,
795                 LocalRef::Operand(None) => {
796                     // Handle temporary lvalues, specifically Operand ones, as
797                     // they don't have allocas
798                     return if fn_ret_ty.is_indirect() {
799                         // Odd, but possible, case: we have an operand temporary,
800                         // but the calling convention has an indirect return.
801                         let tmp = LvalueRef::alloca(bcx, ret_ty, "tmp_ret");
802                         llargs.push(tmp.llval);
803                         ReturnDest::IndirectOperand(tmp.llval, index)
804                     } else if is_intrinsic {
805                         // Currently, intrinsics always need a location to store
806                         // the result, so we create a temporary alloca for the
807                         // result.
808                         let tmp = LvalueRef::alloca(bcx, ret_ty, "tmp_ret");
809                         ReturnDest::IndirectOperand(tmp.llval, index)
810                     } else {
811                         ReturnDest::DirectOperand(index)
812                     };
813                 }
814                 LocalRef::Operand(Some(_)) => {
815                     bug!("lvalue local already assigned to");
816                 }
817             }
818         } else {
819             self.trans_lvalue(bcx, dest)
820         };
821         if fn_ret_ty.is_indirect() {
822             match dest.alignment {
823                 Alignment::AbiAligned => {
824                     llargs.push(dest.llval);
825                     ReturnDest::Nothing
826                 },
827                 Alignment::Packed => {
828                     // Currently, MIR code generation does not create calls
829                     // that store directly to fields of packed structs (in
830                     // fact, the calls it creates write only to temps),
831                     //
832                     // If someone changes that, please update this code path
833                     // to create a temporary.
834                     span_bug!(self.mir.span, "can't directly store to unaligned value");
835                 }
836             }
837         } else {
838             ReturnDest::Store(dest.llval)
839         }
840     }
841
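    /// Lowers a `transmute` intrinsic call by storing the reinterpreted
    /// operand directly into `dst`, allocating an operand temporary when
    /// the destination local has no alloca yet.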
842     fn trans_transmute(&mut self, bcx: &Builder<'a, 'tcx>,
843                        src: &mir::Operand<'tcx>,
844                        dst: &mir::Lvalue<'tcx>) {
845         if let mir::Lvalue::Local(index) = *dst {
846             match self.locals[index] {
847                 LocalRef::Lvalue(lvalue) => self.trans_transmute_into(bcx, src, &lvalue),
848                 LocalRef::Operand(None) => {
849                     let lvalue_ty = self.monomorphized_lvalue_ty(dst);
850                     assert!(!lvalue_ty.has_erasable_regions());
851                     let lvalue = LvalueRef::alloca(bcx, lvalue_ty, "transmute_temp");
852                     self.trans_transmute_into(bcx, src, &lvalue);
853                     let op = self.trans_load(bcx, lvalue.llval, lvalue.alignment, lvalue_ty);
854                     self.locals[index] = LocalRef::Operand(Some(op));
855                 }
856                 LocalRef::Operand(Some(_)) => {
857                     let ty = self.monomorphized_lvalue_ty(dst);
858                     assert!(common::type_is_zero_size(bcx.ccx, ty),
859                             "assigning to initialized SSA temp");
860                 }
861             }
862         } else {
863             let dst = self.trans_lvalue(bcx, dst);
864             self.trans_transmute_into(bcx, src, &dst);
865         }
866     }
867
868     fn trans_transmute_into(&mut self, bcx: &Builder<'a, 'tcx>,
869                             src: &mir::Operand<'tcx>,
870                             dst: &LvalueRef<'tcx>) {
871         let val = self.trans_operand(bcx, src);
872         let llty = type_of::type_of(bcx.ccx, val.ty);
873         let cast_ptr = bcx.pointercast(dst.llval, llty.ptr_to());
874         let in_type = val.ty;
875         let out_type = dst.ty.to_ty(bcx.tcx());
876         let llalign = cmp::min(bcx.ccx.align_of(in_type), bcx.ccx.align_of(out_type));
877         self.store_operand(bcx, cast_ptr, Some(llalign), val);
878     }
879
880
881     // Stores the return value of a function call into its final location.
882     fn store_return(&mut self,
883                     bcx: &Builder<'a, 'tcx>,
884                     dest: ReturnDest,
885                     ret_ty: &ArgType<'tcx>,
886                     op: OperandRef<'tcx>) {
887         use self::ReturnDest::*;
888
889         match dest {
890             Nothing => (),
891             Store(dst) => ret_ty.store(bcx, op.immediate(), dst),
892             IndirectOperand(tmp, index) => {
893                 let op = self.trans_load(bcx, tmp, Alignment::AbiAligned, op.ty);
894                 self.locals[index] = LocalRef::Operand(Some(op));
895             }
896             DirectOperand(index) => {
897                 // If there is a cast, we have to store and reload.
898                 let op = if ret_ty.cast.is_some() {
899                     let tmp = LvalueRef::alloca(bcx, op.ty, "tmp_ret");
900                     ret_ty.store(bcx, op.immediate(), tmp.llval);
901                     self.trans_load(bcx, tmp.llval, tmp.alignment, op.ty)
902                 } else {
903                     op.unpack_if_pair(bcx)
904                 };
905                 self.locals[index] = LocalRef::Operand(Some(op));
906             }
907         }
908     }
909 }
910
911 enum ReturnDest {
912     // Do nothing, the return value is indirect or ignored
913     Nothing,
914     // Store the return value to the pointer
915     Store(ValueRef),
916     // Stores an indirect return value to an operand local lvalue
917     IndirectOperand(ValueRef, mir::Local),
918     // Stores a direct return value to an operand local lvalue
919     DirectOperand(mir::Local)
920 }