// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use llvm::{self, ValueRef};
use rustc_const_eval::{ErrKind, ConstEvalErr, note_const_eval_err};
use rustc::middle::lang_items;
use rustc::ty;
use rustc::mir::repr as mir;
use abi::{Abi, FnType, ArgType};
use adt;
use base;
use build;
use callee::{Callee, CalleeData, Fn, Intrinsic, NamedTupleConstructor, Virtual};
use common::{self, Block, BlockAndBuilder, LandingPad};
use common::{C_bool, C_str_slice, C_struct, C_u32, C_undef};
use consts;
use debuginfo::DebugLoc;
use Disr;
use machine::{llalign_of_min, llbitsize_of_real};
use meth;
use type_of;
use glue;
use type_::Type;

use rustc_data_structures::fnv::FnvHashMap;
use syntax::parse::token;

use super::{MirContext, LocalRef};
use super::analyze::CleanupKind;
use super::constant::Const;
use super::lvalue::{LvalueRef, load_fat_ptr};
use super::operand::OperandRef;
use super::operand::OperandValue::*;

impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
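    /// Translates the statements and terminator of a single MIR basic block
    /// into LLVM IR, emitting the branch, switch, call/invoke, or return
    /// instruction that the terminator requires.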
    pub fn trans_block(&mut self, bb: mir::BasicBlock) {
        let mut bcx = self.bcx(bb);
        let mir = self.mir.clone();
        let data = &mir[bb];

        debug!("trans_block({:?}={:?})", bb, data);

        // Create the cleanup bundle, if needed.
        let cleanup_pad = bcx.lpad().and_then(|lp| lp.cleanuppad());
        let cleanup_bundle = bcx.lpad().and_then(|l| l.bundle());

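        // Helper for a direct branch out of this block: when we are inside an
        // MSVC-style cleanup pad and the target block is itself a funclet, we
        // must leave via `cleanupret`; otherwise a plain `br` suffices.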
        let funclet_br = |this: &Self, bcx: BlockAndBuilder, bb: mir::BasicBlock| {
            let lltarget = this.blocks[bb].llbb;
            if let Some(cp) = cleanup_pad {
                match this.cleanup_kinds[bb] {
                    CleanupKind::Funclet => {
                        // micro-optimization: generate a `ret` rather than a jump
                        // to a return block
                        bcx.cleanup_ret(cp, Some(lltarget));
                    }
                    CleanupKind::Internal { .. } => bcx.br(lltarget),
                    CleanupKind::NotCleanup => bug!("jump from cleanup bb to bb {:?}", bb)
                }
            } else {
                bcx.br(lltarget);
            }
        };

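        // Helper that returns the LLVM basic block to branch to for `target`,
        // inserting a cleanup trampoline (MSVC) or a landing pad (GNU) when
        // the edge crosses a funclet/cleanup boundary.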
        let llblock = |this: &mut Self, target: mir::BasicBlock| {
            let lltarget = this.blocks[target].llbb;

            if let Some(cp) = cleanup_pad {
                match this.cleanup_kinds[target] {
                    CleanupKind::Funclet => {
                        // MSVC cross-funclet jump - need a trampoline

                        debug!("llblock: creating cleanup trampoline for {:?}", target);
                        let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
                        let trampoline = this.fcx.new_block(name).build();
                        trampoline.set_personality_fn(this.fcx.eh_personality());
                        trampoline.cleanup_ret(cp, Some(lltarget));
                        trampoline.llbb()
                    }
                    CleanupKind::Internal { .. } => lltarget,
                    CleanupKind::NotCleanup =>
                        bug!("jump from cleanup bb {:?} to bb {:?}", bb, target)
                }
            } else {
                if let (CleanupKind::NotCleanup, CleanupKind::Funclet) =
                    (this.cleanup_kinds[bb], this.cleanup_kinds[target])
                {
                    // jump *into* cleanup - need a landing pad if GNU
                    this.landing_pad_to(target).llbb
                } else {
                    lltarget
                }
            }
        };

        for statement in &data.statements {
            bcx = self.trans_statement(bcx, statement);
        }

        let terminator = data.terminator();
        debug!("trans_block: terminator: {:?}", terminator);

        let span = terminator.source_info.span;
        let debug_loc = self.debug_loc(terminator.source_info);
        debug_loc.apply_to_bcx(&bcx);
        debug_loc.apply(bcx.fcx());
        match terminator.kind {
            mir::TerminatorKind::Resume => {
                if let Some(cleanup_pad) = cleanup_pad {
                    bcx.cleanup_ret(cleanup_pad, None);
                } else {
                    let ps = self.get_personality_slot(&bcx);
                    let lp = bcx.load(ps);
                    bcx.with_block(|bcx| {
                        base::call_lifetime_end(bcx, ps);
                        base::trans_unwind_resume(bcx, lp);
                    });
                }
            }

            mir::TerminatorKind::Goto { target } => {
                funclet_br(self, bcx, target);
            }

            mir::TerminatorKind::If { ref cond, targets: (true_bb, false_bb) } => {
                let cond = self.trans_operand(&bcx, cond);

                let lltrue = llblock(self, true_bb);
                let llfalse = llblock(self, false_bb);
                bcx.cond_br(cond.immediate(), lltrue, llfalse);
            }

            mir::TerminatorKind::Switch { ref discr, ref adt_def, ref targets } => {
                let discr_lvalue = self.trans_lvalue(&bcx, discr);
                let ty = discr_lvalue.ty.to_ty(bcx.tcx());
                let repr = adt::represent_type(bcx.ccx(), ty);
                let discr = bcx.with_block(|bcx|
                    adt::trans_get_discr(bcx, &repr, discr_lvalue.llval, None, true)
                );

                let mut bb_hist = FnvHashMap();
                for target in targets {
                    *bb_hist.entry(target).or_insert(0) += 1;
                }
                let (default_bb, default_blk) = match bb_hist.iter().max_by_key(|&(_, c)| c) {
                    // If a single target basic block is predominant, promote that to be the
                    // default case for the switch instruction to reduce the size of the generated
                    // code. This is especially helpful in cases like an if-let on a huge enum.
                    // Note: This optimization is only valid for exhaustive matches.
                    Some((&&bb, &c)) if c > targets.len() / 2 => {
                        (Some(bb), llblock(self, bb))
                    }
                    // We're generating an exhaustive switch, so the else branch
                    // can't be hit.  Branching to an unreachable instruction
                    // lets LLVM know this
                    _ => (None, self.unreachable_block().llbb)
                };
                let switch = bcx.switch(discr, default_blk, targets.len());
                assert_eq!(adt_def.variants.len(), targets.len());
                for (adt_variant, &target) in adt_def.variants.iter().zip(targets) {
                    if default_bb != Some(target) {
                        let llbb = llblock(self, target);
                        let llval = bcx.with_block(|bcx| adt::trans_case(
                                bcx, &repr, Disr::from(adt_variant.disr_val)));
                        build::AddCase(switch, llval, llbb)
                    }
                }
            }

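            // An integer switch; by construction the last target is the
            // `otherwise` branch.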
            mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref values, ref targets } => {
                let (otherwise, targets) = targets.split_last().unwrap();
                let discr = bcx.load(self.trans_lvalue(&bcx, discr).llval);
                let discr = bcx.with_block(|bcx| base::to_immediate(bcx, discr, switch_ty));
                let switch = bcx.switch(discr, llblock(self, *otherwise), values.len());
                for (value, target) in values.iter().zip(targets) {
                    let val = Const::from_constval(bcx.ccx(), value.clone(), switch_ty);
                    let llbb = llblock(self, *target);
                    build::AddCase(switch, val.llval, llbb)
                }
            }

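            // Emit the function epilogue: load the value out of the
            // ReturnPointer local and `ret` it, respecting any ABI cast
            // of the return type.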
            mir::TerminatorKind::Return => {
                let ret = bcx.fcx().fn_ty.ret;
                if ret.is_ignore() || ret.is_indirect() {
                    bcx.ret_void();
                    return;
                }

                let llval = if let Some(cast_ty) = ret.cast {
                    let index = mir.local_index(&mir::Lvalue::ReturnPointer).unwrap();
                    let op = match self.locals[index] {
                        LocalRef::Operand(Some(op)) => op,
                        LocalRef::Operand(None) => bug!("use of return before def"),
                        LocalRef::Lvalue(tr_lvalue) => {
                            OperandRef {
                                val: Ref(tr_lvalue.llval),
                                ty: tr_lvalue.ty.to_ty(bcx.tcx())
                            }
                        }
                    };
                    let llslot = match op.val {
                        Immediate(_) | Pair(..) => {
                            let llscratch = build::AllocaFcx(bcx.fcx(), ret.original_ty, "ret");
                            self.store_operand(&bcx, llscratch, op);
                            llscratch
                        }
                        Ref(llval) => llval
                    };
                    let load = bcx.load(bcx.pointercast(llslot, cast_ty.ptr_to()));
                    let llalign = llalign_of_min(bcx.ccx(), ret.ty);
                    unsafe {
                        llvm::LLVMSetAlignment(load, llalign);
                    }
                    load
                } else {
                    let op = self.trans_consume(&bcx, &mir::Lvalue::ReturnPointer);
                    op.pack_if_pair(&bcx).immediate()
                };
                bcx.ret(llval);
            }

            mir::TerminatorKind::Unreachable => {
                bcx.unreachable();
            }

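            // Call the drop glue for `location`, then branch to `target`
            // (or unwind to `unwind` if the glue panics).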
            mir::TerminatorKind::Drop { ref location, target, unwind } => {
                let ty = location.ty(&mir, bcx.tcx()).to_ty(bcx.tcx());
                let ty = bcx.monomorphize(&ty);

                // Double-check that this type actually needs to be dropped.
                if !glue::type_needs_drop(bcx.tcx(), ty) {
                    funclet_br(self, bcx, target);
                    return;
                }

                let lvalue = self.trans_lvalue(&bcx, location);
                let drop_fn = glue::get_drop_glue(bcx.ccx(), ty);
                let drop_ty = glue::get_drop_glue_type(bcx.tcx(), ty);
                let llvalue = if drop_ty != ty {
                    bcx.pointercast(lvalue.llval, type_of::type_of(bcx.ccx(), drop_ty).ptr_to())
                } else {
                    lvalue.llval
                };
                if let Some(unwind) = unwind {
                    bcx.invoke(drop_fn,
                               &[llvalue],
                               self.blocks[target].llbb,
                               llblock(self, unwind),
                               cleanup_bundle);
                } else {
                    bcx.call(drop_fn, &[llvalue], cleanup_bundle);
                    funclet_br(self, bcx, target);
                }
            }

            mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
                let cond = self.trans_operand(&bcx, cond).immediate();
                let mut const_cond = common::const_to_opt_uint(cond).map(|c| c == 1);

                // This case can currently arise only from functions marked
                // with #[rustc_inherit_overflow_checks] and inlined from
                // another crate (mostly core::num generic/#[inline] fns),
                // while the current crate doesn't use overflow checks.
                // NOTE: Unlike binops, negation doesn't have its own
                // checked operation, just a comparison with the minimum
                // value, so we have to check for the assert message.
                if !bcx.ccx().check_overflow() {
                    use rustc_const_math::ConstMathErr::Overflow;
                    use rustc_const_math::Op::Neg;

                    if let mir::AssertMessage::Math(Overflow(Neg)) = *msg {
                        const_cond = Some(expected);
                    }
                }

                // Don't translate the panic block if success is known.
                if const_cond == Some(expected) {
                    funclet_br(self, bcx, target);
                    return;
                }

                // Pass the condition through llvm.expect for branch hinting.
                let expect = bcx.ccx().get_intrinsic(&"llvm.expect.i1");
                let cond = bcx.call(expect, &[cond, C_bool(bcx.ccx(), expected)], None);

                // Create the failure block and the conditional branch to it.
                let lltarget = llblock(self, target);
                let panic_block = self.fcx.new_block("panic");
                if expected {
                    bcx.cond_br(cond, lltarget, panic_block.llbb);
                } else {
                    bcx.cond_br(cond, panic_block.llbb, lltarget);
                }

                // After this point, bcx is the block for the call to panic.
                bcx = panic_block.build();
                debug_loc.apply_to_bcx(&bcx);

                // Get the location information.
                let loc = bcx.sess().codemap().lookup_char_pos(span.lo);
                let filename = token::intern_and_get_ident(&loc.file.name);
                let filename = C_str_slice(bcx.ccx(), filename);
                let line = C_u32(bcx.ccx(), loc.line as u32);

                // Put together the arguments to the panic entry point.
                let (lang_item, args, const_err) = match *msg {
                    mir::AssertMessage::BoundsCheck { ref len, ref index } => {
                        let len = self.trans_operand(&mut bcx, len).immediate();
                        let index = self.trans_operand(&mut bcx, index).immediate();

                        let const_err = common::const_to_opt_uint(len).and_then(|len| {
                            common::const_to_opt_uint(index).map(|index| {
                                ErrKind::IndexOutOfBounds {
                                    len: len,
                                    index: index
                                }
                            })
                        });

                        let file_line = C_struct(bcx.ccx(), &[filename, line], false);
                        let align = llalign_of_min(bcx.ccx(), common::val_ty(file_line));
                        let file_line = consts::addr_of(bcx.ccx(),
                                                        file_line,
                                                        align,
                                                        "panic_bounds_check_loc");
                        (lang_items::PanicBoundsCheckFnLangItem,
                         vec![file_line, index, len],
                         const_err)
                    }
                    mir::AssertMessage::Math(ref err) => {
                        let msg_str = token::intern_and_get_ident(err.description());
                        let msg_str = C_str_slice(bcx.ccx(), msg_str);
                        let msg_file_line = C_struct(bcx.ccx(),
                                                     &[msg_str, filename, line],
                                                     false);
                        let align = llalign_of_min(bcx.ccx(), common::val_ty(msg_file_line));
                        let msg_file_line = consts::addr_of(bcx.ccx(),
                                                            msg_file_line,
                                                            align,
                                                            "panic_loc");
                        (lang_items::PanicFnLangItem,
                         vec![msg_file_line],
                         Some(ErrKind::Math(err.clone())))
                    }
                };

                // If we know we always panic, and the error message
                // is also constant, then we can produce a warning.
                if const_cond == Some(!expected) {
                    if let Some(err) = const_err {
                        let err = ConstEvalErr{ span: span, kind: err };
                        let mut diag = bcx.tcx().sess.struct_span_warn(
                            span, "this expression will panic at run-time");
                        note_const_eval_err(bcx.tcx(), &err, span, "expression", &mut diag);
                        diag.emit();
                    }
                }

                // Obtain the panic entry point.
                let def_id = common::langcall(bcx.tcx(), Some(span), "", lang_item);
                let callee = Callee::def(bcx.ccx(), def_id,
                    bcx.ccx().empty_substs_for_def_id(def_id));
                let llfn = callee.reify(bcx.ccx());

                // Translate the actual panic invoke/call.
                if let Some(unwind) = cleanup {
                    bcx.invoke(llfn,
                               &args,
                               self.unreachable_block().llbb,
                               llblock(self, unwind),
                               cleanup_bundle);
                } else {
                    bcx.call(llfn, &args, cleanup_bundle);
                    bcx.unreachable();
                }
            }

            mir::TerminatorKind::DropAndReplace { .. } => {
                bug!("undesugared DropAndReplace in trans: {:?}", data);
            }

            mir::TerminatorKind::Call { ref func, ref args, ref destination, ref cleanup } => {
                // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
                let callee = self.trans_operand(&bcx, func);

                let (mut callee, abi, sig) = match callee.ty.sty {
                    ty::TyFnDef(def_id, substs, f) => {
                        (Callee::def(bcx.ccx(), def_id, substs), f.abi, &f.sig)
                    }
                    ty::TyFnPtr(f) => {
                        (Callee {
                            data: Fn(callee.immediate()),
                            ty: callee.ty
                        }, f.abi, &f.sig)
                    }
                    _ => bug!("{} is not callable", callee.ty)
                };

                let sig = bcx.tcx().erase_late_bound_regions(sig);

                // Handle intrinsics that old trans wants Exprs for, ourselves.
                let intrinsic = match (&callee.ty.sty, &callee.data) {
                    (&ty::TyFnDef(def_id, _, _), &Intrinsic) => {
                        Some(bcx.tcx().item_name(def_id).as_str())
                    }
                    _ => None
                };
                let intrinsic = intrinsic.as_ref().map(|s| &s[..]);

                if intrinsic == Some("move_val_init") {
                    let &(_, target) = destination.as_ref().unwrap();
                    // The first argument is a thin destination pointer.
                    let llptr = self.trans_operand(&bcx, &args[0]).immediate();
                    let val = self.trans_operand(&bcx, &args[1]);
                    self.store_operand(&bcx, llptr, val);
                    funclet_br(self, bcx, target);
                    return;
                }

                if intrinsic == Some("transmute") {
                    let &(ref dest, target) = destination.as_ref().unwrap();
                    self.with_lvalue_ref(&bcx, dest, |this, dest| {
                        this.trans_transmute(&bcx, &args[0], dest);
                    });

                    funclet_br(self, bcx, target);
                    return;
                }

                let extra_args = &args[sig.inputs.len()..];
                let extra_args = extra_args.iter().map(|op_arg| {
                    let op_ty = op_arg.ty(&self.mir, bcx.tcx());
                    bcx.monomorphize(&op_ty)
                }).collect::<Vec<_>>();
                let fn_ty = callee.direct_fn_type(bcx.ccx(), &extra_args);

                // The arguments we'll be passing. Plus one to account for outptr, if used.
                let arg_count = fn_ty.args.len() + fn_ty.ret.is_indirect() as usize;
                let mut llargs = Vec::with_capacity(arg_count);

                // Prepare the return value destination
                let ret_dest = if let Some((ref dest, _)) = *destination {
                    let is_intrinsic = if let Intrinsic = callee.data {
                        true
                    } else {
                        false
                    };
                    self.make_return_dest(&bcx, dest, &fn_ty.ret, &mut llargs, is_intrinsic)
                } else {
                    ReturnDest::Nothing
                };

                // Split the rust-call tupled arguments off.
                let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
                    let (tup, args) = args.split_last().unwrap();
                    (args, Some(tup))
                } else {
                    (&args[..], None)
                };

                let is_shuffle = intrinsic.map_or(false, |name| {
                    name.starts_with("simd_shuffle")
                });
                let mut idx = 0;
                for arg in first_args {
                    // The indices passed to simd_shuffle* in the
                    // third argument must be constant. This is
                    // checked by const-qualification, which also
                    // promotes any complex rvalues to constants.
                    if is_shuffle && idx == 2 {
                        match *arg {
                            mir::Operand::Consume(_) => {
                                span_bug!(span, "shuffle indices must be constant");
                            }
                            mir::Operand::Constant(ref constant) => {
                                let val = self.trans_constant(&bcx, constant);
                                llargs.push(val.llval);
                                idx += 1;
                                continue;
                            }
                        }
                    }

                    let op = self.trans_operand(&bcx, arg);
                    self.trans_argument(&bcx, op, &mut llargs, &fn_ty,
                                        &mut idx, &mut callee.data);
                }
                if let Some(tup) = untuple {
                    self.trans_arguments_untupled(&bcx, tup, &mut llargs, &fn_ty,
                                                  &mut idx, &mut callee.data)
                }

                let fn_ptr = match callee.data {
                    NamedTupleConstructor(_) => {
                        // FIXME translate this like mir::Rvalue::Aggregate.
                        callee.reify(bcx.ccx())
                    }
                    Intrinsic => {
                        use intrinsic::trans_intrinsic_call;

                        let (dest, llargs) = match ret_dest {
                            _ if fn_ty.ret.is_indirect() => {
                                (llargs[0], &llargs[1..])
                            }
                            ReturnDest::Nothing => {
                                (C_undef(fn_ty.ret.original_ty.ptr_to()), &llargs[..])
                            }
                            ReturnDest::IndirectOperand(dst, _) |
                            ReturnDest::Store(dst) => (dst, &llargs[..]),
                            ReturnDest::DirectOperand(_) =>
                                bug!("Cannot use direct operand with an intrinsic call")
                        };

                        bcx.with_block(|bcx| {
                            trans_intrinsic_call(bcx, callee.ty, &fn_ty,
                                                 &llargs, dest, debug_loc);
                        });

                        if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
                            // Make a fake operand for store_return
                            let op = OperandRef {
                                val: Ref(dst),
                                ty: sig.output,
                            };
                            self.store_return(&bcx, ret_dest, fn_ty.ret, op);
                        }

                        if let Some((_, target)) = *destination {
                            funclet_br(self, bcx, target);
                        } else {
                            // trans_intrinsic_call already used Unreachable.
                            // bcx.unreachable();
                        }

                        return;
                    }
                    Fn(f) => f,
                    Virtual(_) => bug!("Virtual fn ptr not extracted")
                };

                // Many different ways to call a function handled here
                if let &Some(cleanup) = cleanup {
                    let ret_bcx = if let Some((_, target)) = *destination {
                        self.blocks[target]
                    } else {
                        self.unreachable_block()
                    };
                    let invokeret = bcx.invoke(fn_ptr,
                                               &llargs,
                                               ret_bcx.llbb,
                                               llblock(self, cleanup),
                                               cleanup_bundle);
                    fn_ty.apply_attrs_callsite(invokeret);

                    if destination.is_some() {
                        let ret_bcx = ret_bcx.build();
                        ret_bcx.at_start(|ret_bcx| {
                            debug_loc.apply_to_bcx(ret_bcx);
                            let op = OperandRef {
                                val: Immediate(invokeret),
                                ty: sig.output,
                            };
                            self.store_return(&ret_bcx, ret_dest, fn_ty.ret, op);
                        });
                    }
                } else {
                    let llret = bcx.call(fn_ptr, &llargs, cleanup_bundle);
                    fn_ty.apply_attrs_callsite(llret);
                    if let Some((_, target)) = *destination {
                        let op = OperandRef {
                            val: Immediate(llret),
                            ty: sig.output,
                        };
                        self.store_return(&bcx, ret_dest, fn_ty.ret, op);
                        funclet_br(self, bcx, target);
                    } else {
                        bcx.unreachable();
                    }
                }
            }
        }
    }

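    /// Translates one call argument and pushes the resulting LLVM value(s)
    /// onto `llargs`, splitting fat pointers into their data and metadata
    /// parts and spilling immediates to a stack slot when the ABI passes the
    /// argument indirectly or through a cast.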
    fn trans_argument(&mut self,
                      bcx: &BlockAndBuilder<'bcx, 'tcx>,
                      op: OperandRef<'tcx>,
                      llargs: &mut Vec<ValueRef>,
                      fn_ty: &FnType,
                      next_idx: &mut usize,
                      callee: &mut CalleeData) {
        if let Pair(a, b) = op.val {
            // Treat the values in a fat pointer separately.
            if common::type_is_fat_ptr(bcx.tcx(), op.ty) {
                let (ptr, meta) = (a, b);
                if *next_idx == 0 {
                    if let Virtual(idx) = *callee {
                        let llfn = bcx.with_block(|bcx| {
                            meth::get_virtual_method(bcx, meta, idx)
                        });
                        let llty = fn_ty.llvm_type(bcx.ccx()).ptr_to();
                        *callee = Fn(bcx.pointercast(llfn, llty));
                    }
                }

                let imm_op = |x| OperandRef {
                    val: Immediate(x),
                    // We won't be checking the type again.
                    ty: bcx.tcx().types.err
                };
                self.trans_argument(bcx, imm_op(ptr), llargs, fn_ty, next_idx, callee);
                self.trans_argument(bcx, imm_op(meta), llargs, fn_ty, next_idx, callee);
                return;
            }
        }

        let arg = &fn_ty.args[*next_idx];
        *next_idx += 1;

        // Fill padding with undef value, where applicable.
        if let Some(ty) = arg.pad {
            llargs.push(C_undef(ty));
        }

        if arg.is_ignore() {
            return;
        }

        // Force by-ref if we have to load through a cast pointer.
        let (mut llval, by_ref) = match op.val {
            Immediate(_) | Pair(..) => {
                if arg.is_indirect() || arg.cast.is_some() {
                    let llscratch = build::AllocaFcx(bcx.fcx(), arg.original_ty, "arg");
                    self.store_operand(bcx, llscratch, op);
                    (llscratch, true)
                } else {
                    (op.pack_if_pair(bcx).immediate(), false)
                }
            }
            Ref(llval) => (llval, true)
        };

        if by_ref && !arg.is_indirect() {
            // Have to load the argument, maybe while casting it.
            if arg.original_ty == Type::i1(bcx.ccx()) {
                // We store bools as i8 so we need to truncate to i1.
                llval = bcx.load_range_assert(llval, 0, 2, llvm::False);
                llval = bcx.trunc(llval, arg.original_ty);
            } else if let Some(ty) = arg.cast {
                llval = bcx.load(bcx.pointercast(llval, ty.ptr_to()));
                let llalign = llalign_of_min(bcx.ccx(), arg.ty);
                unsafe {
                    llvm::LLVMSetAlignment(llval, llalign);
                }
            } else {
                llval = bcx.load(llval);
            }
        }

        llargs.push(llval);
    }

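    /// Expands the tupled final argument of a "rust-call" function into
    /// individual call arguments, handling by-ref, immediate, and pair
    /// representations of the tuple.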
    fn trans_arguments_untupled(&mut self,
                                bcx: &BlockAndBuilder<'bcx, 'tcx>,
                                operand: &mir::Operand<'tcx>,
                                llargs: &mut Vec<ValueRef>,
                                fn_ty: &FnType,
                                next_idx: &mut usize,
                                callee: &mut CalleeData) {
        let tuple = self.trans_operand(bcx, operand);

        let arg_types = match tuple.ty.sty {
            ty::TyTuple(ref tys) => tys,
            _ => span_bug!(self.mir.span,
                           "bad final argument to \"rust-call\" fn {:?}", tuple.ty)
        };

        // Handle both by-ref and immediate tuples.
        match tuple.val {
            Ref(llval) => {
                let base_repr = adt::represent_type(bcx.ccx(), tuple.ty);
                let base = adt::MaybeSizedValue::sized(llval);
                for (n, &ty) in arg_types.iter().enumerate() {
                    let ptr = adt::trans_field_ptr_builder(bcx, &base_repr, base, Disr(0), n);
                    let val = if common::type_is_fat_ptr(bcx.tcx(), ty) {
                        let (lldata, llextra) = load_fat_ptr(bcx, ptr);
                        Pair(lldata, llextra)
                    } else {
                        // trans_argument will load this if it needs to
                        Ref(ptr)
                    };
                    let op = OperandRef {
                        val: val,
                        ty: ty
                    };
                    self.trans_argument(bcx, op, llargs, fn_ty, next_idx, callee);
                }

            }
            Immediate(llval) => {
                for (n, &ty) in arg_types.iter().enumerate() {
                    let mut elem = bcx.extract_value(llval, n);
                    // Truncate bools to i1, if needed
                    if ty.is_bool() && common::val_ty(elem) != Type::i1(bcx.ccx()) {
                        elem = bcx.trunc(elem, Type::i1(bcx.ccx()));
                    }
                    // If the tuple is immediate, the elements are as well
                    let op = OperandRef {
                        val: Immediate(elem),
                        ty: ty
                    };
                    self.trans_argument(bcx, op, llargs, fn_ty, next_idx, callee);
                }
            }
            Pair(a, b) => {
                let elems = [a, b];
                for (n, &ty) in arg_types.iter().enumerate() {
                    let mut elem = elems[n];
                    // Truncate bools to i1, if needed
                    if ty.is_bool() && common::val_ty(elem) != Type::i1(bcx.ccx()) {
                        elem = bcx.trunc(elem, Type::i1(bcx.ccx()));
                    }
                    // Pair is always made up of immediates
                    let op = OperandRef {
                        val: Immediate(elem),
                        ty: ty
                    };
                    self.trans_argument(bcx, op, llargs, fn_ty, next_idx, callee);
                }
            }
        }

    }

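    /// Returns the alloca that holds the exception pointer/selector pair for
    /// GNU-style landing pads, creating it on first use.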
    fn get_personality_slot(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>) -> ValueRef {
        let ccx = bcx.ccx();
        if let Some(slot) = self.llpersonalityslot {
            slot
        } else {
            let llretty = Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)], false);
            bcx.with_block(|bcx| {
                let slot = base::alloca(bcx, llretty, "personalityslot");
                self.llpersonalityslot = Some(slot);
                base::call_lifetime_start(bcx, slot);
                slot
            })
        }
    }

    /// Return the landingpad wrapper around the given basic block
    ///
    /// No-op in MSVC SEH scheme.
    fn landing_pad_to(&mut self, target_bb: mir::BasicBlock) -> Block<'bcx, 'tcx>
    {
        if let Some(block) = self.landing_pads[target_bb] {
            return block;
        }

        if base::wants_msvc_seh(self.fcx.ccx.sess()) {
            return self.blocks[target_bb];
        }

        let target = self.bcx(target_bb);

        let block = self.fcx.new_block("cleanup");
        self.landing_pads[target_bb] = Some(block);

        let bcx = block.build();
        let ccx = bcx.ccx();
        let llpersonality = self.fcx.eh_personality();
        let llretty = Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)], false);
        let llretval = bcx.landing_pad(llretty, llpersonality, 1, self.fcx.llfn);
        bcx.set_cleanup(llretval);
        let slot = self.get_personality_slot(&bcx);
        bcx.store(llretval, slot);
        bcx.br(target.llbb());
        block
    }

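    /// Sets up the landing-pad or MSVC cleanup-pad state for `bb` according
    /// to its `CleanupKind`.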
    pub fn init_cpad(&mut self, bb: mir::BasicBlock) {
        let bcx = self.bcx(bb);
        let data = &self.mir[bb];
        debug!("init_cpad({:?})", data);

        match self.cleanup_kinds[bb] {
            CleanupKind::NotCleanup => {
                bcx.set_lpad(None)
            }
            _ if !base::wants_msvc_seh(bcx.sess()) => {
                bcx.set_lpad(Some(LandingPad::gnu()))
            }
            CleanupKind::Internal { funclet } => {
                // FIXME: is this needed?
                bcx.set_personality_fn(self.fcx.eh_personality());
                bcx.set_lpad_ref(self.bcx(funclet).lpad());
            }
            CleanupKind::Funclet => {
                bcx.set_personality_fn(self.fcx.eh_personality());
                DebugLoc::None.apply_to_bcx(&bcx);
                let cleanup_pad = bcx.cleanup_pad(None, &[]);
                bcx.set_lpad(Some(LandingPad::msvc(cleanup_pad)));
            }
        };
    }

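    /// Returns a block containing only an `unreachable` terminator, creating
    /// and caching it on first use.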
    fn unreachable_block(&mut self) -> Block<'bcx, 'tcx> {
        self.unreachable_block.unwrap_or_else(|| {
            let bl = self.fcx.new_block("unreachable");
            bl.build().unreachable();
            self.unreachable_block = Some(bl);
            bl
        })
    }

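    /// Convenience accessor: a builder positioned on the LLVM block for `bb`.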
    fn bcx(&self, bb: mir::BasicBlock) -> BlockAndBuilder<'bcx, 'tcx> {
        self.blocks[bb].build()
    }

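    /// Decides where the return value of a call should go: nothing (ignored,
    /// or passed via an out-pointer pushed onto `llargs`), a store into an
    /// existing lvalue, or directly into an operand local.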
    fn make_return_dest(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>,
                        dest: &mir::Lvalue<'tcx>, fn_ret_ty: &ArgType,
                        llargs: &mut Vec<ValueRef>, is_intrinsic: bool) -> ReturnDest {
        // If the return is ignored, we can just return a do-nothing ReturnDest
        if fn_ret_ty.is_ignore() {
            return ReturnDest::Nothing;
        }
        let dest = if let Some(index) = self.mir.local_index(dest) {
            let ret_ty = self.monomorphized_lvalue_ty(dest);
            match self.locals[index] {
                LocalRef::Lvalue(dest) => dest,
                LocalRef::Operand(None) => {
                    // Handle temporary lvalues, specifically Operand ones, as
                    // they don't have allocas
                    return if fn_ret_ty.is_indirect() {
                        // Odd, but possible, case: we have an operand temporary,
                        // but the calling convention has an indirect return.
                        let tmp = bcx.with_block(|bcx| {
                            base::alloc_ty(bcx, ret_ty, "tmp_ret")
                        });
                        llargs.push(tmp);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else if is_intrinsic {
                        // Currently, intrinsics always need a location to store
                        // the result, so we create a temporary alloca for the
                        // result.
                        let tmp = bcx.with_block(|bcx| {
                            base::alloc_ty(bcx, ret_ty, "tmp_ret")
                        });
                        ReturnDest::IndirectOperand(tmp, index)
                    } else {
                        ReturnDest::DirectOperand(index)
                    };
                }
                LocalRef::Operand(Some(_)) => {
                    bug!("lvalue local already assigned to");
                }
            }
        } else {
            self.trans_lvalue(bcx, dest)
        };
        if fn_ret_ty.is_indirect() {
            llargs.push(dest.llval);
            ReturnDest::Nothing
        } else {
            ReturnDest::Store(dest.llval)
        }
    }

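    /// Translates a call to the `transmute` intrinsic by storing the source
    /// operand through a pointer cast to the destination lvalue.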
    fn trans_transmute(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>,
                       src: &mir::Operand<'tcx>, dst: LvalueRef<'tcx>) {
        let mut val = self.trans_operand(bcx, src);
        if let ty::TyFnDef(def_id, substs, _) = val.ty.sty {
            let llouttype = type_of::type_of(bcx.ccx(), dst.ty.to_ty(bcx.tcx()));
            let out_type_size = llbitsize_of_real(bcx.ccx(), llouttype);
            if out_type_size != 0 {
                // FIXME #19925 Remove this hack after a release cycle.
                let f = Callee::def(bcx.ccx(), def_id, substs);
                let ty = match f.ty.sty {
                    ty::TyFnDef(_, _, f) => bcx.tcx().mk_fn_ptr(f),
                    _ => f.ty
                };
                val = OperandRef {
                    val: Immediate(f.reify(bcx.ccx())),
                    ty: ty
                };
            }
        }

        let llty = type_of::type_of(bcx.ccx(), val.ty);
        let cast_ptr = bcx.pointercast(dst.llval, llty.ptr_to());
        self.store_operand(bcx, cast_ptr, val);
    }


    // Stores the return value of a function call into its final location.
    fn store_return(&mut self,
                    bcx: &BlockAndBuilder<'bcx, 'tcx>,
                    dest: ReturnDest,
                    ret_ty: ArgType,
                    op: OperandRef<'tcx>) {
        use self::ReturnDest::*;

        match dest {
            Nothing => (),
            Store(dst) => ret_ty.store(bcx, op.immediate(), dst),
            IndirectOperand(tmp, index) => {
                let op = self.trans_load(bcx, tmp, op.ty);
                self.locals[index] = LocalRef::Operand(Some(op));
            }
            DirectOperand(index) => {
                // If there is a cast, we have to store and reload.
                let op = if ret_ty.cast.is_some() {
                    let tmp = bcx.with_block(|bcx| {
                        base::alloc_ty(bcx, op.ty, "tmp_ret")
                    });
                    ret_ty.store(bcx, op.immediate(), tmp);
                    self.trans_load(bcx, tmp, op.ty)
                } else {
                    op.unpack_if_pair(bcx)
                };
                self.locals[index] = LocalRef::Operand(Some(op));
            }
        }
    }
}

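// Describes where the return value of a call should be written.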
enum ReturnDest {
    // Do nothing, the return value is indirect or ignored
    Nothing,
    // Store the return value to the pointer
    Store(ValueRef),
    // Stores an indirect return value to an operand local lvalue
    IndirectOperand(ValueRef, mir::Local),
    // Stores a direct return value to an operand local lvalue
    DirectOperand(mir::Local)
}