rust.git: src/librustc_trans/mir/block.rs
commit: MIR: split Operand::Consume into Copy and Move.
1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 use llvm::{self, ValueRef, BasicBlockRef};
12 use rustc::middle::lang_items;
13 use rustc::middle::const_val::{ConstEvalErr, ConstInt, ErrKind};
14 use rustc::ty::{self, TypeFoldable};
15 use rustc::ty::layout::{self, LayoutOf};
16 use rustc::traits;
17 use rustc::mir;
18 use abi::{Abi, FnType, ArgType, PassMode};
19 use base;
20 use callee;
21 use builder::Builder;
22 use common::{self, C_bool, C_str_slice, C_struct, C_u32, C_undef};
23 use consts;
24 use meth;
25 use monomorphize;
26 use type_of::LayoutLlvmExt;
27 use type_::Type;
28
29 use syntax::symbol::Symbol;
30 use syntax_pos::Pos;
31
32 use super::{MirContext, LocalRef};
33 use super::constant::Const;
34 use super::lvalue::{Alignment, LvalueRef};
35 use super::operand::OperandRef;
36 use super::operand::OperandValue::{Pair, Ref, Immediate};
37
38 impl<'a, 'tcx> MirContext<'a, 'tcx> {
39     pub fn trans_block(&mut self, bb: mir::BasicBlock) {
40         let mut bcx = self.get_builder(bb);
41         let data = &self.mir[bb];
42
43         debug!("trans_block({:?}={:?})", bb, data);
44
45         for statement in &data.statements {
46             bcx = self.trans_statement(bcx, statement);
47         }
48
49         self.trans_terminator(bcx, bb, data.terminator());
50     }
51
52     fn trans_terminator(&mut self,
53                         mut bcx: Builder<'a, 'tcx>,
54                         bb: mir::BasicBlock,
55                         terminator: &mir::Terminator<'tcx>)
56     {
57         debug!("trans_terminator: {:?}", terminator);
58
59         // Create the cleanup bundle, if needed.
60         let tcx = bcx.tcx();
61         let span = terminator.source_info.span;
62         let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);
63         let funclet = funclet_bb.and_then(|funclet_bb| self.funclets[funclet_bb].as_ref());
64
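        // On MSVC-style (SEH) unwinding, cleanup code runs inside LLVM funclets:
        // calls emitted within one must carry an operand bundle naming its
        // cleanuppad, and exits go through `cleanupret`. With GNU/DWARF unwinding
        // there is no funclet, so both of these are `None`.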
65         let cleanup_pad = funclet.map(|lp| lp.cleanuppad());
66         let cleanup_bundle = funclet.map(|l| l.bundle());
67
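        // `lltarget` maps a MIR successor to the LLVM block we should branch to,
        // plus a flag saying whether that branch would leave the current funclet
        // (in which case it has to be routed through a `cleanupret`; see `llblock`
        // and `funclet_br` below). Jumping *into* cleanup from normal code instead
        // goes through a landing pad on GNU-style unwinding.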
68         let lltarget = |this: &mut Self, target: mir::BasicBlock| {
69             let lltarget = this.blocks[target];
70             let target_funclet = this.cleanup_kinds[target].funclet_bb(target);
71             match (funclet_bb, target_funclet) {
72                 (None, None) => (lltarget, false),
73                 (Some(f), Some(t_f))
74                     if f == t_f || !base::wants_msvc_seh(tcx.sess)
75                     => (lltarget, false),
76                 (None, Some(_)) => {
77                     // jump *into* cleanup - need a landing pad if GNU
78                     (this.landing_pad_to(target), false)
79                 }
80                 (Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", terminator),
81                 (Some(_), Some(_)) => {
82                     (this.landing_pad_to(target), true)
83                 }
84             }
85         };
86
87         let llblock = |this: &mut Self, target: mir::BasicBlock| {
88             let (lltarget, is_cleanupret) = lltarget(this, target);
89             if is_cleanupret {
90                 // MSVC cross-funclet jump - need a trampoline
91
92                 debug!("llblock: creating cleanup trampoline for {:?}", target);
93                 let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
94                 let trampoline = this.new_block(name);
95                 trampoline.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
96                 trampoline.llbb()
97             } else {
98                 lltarget
99             }
100         };
101
102         let funclet_br = |this: &mut Self, bcx: Builder, target: mir::BasicBlock| {
103             let (lltarget, is_cleanupret) = lltarget(this, target);
104             if is_cleanupret {
105                 // micro-optimization: generate a `ret` rather than a jump
106                 // to a trampoline.
107                 bcx.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
108             } else {
109                 bcx.br(lltarget);
110             }
111         };
112
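        // `do_call` is the shared call emitter: it uses `invoke` when the call has
        // an unwind (cleanup) edge and a plain `call` otherwise, applies the ABI's
        // call-site attributes, stores the return value via `store_return`, and
        // either branches to the destination block or emits `unreachable` for
        // diverging calls.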
113         let do_call = |
114             this: &mut Self,
115             bcx: Builder<'a, 'tcx>,
116             fn_ty: FnType<'tcx>,
117             fn_ptr: ValueRef,
118             llargs: &[ValueRef],
119             destination: Option<(ReturnDest<'tcx>, mir::BasicBlock)>,
120             cleanup: Option<mir::BasicBlock>
121         | {
122             if let Some(cleanup) = cleanup {
123                 let ret_bcx = if let Some((_, target)) = destination {
124                     this.blocks[target]
125                 } else {
126                     this.unreachable_block()
127                 };
128                 let invokeret = bcx.invoke(fn_ptr,
129                                            &llargs,
130                                            ret_bcx,
131                                            llblock(this, cleanup),
132                                            cleanup_bundle);
133                 fn_ty.apply_attrs_callsite(invokeret);
134
135                 if let Some((ret_dest, target)) = destination {
136                     let ret_bcx = this.get_builder(target);
137                     this.set_debug_loc(&ret_bcx, terminator.source_info);
138                     this.store_return(&ret_bcx, ret_dest, &fn_ty.ret, invokeret);
139                 }
140             } else {
141                 let llret = bcx.call(fn_ptr, &llargs, cleanup_bundle);
142                 fn_ty.apply_attrs_callsite(llret);
143                 if this.mir[bb].is_cleanup {
144                     // Cleanup is always the cold path. Don't inline
145                     // drop glue. Also, when there is a deeply-nested
146                     // struct, there are "symmetry" issues that cause
147                     // exponential inlining - see issue #41696.
148                     llvm::Attribute::NoInline.apply_callsite(llvm::AttributePlace::Function, llret);
149                 }
150
151                 if let Some((ret_dest, target)) = destination {
152                     this.store_return(&bcx, ret_dest, &fn_ty.ret, llret);
153                     funclet_br(this, bcx, target);
154                 } else {
155                     bcx.unreachable();
156                 }
157             }
158         };
159
160         self.set_debug_loc(&bcx, terminator.source_info);
161         match terminator.kind {
162             mir::TerminatorKind::Resume => {
163                 if let Some(cleanup_pad) = cleanup_pad {
164                     bcx.cleanup_ret(cleanup_pad, None);
165                 } else {
166                     let slot = self.get_personality_slot(&bcx);
167                     let lp0 = slot.project_field(&bcx, 0).load(&bcx).immediate();
168                     let lp1 = slot.project_field(&bcx, 1).load(&bcx).immediate();
169                     slot.storage_dead(&bcx);
170
171                     if !bcx.sess().target.target.options.custom_unwind_resume {
172                         let mut lp = C_undef(self.landing_pad_type());
173                         lp = bcx.insert_value(lp, lp0, 0);
174                         lp = bcx.insert_value(lp, lp1, 1);
175                         bcx.resume(lp);
176                     } else {
177                         bcx.call(bcx.ccx.eh_unwind_resume(), &[lp0], cleanup_bundle);
178                         bcx.unreachable();
179                     }
180                 }
181             }
182
183             mir::TerminatorKind::Goto { target } => {
184                 funclet_br(self, bcx, target);
185             }
186
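            // A `bool` discriminant becomes a plain conditional branch; `values`
            // holds the constant matched by the first target, so a lone `0` means
            // the true/false successors are swapped. Any other type is lowered to
            // an LLVM `switch`, with the trailing MIR target as the default.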
187             mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref values, ref targets } => {
188                 let discr = self.trans_operand(&bcx, discr);
189                 if switch_ty == bcx.tcx().types.bool {
190                     let lltrue = llblock(self, targets[0]);
191                     let llfalse = llblock(self, targets[1]);
192                     if let [ConstInt::U8(0)] = values[..] {
193                         bcx.cond_br(discr.immediate(), llfalse, lltrue);
194                     } else {
195                         bcx.cond_br(discr.immediate(), lltrue, llfalse);
196                     }
197                 } else {
198                     let (otherwise, targets) = targets.split_last().unwrap();
199                     let switch = bcx.switch(discr.immediate(),
200                                             llblock(self, *otherwise), values.len());
201                     for (value, target) in values.iter().zip(targets) {
202                         let val = Const::from_constint(bcx.ccx, value);
203                         let llbb = llblock(self, *target);
204                         bcx.add_case(switch, val.llval, llbb)
205                     }
206                 }
207             }
208
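            // How we return depends on the PassMode of the return value: ignored
            // and indirect (sret-style) returns just `ret void`, direct and pair
            // returns load or repack the return place, and cast returns are
            // reloaded through a pointer cast to the cast type.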
209             mir::TerminatorKind::Return => {
210                 let llval = match self.fn_ty.ret.mode {
211                     PassMode::Ignore | PassMode::Indirect(_) => {
212                         bcx.ret_void();
213                         return;
214                     }
215
216                     PassMode::Direct(_) | PassMode::Pair(..) => {
217                         let op = self.trans_consume(&bcx, &mir::Lvalue::Local(mir::RETURN_POINTER));
218                         if let Ref(llval, align) = op.val {
219                             bcx.load(llval, align.non_abi())
220                         } else {
221                             op.immediate_or_packed_pair(&bcx)
222                         }
223                     }
224
225                     PassMode::Cast(cast_ty) => {
226                         let op = match self.locals[mir::RETURN_POINTER] {
227                             LocalRef::Operand(Some(op)) => op,
228                             LocalRef::Operand(None) => bug!("use of return before def"),
229                             LocalRef::Lvalue(tr_lvalue) => {
230                                 OperandRef {
231                                     val: Ref(tr_lvalue.llval, tr_lvalue.alignment),
232                                     layout: tr_lvalue.layout
233                                 }
234                             }
235                         };
236                         let llslot = match op.val {
237                             Immediate(_) | Pair(..) => {
238                                 let scratch = LvalueRef::alloca(&bcx, self.fn_ty.ret.layout, "ret");
239                                 op.val.store(&bcx, scratch);
240                                 scratch.llval
241                             }
242                             Ref(llval, align) => {
243                                 assert_eq!(align, Alignment::AbiAligned,
244                                            "return pointer is unaligned!");
245                                 llval
246                             }
247                         };
248                         bcx.load(
249                             bcx.pointercast(llslot, cast_ty.llvm_type(bcx.ccx).ptr_to()),
250                             Some(self.fn_ty.ret.layout.align))
251                     }
252                 };
253                 bcx.ret(llval);
254             }
255
256             mir::TerminatorKind::Unreachable => {
257                 bcx.unreachable();
258             }
259
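            // Drop terminators call the monomorphized `drop_in_place` glue. Empty
            // drop glue is just a branch to the target; for trait objects the
            // concrete destructor is loaded from the vtable and gets only the data
            // pointer, while everything else calls the glue directly with the
            // (possibly fat) lvalue.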
260             mir::TerminatorKind::Drop { ref location, target, unwind } => {
261                 let ty = location.ty(self.mir, bcx.tcx()).to_ty(bcx.tcx());
262                 let ty = self.monomorphize(&ty);
263                 let drop_fn = monomorphize::resolve_drop_in_place(bcx.ccx.tcx(), ty);
264
265                 if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
266                     // we don't actually need to drop anything.
267                     funclet_br(self, bcx, target);
268                     return
269                 }
270
271                 let lvalue = self.trans_lvalue(&bcx, location);
272                 let mut args: &[_] = &[lvalue.llval, lvalue.llextra];
273                 args = &args[..1 + lvalue.has_extra() as usize];
274                 let (drop_fn, fn_ty) = match ty.sty {
275                     ty::TyDynamic(..) => {
276                         let fn_ty = common::instance_ty(bcx.ccx.tcx(), &drop_fn);
277                         let sig = common::ty_fn_sig(bcx.ccx, fn_ty);
278                         let sig = bcx.tcx().erase_late_bound_regions_and_normalize(&sig);
279                         let fn_ty = FnType::new_vtable(bcx.ccx, sig, &[]);
280                         args = &args[..1];
281                         (meth::DESTRUCTOR.get_fn(&bcx, lvalue.llextra, &fn_ty), fn_ty)
282                     }
283                     _ => {
284                         (callee::get_fn(bcx.ccx, drop_fn),
285                          FnType::of_instance(bcx.ccx, &drop_fn))
286                     }
287                 };
288                 do_call(self, bcx, fn_ty, drop_fn, args,
289                         Some((ReturnDest::Nothing, target)),
290                         unwind);
291             }
292
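            // Asserts first try to constant-fold the condition (forcing the success
            // path for negation-overflow checks when overflow checks are disabled);
            // a check that is statically known to succeed skips the panic block
            // entirely. Otherwise the condition goes through `llvm.expect.i1` and we
            // branch to a panic block that calls the matching panic lang item,
            // passing a statically allocated message/file/line/col struct plus, for
            // bounds checks, the runtime index and length.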
293             mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
294                 let cond = self.trans_operand(&bcx, cond).immediate();
295                 let mut const_cond = common::const_to_opt_u128(cond, false).map(|c| c == 1);
296
297                 // This case can currently arise only from functions marked
298                 // with #[rustc_inherit_overflow_checks] and inlined from
299                 // another crate (mostly core::num generic/#[inline] fns),
300                 // while the current crate doesn't use overflow checks.
301                 // NOTE: Unlike binops, negation doesn't have its own
302                 // checked operation, just a comparison with the minimum
303                 // value, so we have to check for the assert message.
304                 if !bcx.ccx.check_overflow() {
305                     use rustc_const_math::ConstMathErr::Overflow;
306                     use rustc_const_math::Op::Neg;
307
308                     if let mir::AssertMessage::Math(Overflow(Neg)) = *msg {
309                         const_cond = Some(expected);
310                     }
311                 }
312
313                 // Don't translate the panic block if success is known.
314                 if const_cond == Some(expected) {
315                     funclet_br(self, bcx, target);
316                     return;
317                 }
318
319                 // Pass the condition through llvm.expect for branch hinting.
320                 let expect = bcx.ccx.get_intrinsic(&"llvm.expect.i1");
321                 let cond = bcx.call(expect, &[cond, C_bool(bcx.ccx, expected)], None);
322
323                 // Create the failure block and the conditional branch to it.
324                 let lltarget = llblock(self, target);
325                 let panic_block = self.new_block("panic");
326                 if expected {
327                     bcx.cond_br(cond, lltarget, panic_block.llbb());
328                 } else {
329                     bcx.cond_br(cond, panic_block.llbb(), lltarget);
330                 }
331
332                 // After this point, bcx is the block for the call to panic.
333                 bcx = panic_block;
334                 self.set_debug_loc(&bcx, terminator.source_info);
335
336                 // Get the location information.
337                 let loc = bcx.sess().codemap().lookup_char_pos(span.lo());
338                 let filename = Symbol::intern(&loc.file.name).as_str();
339                 let filename = C_str_slice(bcx.ccx, filename);
340                 let line = C_u32(bcx.ccx, loc.line as u32);
341                 let col = C_u32(bcx.ccx, loc.col.to_usize() as u32 + 1);
342                 let align = tcx.data_layout.aggregate_align
343                     .max(tcx.data_layout.i32_align)
344                     .max(tcx.data_layout.pointer_align);
345
346                 // Put together the arguments to the panic entry point.
347                 let (lang_item, args, const_err) = match *msg {
348                     mir::AssertMessage::BoundsCheck { ref len, ref index } => {
349                         let len = self.trans_operand(&mut bcx, len).immediate();
350                         let index = self.trans_operand(&mut bcx, index).immediate();
351
352                         let const_err = common::const_to_opt_u128(len, false)
353                             .and_then(|len| common::const_to_opt_u128(index, false)
354                                 .map(|index| ErrKind::IndexOutOfBounds {
355                                     len: len as u64,
356                                     index: index as u64
357                                 }));
358
359                         let file_line_col = C_struct(bcx.ccx, &[filename, line, col], false);
360                         let file_line_col = consts::addr_of(bcx.ccx,
361                                                             file_line_col,
362                                                             align,
363                                                             "panic_bounds_check_loc");
364                         (lang_items::PanicBoundsCheckFnLangItem,
365                          vec![file_line_col, index, len],
366                          const_err)
367                     }
368                     mir::AssertMessage::Math(ref err) => {
369                         let msg_str = Symbol::intern(err.description()).as_str();
370                         let msg_str = C_str_slice(bcx.ccx, msg_str);
371                         let msg_file_line_col = C_struct(bcx.ccx,
372                                                      &[msg_str, filename, line, col],
373                                                      false);
374                         let msg_file_line_col = consts::addr_of(bcx.ccx,
375                                                                 msg_file_line_col,
376                                                                 align,
377                                                                 "panic_loc");
378                         (lang_items::PanicFnLangItem,
379                          vec![msg_file_line_col],
380                          Some(ErrKind::Math(err.clone())))
381                     }
382                     mir::AssertMessage::GeneratorResumedAfterReturn |
383                     mir::AssertMessage::GeneratorResumedAfterPanic => {
384                         let str = if let mir::AssertMessage::GeneratorResumedAfterReturn = *msg {
385                             "generator resumed after completion"
386                         } else {
387                             "generator resumed after panicking"
388                         };
389                         let msg_str = Symbol::intern(str).as_str();
390                         let msg_str = C_str_slice(bcx.ccx, msg_str);
391                         let msg_file_line_col = C_struct(bcx.ccx,
392                                                      &[msg_str, filename, line, col],
393                                                      false);
394                         let msg_file_line_col = consts::addr_of(bcx.ccx,
395                                                                 msg_file_line_col,
396                                                                 align,
397                                                                 "panic_loc");
398                         (lang_items::PanicFnLangItem,
399                          vec![msg_file_line_col],
400                          None)
401                     }
402                 };
403
404                 // If we know we always panic, and the error message
405                 // is also constant, then we can produce a warning.
406                 if const_cond == Some(!expected) {
407                     if let Some(err) = const_err {
408                         let err = ConstEvalErr{ span: span, kind: err };
409                         let mut diag = bcx.tcx().sess.struct_span_warn(
410                             span, "this expression will panic at run-time");
411                         err.note(bcx.tcx(), span, "expression", &mut diag);
412                         diag.emit();
413                     }
414                 }
415
416                 // Obtain the panic entry point.
417                 let def_id = common::langcall(bcx.tcx(), Some(span), "", lang_item);
418                 let instance = ty::Instance::mono(bcx.tcx(), def_id);
419                 let fn_ty = FnType::of_instance(bcx.ccx, &instance);
420                 let llfn = callee::get_fn(bcx.ccx, instance);
421
422                 // Translate the actual panic invoke/call.
423                 do_call(self, bcx, fn_ty, llfn, &args, None, cleanup);
424             }
425
426             mir::TerminatorKind::DropAndReplace { .. } => {
427                 bug!("undesugared DropAndReplace in trans: {:?}", terminator);
428             }
429
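            // Calls: resolve the callee to either a known instance (TyFnDef) or a
            // function pointer, translate `transmute` and most other intrinsics
            // inline rather than calling them, flatten "rust-call" tupled
            // arguments, patch up virtual calls through the vtable, and finally
            // hand everything to `do_call`.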
430             mir::TerminatorKind::Call { ref func, ref args, ref destination, cleanup } => {
431                 // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
432                 let callee = self.trans_operand(&bcx, func);
433
434                 let (instance, mut llfn) = match callee.layout.ty.sty {
435                     ty::TyFnDef(def_id, substs) => {
436                         (Some(ty::Instance::resolve(bcx.ccx.tcx(),
437                                                     ty::ParamEnv::empty(traits::Reveal::All),
438                                                     def_id,
439                                                     substs).unwrap()),
440                          None)
441                     }
442                     ty::TyFnPtr(_) => {
443                         (None, Some(callee.immediate()))
444                     }
445                     _ => bug!("{} is not callable", callee.layout.ty)
446                 };
447                 let def = instance.map(|i| i.def);
448                 let sig = callee.layout.ty.fn_sig(bcx.tcx());
449                 let sig = bcx.tcx().erase_late_bound_regions_and_normalize(&sig);
450                 let abi = sig.abi;
451
452                 // Handle intrinsics that old trans wants Expr's for, ourselves.
453                 let intrinsic = match def {
454                     Some(ty::InstanceDef::Intrinsic(def_id))
455                         => Some(bcx.tcx().item_name(def_id)),
456                     _ => None
457                 };
458                 let intrinsic = intrinsic.as_ref().map(|s| &s[..]);
459
460                 if intrinsic == Some("transmute") {
461                     let &(ref dest, target) = destination.as_ref().unwrap();
462                     self.trans_transmute(&bcx, &args[0], dest);
463                     funclet_br(self, bcx, target);
464                     return;
465                 }
466
467                 let extra_args = &args[sig.inputs().len()..];
468                 let extra_args = extra_args.iter().map(|op_arg| {
469                     let op_ty = op_arg.ty(self.mir, bcx.tcx());
470                     self.monomorphize(&op_ty)
471                 }).collect::<Vec<_>>();
472
473                 let fn_ty = match def {
474                     Some(ty::InstanceDef::Virtual(..)) => {
475                         FnType::new_vtable(bcx.ccx, sig, &extra_args)
476                     }
477                     Some(ty::InstanceDef::DropGlue(_, None)) => {
478                         // empty drop glue - a nop.
479                         let &(_, target) = destination.as_ref().unwrap();
480                         funclet_br(self, bcx, target);
481                         return;
482                     }
483                     _ => FnType::new(bcx.ccx, sig, &extra_args)
484                 };
485
486                 // The arguments we'll be passing. Plus one to account for outptr, if used.
487                 let arg_count = fn_ty.args.len() + fn_ty.ret.is_indirect() as usize;
488                 let mut llargs = Vec::with_capacity(arg_count);
489
490                 // Prepare the return value destination
491                 let ret_dest = if let Some((ref dest, _)) = *destination {
492                     let is_intrinsic = intrinsic.is_some();
493                     self.make_return_dest(&bcx, dest, &fn_ty.ret, &mut llargs,
494                                           is_intrinsic)
495                 } else {
496                     ReturnDest::Nothing
497                 };
498
499                 if intrinsic.is_some() && intrinsic != Some("drop_in_place") {
500                     use intrinsic::trans_intrinsic_call;
501
502                     let dest = match ret_dest {
503                         _ if fn_ty.ret.is_indirect() => llargs[0],
504                         ReturnDest::Nothing => {
505                             C_undef(fn_ty.ret.memory_ty(bcx.ccx).ptr_to())
506                         }
507                         ReturnDest::IndirectOperand(dst, _) |
508                         ReturnDest::Store(dst) => dst.llval,
509                         ReturnDest::DirectOperand(_) =>
510                             bug!("Cannot use direct operand with an intrinsic call")
511                     };
512
513                     let args: Vec<_> = args.iter().enumerate().map(|(i, arg)| {
514                         // The indices passed to simd_shuffle* in the
515                         // third argument must be constant. This is
516                         // checked by const-qualification, which also
517                         // promotes any complex rvalues to constants.
518                         if i == 2 && intrinsic.unwrap().starts_with("simd_shuffle") {
519                             match *arg {
520                                 mir::Operand::Copy(_) |
521                                 mir::Operand::Move(_) => {
522                                     span_bug!(span, "shuffle indices must be constant");
523                                 }
524                                 mir::Operand::Constant(ref constant) => {
525                                     let val = self.trans_constant(&bcx, constant);
526                                     return OperandRef {
527                                         val: Immediate(val.llval),
528                                         layout: bcx.ccx.layout_of(val.ty)
529                                     };
530                                 }
531                             }
532                         }
533
534                         self.trans_operand(&bcx, arg)
535                     }).collect();
536
538                     let callee_ty = common::instance_ty(
539                         bcx.ccx.tcx(), instance.as_ref().unwrap());
540                     trans_intrinsic_call(&bcx, callee_ty, &fn_ty, &args, dest,
541                                          terminator.source_info.span);
542
543                     if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
544                         self.store_return(&bcx, ret_dest, &fn_ty.ret, dst.llval);
545                     }
546
547                     if let Some((_, target)) = *destination {
548                         funclet_br(self, bcx, target);
549                     } else {
550                         bcx.unreachable();
551                     }
552
553                     return;
554                 }
555
556                 // Split the rust-call tupled arguments off.
557                 let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
558                     let (tup, args) = args.split_last().unwrap();
559                     (args, Some(tup))
560                 } else {
561                     (&args[..], None)
562                 };
563
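                // For a virtual call, the first argument is the `self` fat pointer:
                // its data half is passed as the receiver and its vtable half is
                // used to look up the method's function pointer.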
564                 for (i, arg) in first_args.iter().enumerate() {
565                     let mut op = self.trans_operand(&bcx, arg);
566                     if let (0, Some(ty::InstanceDef::Virtual(_, idx))) = (i, def) {
567                         if let Pair(data_ptr, meta) = op.val {
568                             llfn = Some(meth::VirtualIndex::from_index(idx)
569                                 .get_fn(&bcx, meta, &fn_ty));
570                             llargs.push(data_ptr);
571                             continue;
572                         }
573                     }
574
575                     // The callee needs to own the argument memory if we pass it
576                     // by-ref, so make a local copy of non-immediate constants.
577                     match (arg, op.val) {
578                         (&mir::Operand::Copy(_), Ref(..)) |
579                         (&mir::Operand::Constant(_), Ref(..)) => {
580                             let tmp = LvalueRef::alloca(&bcx, op.layout, "const");
581                             op.val.store(&bcx, tmp);
582                             op.val = Ref(tmp.llval, tmp.alignment);
583                         }
584                         _ => {}
585                     }
586
587                     self.trans_argument(&bcx, op, &mut llargs, &fn_ty.args[i]);
588                 }
589                 if let Some(tup) = untuple {
590                     self.trans_arguments_untupled(&bcx, tup, &mut llargs,
591                         &fn_ty.args[first_args.len()..])
592                 }
593
594                 let fn_ptr = match (llfn, instance) {
595                     (Some(llfn), _) => llfn,
596                     (None, Some(instance)) => callee::get_fn(bcx.ccx, instance),
597                     _ => span_bug!(span, "no llfn for call"),
598                 };
599
600                 do_call(self, bcx, fn_ty, fn_ptr, &llargs,
601                         destination.as_ref().map(|&(_, target)| (ret_dest, target)),
602                         cleanup);
603             }
604             mir::TerminatorKind::GeneratorDrop |
605             mir::TerminatorKind::Yield { .. } => bug!("generator ops in trans"),
606             mir::TerminatorKind::FalseEdges { .. } => bug!("borrowck false edges in trans"),
607         }
608     }
609
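    // Lowers one translated operand into the flat `llargs` list: pushes ABI
    // padding, splits scalar pairs into two values, spills immediates that the
    // ABI wants passed indirectly or through a cast type, copies packed
    // (under-aligned) memory into an aligned temporary, and reloads by-ref
    // values that the ABI passes by value.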
610     fn trans_argument(&mut self,
611                       bcx: &Builder<'a, 'tcx>,
612                       op: OperandRef<'tcx>,
613                       llargs: &mut Vec<ValueRef>,
614                       arg: &ArgType<'tcx>) {
615         // Fill padding with undef value, where applicable.
616         if let Some(ty) = arg.pad {
617             llargs.push(C_undef(ty.llvm_type(bcx.ccx)));
618         }
619
620         if arg.is_ignore() {
621             return;
622         }
623
624         if let PassMode::Pair(..) = arg.mode {
625             match op.val {
626                 Pair(a, b) => {
627                     llargs.push(a);
628                     llargs.push(b);
629                     return;
630                 }
631                 _ => bug!("trans_argument: {:?} invalid for pair argument", op)
632             }
633         }
634
635         // Force by-ref if we have to load through a cast pointer.
636         let (mut llval, align, by_ref) = match op.val {
637             Immediate(_) | Pair(..) => {
638                 match arg.mode {
639                     PassMode::Indirect(_) | PassMode::Cast(_) => {
640                         let scratch = LvalueRef::alloca(bcx, arg.layout, "arg");
641                         op.val.store(bcx, scratch);
642                         (scratch.llval, Alignment::AbiAligned, true)
643                     }
644                     _ => {
645                         (op.immediate_or_packed_pair(bcx), Alignment::AbiAligned, false)
646                     }
647                 }
648             }
649             Ref(llval, align @ Alignment::Packed(_)) if arg.is_indirect() => {
650                 // `foo(packed.large_field)`. We can't pass the (unaligned) field directly,
651                 // so copy it into an ABI-aligned scratch slot first. As of Rust 1.16 we most
652                 // likely only pass temporaries here, but let's not leave latent bugs around.
653
654                 let scratch = LvalueRef::alloca(bcx, arg.layout, "arg");
655                 base::memcpy_ty(bcx, scratch.llval, llval, op.layout, align.non_abi());
656                 (scratch.llval, Alignment::AbiAligned, true)
657             }
658             Ref(llval, align) => (llval, align, true)
659         };
660
661         if by_ref && !arg.is_indirect() {
662             // Have to load the argument, maybe while casting it.
663             if let PassMode::Cast(ty) = arg.mode {
664                 llval = bcx.load(bcx.pointercast(llval, ty.llvm_type(bcx.ccx).ptr_to()),
665                                  (align | Alignment::Packed(arg.layout.align))
666                                     .non_abi());
667             } else {
668                 // We can't use `LvalueRef::load` here because the argument
669                 // may have a type we don't treat as immediate, but the ABI
670                 // used for this call is passing it by-value. In that case,
671                 // the load would just produce `OperandValue::Ref` instead
672                 // of the `OperandValue::Immediate` we need for the call.
673                 llval = bcx.load(llval, align.non_abi());
674                 if let layout::Abi::Scalar(ref scalar) = arg.layout.abi {
675                     if scalar.is_bool() {
676                         bcx.range_metadata(llval, 0..2);
677                     }
678                 }
679                 // We store bools as i8 so we need to truncate to i1.
680                 llval = base::to_immediate(bcx, llval, arg.layout);
681             }
682         }
683
684         llargs.push(llval);
685     }
686
687     fn trans_arguments_untupled(&mut self,
688                                 bcx: &Builder<'a, 'tcx>,
689                                 operand: &mir::Operand<'tcx>,
690                                 llargs: &mut Vec<ValueRef>,
691                                 args: &[ArgType<'tcx>]) {
692         let tuple = self.trans_operand(bcx, operand);
693
694         // Handle both by-ref and immediate tuples.
695         if let Ref(llval, align) = tuple.val {
696             let tuple_ptr = LvalueRef::new_sized(llval, tuple.layout, align);
697             for i in 0..tuple.layout.fields.count() {
698                 let field_ptr = tuple_ptr.project_field(bcx, i);
699                 self.trans_argument(bcx, field_ptr.load(bcx), llargs, &args[i]);
700             }
701         } else {
702             // If the tuple is immediate, the elements are as well.
703             for i in 0..tuple.layout.fields.count() {
704                 let op = tuple.extract_field(bcx, i);
705                 self.trans_argument(bcx, op, llargs, &args[i]);
706             }
707         }
708     }
709
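    // The personality slot is a stack slot typed roughly as `(*mut u8, i32)` that
    // holds the exception-pointer/selector pair produced by a landing pad, so
    // that `Resume` can reload and re-raise it later.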
710     fn get_personality_slot(&mut self, bcx: &Builder<'a, 'tcx>) -> LvalueRef<'tcx> {
711         let ccx = bcx.ccx;
712         if let Some(slot) = self.personality_slot {
713             slot
714         } else {
715             let layout = ccx.layout_of(ccx.tcx().intern_tup(&[
716                 ccx.tcx().mk_mut_ptr(ccx.tcx().types.u8),
717                 ccx.tcx().types.i32
718             ], false));
719             let slot = LvalueRef::alloca(bcx, layout, "personalityslot");
720             self.personality_slot = Some(slot);
721             slot
722         }
723     }
724
725     /// Return the landing pad wrapper around the given basic block
726     ///
727     /// No-op in MSVC SEH scheme.
728     fn landing_pad_to(&mut self, target_bb: mir::BasicBlock) -> BasicBlockRef {
729         if let Some(block) = self.landing_pads[target_bb] {
730             return block;
731         }
732
733         let block = self.blocks[target_bb];
734         let landing_pad = self.landing_pad_uncached(block);
735         self.landing_pads[target_bb] = Some(landing_pad);
736         landing_pad
737     }
738
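    // Builds the actual GNU-style landing pad: a "cleanup" block containing a
    // `landingpad` instruction (with the cleanup flag set) against the EH
    // personality, which saves the resulting pair into the personality slot and
    // then branches on to the MIR cleanup block.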
739     fn landing_pad_uncached(&mut self, target_bb: BasicBlockRef) -> BasicBlockRef {
740         if base::wants_msvc_seh(self.ccx.sess()) {
741             span_bug!(self.mir.span, "landing pad was not inserted?")
742         }
743
744         let bcx = self.new_block("cleanup");
745
746         let llpersonality = self.ccx.eh_personality();
747         let llretty = self.landing_pad_type();
748         let lp = bcx.landing_pad(llretty, llpersonality, 1, self.llfn);
749         bcx.set_cleanup(lp);
750
751         let slot = self.get_personality_slot(&bcx);
752         slot.storage_live(&bcx);
753         Pair(bcx.extract_value(lp, 0), bcx.extract_value(lp, 1)).store(&bcx, slot);
754
755         bcx.br(target_bb);
756         bcx.llbb()
757     }
758
759     fn landing_pad_type(&self) -> Type {
760         let ccx = self.ccx;
761         Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)], false)
762     }
763
764     fn unreachable_block(&mut self) -> BasicBlockRef {
765         self.unreachable_block.unwrap_or_else(|| {
766             let bl = self.new_block("unreachable");
767             bl.unreachable();
768             self.unreachable_block = Some(bl.llbb());
769             bl.llbb()
770         })
771     }
772
773     pub fn new_block(&self, name: &str) -> Builder<'a, 'tcx> {
774         Builder::new_block(self.ccx, self.llfn, name)
775     }
776
777     pub fn get_builder(&self, bb: mir::BasicBlock) -> Builder<'a, 'tcx> {
778         let builder = Builder::with_ccx(self.ccx);
779         builder.position_at_end(self.blocks[bb]);
780         builder
781     }
782
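    // Decides where a call's return value should go: nowhere for ignored returns,
    // an out-pointer pushed onto `llargs` for indirect returns, a fresh temporary
    // for intrinsics and for operand locals that need an indirect return, or a
    // direct write into the operand local.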
783     fn make_return_dest(&mut self, bcx: &Builder<'a, 'tcx>,
784                         dest: &mir::Lvalue<'tcx>, fn_ret: &ArgType<'tcx>,
785                         llargs: &mut Vec<ValueRef>, is_intrinsic: bool)
786                         -> ReturnDest<'tcx> {
787         // If the return is ignored, we can just return a do-nothing ReturnDest
788         if fn_ret.is_ignore() {
789             return ReturnDest::Nothing;
790         }
791         let dest = if let mir::Lvalue::Local(index) = *dest {
792             match self.locals[index] {
793                 LocalRef::Lvalue(dest) => dest,
794                 LocalRef::Operand(None) => {
795                     // Handle temporary lvalues, specifically Operand ones, as
796                     // they don't have allocas
797                     return if fn_ret.is_indirect() {
798                         // Odd, but possible, case: we have an operand temporary,
799                         // but the calling convention has an indirect return.
800                         let tmp = LvalueRef::alloca(bcx, fn_ret.layout, "tmp_ret");
801                         tmp.storage_live(bcx);
802                         llargs.push(tmp.llval);
803                         ReturnDest::IndirectOperand(tmp, index)
804                     } else if is_intrinsic {
805                         // Currently, intrinsics always need a location to store
806                         // the result, so we create a temporary alloca for the
807                         // result
808                         let tmp = LvalueRef::alloca(bcx, fn_ret.layout, "tmp_ret");
809                         tmp.storage_live(bcx);
810                         ReturnDest::IndirectOperand(tmp, index)
811                     } else {
812                         ReturnDest::DirectOperand(index)
813                     };
814                 }
815                 LocalRef::Operand(Some(_)) => {
816                     bug!("lvalue local already assigned to");
817                 }
818             }
819         } else {
820             self.trans_lvalue(bcx, dest)
821         };
822         if fn_ret.is_indirect() {
823             match dest.alignment {
824                 Alignment::AbiAligned => {
825                     llargs.push(dest.llval);
826                     ReturnDest::Nothing
827                 },
828                 Alignment::Packed(_) => {
829                     // Currently, MIR code generation does not create calls
830                     // that store directly to fields of packed structs (in
831                     // fact, the calls it creates write only to temps).
832                     //
833                     // If someone changes that, please update this code path
834                     // to create a temporary.
835                     span_bug!(self.mir.span, "can't directly store to unaligned value");
836                 }
837             }
838         } else {
839             ReturnDest::Store(dest)
840         }
841     }
842
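    // `transmute` into an operand local materializes a temporary alloca,
    // transmutes into it, and reloads it as the local's operand;
    // `trans_transmute_into` below simply stores the source through the
    // destination pointer cast to the source's LLVM type, using the smaller of
    // the two alignments.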
843     fn trans_transmute(&mut self, bcx: &Builder<'a, 'tcx>,
844                        src: &mir::Operand<'tcx>,
845                        dst: &mir::Lvalue<'tcx>) {
846         if let mir::Lvalue::Local(index) = *dst {
847             match self.locals[index] {
848                 LocalRef::Lvalue(lvalue) => self.trans_transmute_into(bcx, src, lvalue),
849                 LocalRef::Operand(None) => {
850                     let dst_layout = bcx.ccx.layout_of(self.monomorphized_lvalue_ty(dst));
851                     assert!(!dst_layout.ty.has_erasable_regions());
852                     let lvalue = LvalueRef::alloca(bcx, dst_layout, "transmute_temp");
853                     lvalue.storage_live(bcx);
854                     self.trans_transmute_into(bcx, src, lvalue);
855                     let op = lvalue.load(bcx);
856                     lvalue.storage_dead(bcx);
857                     self.locals[index] = LocalRef::Operand(Some(op));
858                 }
859                 LocalRef::Operand(Some(op)) => {
860                     assert!(op.layout.is_zst(),
861                             "assigning to initialized SSA temp");
862                 }
863             }
864         } else {
865             let dst = self.trans_lvalue(bcx, dst);
866             self.trans_transmute_into(bcx, src, dst);
867         }
868     }
869
870     fn trans_transmute_into(&mut self, bcx: &Builder<'a, 'tcx>,
871                             src: &mir::Operand<'tcx>,
872                             dst: LvalueRef<'tcx>) {
873         let src = self.trans_operand(bcx, src);
874         let llty = src.layout.llvm_type(bcx.ccx);
875         let cast_ptr = bcx.pointercast(dst.llval, llty.ptr_to());
876         let align = src.layout.align.min(dst.layout.align);
877         src.val.store(bcx,
878             LvalueRef::new_sized(cast_ptr, src.layout, Alignment::Packed(align)));
879     }
880
882     // Stores the return value of a function call into its final location.
883     fn store_return(&mut self,
884                     bcx: &Builder<'a, 'tcx>,
885                     dest: ReturnDest<'tcx>,
886                     ret_ty: &ArgType<'tcx>,
887                     llval: ValueRef) {
888         use self::ReturnDest::*;
889
890         match dest {
891             Nothing => (),
892             Store(dst) => ret_ty.store(bcx, llval, dst),
893             IndirectOperand(tmp, index) => {
894                 let op = tmp.load(bcx);
895                 tmp.storage_dead(bcx);
896                 self.locals[index] = LocalRef::Operand(Some(op));
897             }
898             DirectOperand(index) => {
899                 // If there is a cast, we have to store and reload.
900                 let op = if let PassMode::Cast(_) = ret_ty.mode {
901                     let tmp = LvalueRef::alloca(bcx, ret_ty.layout, "tmp_ret");
902                     tmp.storage_live(bcx);
903                     ret_ty.store(bcx, llval, tmp);
904                     let op = tmp.load(bcx);
905                     tmp.storage_dead(bcx);
906                     op
907                 } else {
908                     OperandRef::from_immediate_or_packed_pair(bcx, llval, ret_ty.layout)
909                 };
910                 self.locals[index] = LocalRef::Operand(Some(op));
911             }
912         }
913     }
914 }
915
916 enum ReturnDest<'tcx> {
917     // Do nothing, the return value is indirect or ignored
918     Nothing,
919     // Store the return value to the pointer
920     Store(LvalueRef<'tcx>),
921     // Stores an indirect return value to an operand local lvalue
922     IndirectOperand(LvalueRef<'tcx>, mir::Local),
923     // Stores a direct return value to an operand local lvalue
924     DirectOperand(mir::Local)
925 }