// src/librustc_codegen_ssa/mir/block.rs
1 use rustc_index::vec::Idx;
2 use rustc::middle::lang_items;
3 use rustc::ty::{self, Ty, TypeFoldable, Instance};
4 use rustc::ty::layout::{self, LayoutOf, HasTyCtxt, FnTypeExt};
5 use rustc::mir::{self, Place, PlaceBase, Static, StaticKind};
6 use rustc::mir::interpret::PanicInfo;
7 use rustc_target::abi::call::{ArgType, FnType, PassMode};
8 use rustc_target::spec::abi::Abi;
9 use crate::base;
10 use crate::MemFlags;
11 use crate::common::{self, IntPredicate};
12 use crate::meth;
13
14 use crate::traits::*;
15
16 use std::borrow::Cow;
17
18 use syntax::symbol::Symbol;
19 use syntax_pos::Pos;
20
21 use super::{FunctionCx, LocalRef};
22 use super::place::PlaceRef;
23 use super::operand::OperandRef;
24 use super::operand::OperandValue::{Pair, Ref, Immediate};
25
26 /// Used by `FunctionCx::codegen_terminator` for emitting common patterns
27 /// e.g., creating a basic block, calling a function, etc.
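/// `bb` and `terminator` identify the block being lowered; `funclet_bb` is the
/// (MSVC SEH) funclet this block belongs to, if any.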
28 struct TerminatorCodegenHelper<'a, 'tcx> {
29     bb: &'a mir::BasicBlock,
30     terminator: &'a mir::Terminator<'tcx>,
31     funclet_bb: Option<mir::BasicBlock>,
32 }
33
34 impl<'a, 'tcx> TerminatorCodegenHelper<'a, 'tcx> {
35     /// Returns the associated funclet from `FunctionCx::funclets` for the
36     /// `funclet_bb` member if it is not `None`.
37     fn funclet<'c, 'b, Bx: BuilderMethods<'b, 'tcx>>(
38         &self,
39         fx: &'c mut FunctionCx<'b, 'tcx, Bx>,
40     ) -> Option<&'c Bx::Funclet> {
41         match self.funclet_bb {
42             Some(funcl) => fx.funclets[funcl].as_ref(),
43             None => None,
44         }
45     }
46
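    /// Returns the LLVM block to branch to for `target`, plus a flag that is `true`
    /// when the jump crosses funclets and must therefore go through a `cleanupret`
    /// trampoline (only relevant with MSVC-style SEH).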
47     fn lltarget<'b, 'c, Bx: BuilderMethods<'b, 'tcx>>(
48         &self,
49         fx: &'c mut FunctionCx<'b, 'tcx, Bx>,
50         target: mir::BasicBlock,
51     ) -> (Bx::BasicBlock, bool) {
52         let span = self.terminator.source_info.span;
53         let lltarget = fx.blocks[target];
54         let target_funclet = fx.cleanup_kinds[target].funclet_bb(target);
55         match (self.funclet_bb, target_funclet) {
56             (None, None) => (lltarget, false),
57             (Some(f), Some(t_f)) if f == t_f || !base::wants_msvc_seh(fx.cx.tcx().sess) =>
58                 (lltarget, false),
59             // jump *into* cleanup - need a landing pad if GNU
60             (None, Some(_)) => (fx.landing_pad_to(target), false),
61             (Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", self.terminator),
62             (Some(_), Some(_)) => (fx.landing_pad_to(target), true),
63         }
64     }
65
66     /// Returns the block to jump to for `target`, creating a `cleanup_ret` trampoline
67     /// first if the jump crosses funclets (MSVC SEH).
67     fn llblock<'c, 'b, Bx: BuilderMethods<'b, 'tcx>>(
68         &self,
69         fx: &'c mut FunctionCx<'b, 'tcx, Bx>,
70         target: mir::BasicBlock,
71     ) -> Bx::BasicBlock {
72         let (lltarget, is_cleanupret) = self.lltarget(fx, target);
73         if is_cleanupret {
74             // MSVC cross-funclet jump - need a trampoline
75
76             debug!("llblock: creating cleanup trampoline for {:?}", target);
77             let name = &format!("{:?}_cleanup_trampoline_{:?}", self.bb, target);
78             let mut trampoline = fx.new_block(name);
79             trampoline.cleanup_ret(self.funclet(fx).unwrap(),
80                                    Some(lltarget));
81             trampoline.llbb()
82         } else {
83             lltarget
84         }
85     }
86
87     fn funclet_br<'c, 'b, Bx: BuilderMethods<'b, 'tcx>>(
88         &self,
89         fx: &'c mut FunctionCx<'b, 'tcx, Bx>,
90         bx: &mut Bx,
91         target: mir::BasicBlock,
92     ) {
93         let (lltarget, is_cleanupret) = self.lltarget(fx, target);
94         if is_cleanupret {
95             // micro-optimization: generate a `ret` rather than a jump
96             // to a trampoline.
97             bx.cleanup_ret(self.funclet(fx).unwrap(), Some(lltarget));
98         } else {
99             bx.br(lltarget);
100         }
101     }
102
103     /// Call `fn_ptr` of `fn_ty` with the arguments `llargs`, the optional
104     /// return destination `destination` and the optional cleanup block `cleanup`.
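    /// If `cleanup` is provided, the call is emitted as an `invoke` whose unwind edge
    /// leads to the cleanup block; otherwise a plain `call` is emitted.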
105     fn do_call<'c, 'b, Bx: BuilderMethods<'b, 'tcx>>(
106         &self,
107         fx: &'c mut FunctionCx<'b, 'tcx, Bx>,
108         bx: &mut Bx,
109         fn_ty: FnType<'tcx, Ty<'tcx>>,
110         fn_ptr: Bx::Value,
111         llargs: &[Bx::Value],
112         destination: Option<(ReturnDest<'tcx, Bx::Value>, mir::BasicBlock)>,
113         cleanup: Option<mir::BasicBlock>,
114     ) {
115         if let Some(cleanup) = cleanup {
116             let ret_bx = if let Some((_, target)) = destination {
117                 fx.blocks[target]
118             } else {
119                 fx.unreachable_block()
120             };
121             let invokeret = bx.invoke(fn_ptr,
122                                       &llargs,
123                                       ret_bx,
124                                       self.llblock(fx, cleanup),
125                                       self.funclet(fx));
126             bx.apply_attrs_callsite(&fn_ty, invokeret);
127
128             if let Some((ret_dest, target)) = destination {
129                 let mut ret_bx = fx.build_block(target);
130                 fx.set_debug_loc(&mut ret_bx, self.terminator.source_info);
131                 fx.store_return(&mut ret_bx, ret_dest, &fn_ty.ret, invokeret);
132             }
133         } else {
134             let llret = bx.call(fn_ptr, &llargs, self.funclet(fx));
135             bx.apply_attrs_callsite(&fn_ty, llret);
136             if fx.mir[*self.bb].is_cleanup {
137                 // Cleanup is always the cold path. Don't inline
138                 // drop glue. Also, when there is a deeply-nested
139                 // struct, there are "symmetry" issues that cause
140                 // exponential inlining - see issue #41696.
141                 bx.do_not_inline(llret);
142             }
143
144             if let Some((ret_dest, target)) = destination {
145                 fx.store_return(bx, ret_dest, &fn_ty.ret, llret);
146                 self.funclet_br(fx, bx, target);
147             } else {
148                 bx.unreachable();
149             }
150         }
151     }
152
153     // Generate a sideeffect intrinsic if jumping to any of the targets can form
154     // a loop.
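    // The `llvm.sideeffect` intrinsic marks guaranteed forward progress so that LLVM
    // does not mis-optimize loops with no other observable side effects; it is only
    // emitted when the `insert_sideeffect` debugging option (`-Z insert-sideeffect`)
    // is enabled.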
155     fn maybe_sideeffect<'b, 'tcx2: 'b, Bx: BuilderMethods<'b, 'tcx2>>(
156         &self,
157         mir: &'b mir::Body<'tcx>,
158         bx: &mut Bx,
159         targets: &[mir::BasicBlock],
160     ) {
161         if bx.tcx().sess.opts.debugging_opts.insert_sideeffect {
162             if targets.iter().any(|target| {
163                 *target <= *self.bb
164                     && target
165                         .start_location()
166                         .is_predecessor_of(self.bb.start_location(), mir)
167             }) {
168                 bx.sideeffect();
169             }
170         }
171     }
172 }
173
174 /// Codegen implementations for some terminator variants.
175 impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
176     /// Generates code for a `Resume` terminator.
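    /// With MSVC-style funclets this lowers to a `cleanup_ret`; otherwise the exception
    /// pointer and selector saved in the personality slot are reloaded and handed back
    /// to the unwinder via `resume` (or `eh_unwind_resume` on targets with a custom
    /// unwind resume).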
177     fn codegen_resume_terminator<'b>(
178         &mut self,
179         helper: TerminatorCodegenHelper<'b, 'tcx>,
180         mut bx: Bx,
181     ) {
182         if let Some(funclet) = helper.funclet(self) {
183             bx.cleanup_ret(funclet, None);
184         } else {
185             let slot = self.get_personality_slot(&mut bx);
186             let lp0 = slot.project_field(&mut bx, 0);
187             let lp0 = bx.load_operand(lp0).immediate();
188             let lp1 = slot.project_field(&mut bx, 1);
189             let lp1 = bx.load_operand(lp1).immediate();
190             slot.storage_dead(&mut bx);
191
192             if !bx.sess().target.target.options.custom_unwind_resume {
193                 let mut lp = bx.const_undef(self.landing_pad_type());
194                 lp = bx.insert_value(lp, lp0, 0);
195                 lp = bx.insert_value(lp, lp1, 1);
196                 bx.resume(lp);
197             } else {
198                 bx.call(bx.eh_unwind_resume(), &[lp0],
199                         helper.funclet(self));
200                 bx.unreachable();
201             }
202         }
203     }
204
205     fn codegen_switchint_terminator<'b>(
206         &mut self,
207         helper: TerminatorCodegenHelper<'b, 'tcx>,
208         mut bx: Bx,
209         discr: &mir::Operand<'tcx>,
210         switch_ty: Ty<'tcx>,
211         values: &Cow<'tcx, [u128]>,
212         targets: &Vec<mir::BasicBlock>,
213     ) {
214         let discr = self.codegen_operand(&mut bx, &discr);
215         if targets.len() == 2 {
216             // If there are two targets, emit br instead of switch
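            // (The last entry in `targets` is the `otherwise` edge, so `values`
            // holds exactly one value here.)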
217             let lltrue = helper.llblock(self, targets[0]);
218             let llfalse = helper.llblock(self, targets[1]);
219             if switch_ty == bx.tcx().types.bool {
220                 helper.maybe_sideeffect(self.mir, &mut bx, targets.as_slice());
221                 // Don't generate trivial icmps when switching on bool
222                 if let [0] = values[..] {
223                     bx.cond_br(discr.immediate(), llfalse, lltrue);
224                 } else {
225                     assert_eq!(&values[..], &[1]);
226                     bx.cond_br(discr.immediate(), lltrue, llfalse);
227                 }
228             } else {
229                 let switch_llty = bx.immediate_backend_type(
230                     bx.layout_of(switch_ty)
231                 );
232                 let llval = bx.const_uint_big(switch_llty, values[0]);
233                 let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval);
234                 helper.maybe_sideeffect(self.mir, &mut bx, targets.as_slice());
235                 bx.cond_br(cmp, lltrue, llfalse);
236             }
237         } else {
238             helper.maybe_sideeffect(self.mir, &mut bx, targets.as_slice());
239             let (otherwise, targets) = targets.split_last().unwrap();
240             bx.switch(
241                 discr.immediate(),
242                 helper.llblock(self, *otherwise),
243                 values.iter().zip(targets).map(|(&value, target)| {
244                     (value, helper.llblock(self, *target))
245                 })
246             );
247         }
248     }
249
250     fn codegen_return_terminator(&mut self, mut bx: Bx) {
251         // Call `va_end` if this is the definition of a C-variadic function.
252         if self.fn_ty.c_variadic {
253             // The `VaList` "spoofed" argument is just after all the real arguments.
254             let va_list_arg_idx = self.fn_ty.args.len();
255             match self.locals[mir::Local::new(1 + va_list_arg_idx)] {
256                 LocalRef::Place(va_list) => {
257                     bx.va_end(va_list.llval);
258                 }
259                 _ => bug!("C-variadic function must have a `VaList` place"),
260             }
261         }
262         if self.fn_ty.ret.layout.abi.is_uninhabited() {
263             // Functions with uninhabited return values are marked `noreturn`,
264             // so we should make sure that we never actually return.
265             bx.abort();
266             bx.unreachable();
267             return;
268         }
269         let llval = match self.fn_ty.ret.mode {
270             PassMode::Ignore | PassMode::Indirect(..) => {
271                 bx.ret_void();
272                 return;
273             }
274
275             PassMode::Direct(_) | PassMode::Pair(..) => {
276                 let op =
277                     self.codegen_consume(&mut bx, &mir::Place::return_place().as_ref());
278                 if let Ref(llval, _, align) = op.val {
279                     bx.load(llval, align)
280                 } else {
281                     op.immediate_or_packed_pair(&mut bx)
282                 }
283             }
284
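            // For a cast return ABI the value must sit in memory so it can be reloaded
            // at the ABI's cast type; spill immediates and pairs to a scratch slot first.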
285             PassMode::Cast(cast_ty) => {
286                 let op = match self.locals[mir::RETURN_PLACE] {
287                     LocalRef::Operand(Some(op)) => op,
288                     LocalRef::Operand(None) => bug!("use of return before def"),
289                     LocalRef::Place(cg_place) => {
290                         OperandRef {
291                             val: Ref(cg_place.llval, None, cg_place.align),
292                             layout: cg_place.layout
293                         }
294                     }
295                     LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
296                 };
297                 let llslot = match op.val {
298                     Immediate(_) | Pair(..) => {
299                         let scratch =
300                             PlaceRef::alloca(&mut bx, self.fn_ty.ret.layout);
301                         op.val.store(&mut bx, scratch);
302                         scratch.llval
303                     }
304                     Ref(llval, _, align) => {
305                         assert_eq!(align, op.layout.align.abi,
306                                    "return place is unaligned!");
307                         llval
308                     }
309                 };
310                 let addr = bx.pointercast(llslot, bx.type_ptr_to(
311                     bx.cast_backend_type(&cast_ty)
312                 ));
313                 bx.load(addr, self.fn_ty.ret.layout.align.abi)
314             }
315         };
316         bx.ret(llval);
317     }
318
319
320     fn codegen_drop_terminator<'b>(
321         &mut self,
322         helper: TerminatorCodegenHelper<'b, 'tcx>,
323         mut bx: Bx,
324         location: &mir::Place<'tcx>,
325         target: mir::BasicBlock,
326         unwind: Option<mir::BasicBlock>,
327     ) {
328         let ty = location.ty(self.mir, bx.tcx()).ty;
329         let ty = self.monomorphize(&ty);
330         let drop_fn = Instance::resolve_drop_in_place(bx.tcx(), ty);
331
332         if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
333             // we don't actually need to drop anything.
334             helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
335             helper.funclet_br(self, &mut bx, target);
336             return
337         }
338
339         let place = self.codegen_place(&mut bx, &location.as_ref());
340         let (args1, args2);
341         let mut args = if let Some(llextra) = place.llextra {
342             args2 = [place.llval, llextra];
343             &args2[..]
344         } else {
345             args1 = [place.llval];
346             &args1[..]
347         };
348         let (drop_fn, fn_ty) = match ty.kind {
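            // Trait-object drops are dispatched through the vtable: the destructor sits
            // in the DESTRUCTOR slot, and only the data pointer is passed (the vtable
            // pointer is stripped from `args`).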
349             ty::Dynamic(..) => {
350                 let sig = drop_fn.fn_sig(self.cx.tcx());
351                 let sig = self.cx.tcx().normalize_erasing_late_bound_regions(
352                     ty::ParamEnv::reveal_all(),
353                     &sig,
354                 );
355                 let fn_ty = FnType::new_vtable(&bx, sig, &[]);
356                 let vtable = args[1];
357                 args = &args[..1];
358                 (meth::DESTRUCTOR.get_fn(&mut bx, vtable, &fn_ty), fn_ty)
359             }
360             _ => {
361                 (bx.get_fn_addr(drop_fn),
362                  FnType::of_instance(&bx, drop_fn))
363             }
364         };
365         helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
366         helper.do_call(self, &mut bx, fn_ty, drop_fn, args,
367                        Some((ReturnDest::Nothing, target)),
368                        unwind);
369     }
370
371     fn codegen_assert_terminator<'b>(
372         &mut self,
373         helper: TerminatorCodegenHelper<'b, 'tcx>,
374         mut bx: Bx,
375         terminator: &mir::Terminator<'tcx>,
376         cond: &mir::Operand<'tcx>,
377         expected: bool,
378         msg: &mir::AssertMessage<'tcx>,
379         target: mir::BasicBlock,
380         cleanup: Option<mir::BasicBlock>,
381     ) {
382         let span = terminator.source_info.span;
383         let cond = self.codegen_operand(&mut bx, cond).immediate();
384         let mut const_cond = bx.const_to_opt_u128(cond, false).map(|c| c == 1);
385
386         // This case can currently arise only from functions marked
387         // with #[rustc_inherit_overflow_checks] and inlined from
388         // another crate (mostly core::num generic/#[inline] fns),
389         // while the current crate doesn't use overflow checks.
390         // NOTE: Unlike binops, negation doesn't have its own
391         // checked operation, just a comparison with the minimum
392         // value, so we have to check for the assert message.
393         if !bx.check_overflow() {
394             if let PanicInfo::OverflowNeg = *msg {
395                 const_cond = Some(expected);
396             }
397         }
398
399         // Don't codegen the panic block if success is known.
400         if const_cond == Some(expected) {
401             helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
402             helper.funclet_br(self, &mut bx, target);
403             return;
404         }
405
406         // Pass the condition through llvm.expect for branch hinting.
407         let cond = bx.expect(cond, expected);
408
409         // Create the failure block and the conditional branch to it.
410         let lltarget = helper.llblock(self, target);
411         let panic_block = self.new_block("panic");
412         helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
413         if expected {
414             bx.cond_br(cond, lltarget, panic_block.llbb());
415         } else {
416             bx.cond_br(cond, panic_block.llbb(), lltarget);
417         }
418
419         // After this point, bx is the block for the call to panic.
420         bx = panic_block;
421         self.set_debug_loc(&mut bx, terminator.source_info);
422
423         // Get the location information.
424         let loc = bx.sess().source_map().lookup_char_pos(span.lo());
425         let filename = Symbol::intern(&loc.file.name.to_string());
426         let line = bx.const_u32(loc.line as u32);
427         let col = bx.const_u32(loc.col.to_usize() as u32 + 1);
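        // Source-map columns are 0-based; panic locations report 1-based columns.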
428
429         // Put together the arguments to the panic entry point.
430         let (lang_item, args) = match msg {
431             PanicInfo::BoundsCheck { ref len, ref index } => {
432                 let len = self.codegen_operand(&mut bx, len).immediate();
433                 let index = self.codegen_operand(&mut bx, index).immediate();
434
435                 let file_line_col = bx.static_panic_msg(
436                     None,
437                     filename,
438                     line,
439                     col,
440                     "panic_bounds_check_loc",
441                 );
442                 (lang_items::PanicBoundsCheckFnLangItem,
443                     vec![file_line_col, index, len])
444             }
445             _ => {
446                 let msg_str = Symbol::intern(msg.description());
447                 let msg_file_line_col = bx.static_panic_msg(
448                     Some(msg_str),
449                     filename,
450                     line,
451                     col,
452                     "panic_loc",
453                 );
454                 (lang_items::PanicFnLangItem,
455                     vec![msg_file_line_col])
456             }
457         };
458
459         // Obtain the panic entry point.
460         let def_id = common::langcall(bx.tcx(), Some(span), "", lang_item);
461         let instance = ty::Instance::mono(bx.tcx(), def_id);
462         let fn_ty = FnType::of_instance(&bx, instance);
463         let llfn = bx.get_fn_addr(instance);
464
465         // Codegen the actual panic invoke/call.
466         helper.do_call(self, &mut bx, fn_ty, llfn, &args, None, cleanup);
467     }
468
469     fn codegen_call_terminator<'b>(
470         &mut self,
471         helper: TerminatorCodegenHelper<'b, 'tcx>,
472         mut bx: Bx,
473         terminator: &mir::Terminator<'tcx>,
474         func: &mir::Operand<'tcx>,
475         args: &Vec<mir::Operand<'tcx>>,
476         destination: &Option<(mir::Place<'tcx>, mir::BasicBlock)>,
477         cleanup: Option<mir::BasicBlock>,
478     ) {
479         let span = terminator.source_info.span;
480         // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
481         let callee = self.codegen_operand(&mut bx, func);
482
483         let (instance, mut llfn) = match callee.layout.ty.kind {
484             ty::FnDef(def_id, substs) => {
485                 (Some(ty::Instance::resolve(bx.tcx(),
486                                             ty::ParamEnv::reveal_all(),
487                                             def_id,
488                                             substs).unwrap()),
489                  None)
490             }
491             ty::FnPtr(_) => {
492                 (None, Some(callee.immediate()))
493             }
494             _ => bug!("{} is not callable", callee.layout.ty),
495         };
496         let def = instance.map(|i| i.def);
497         let sig = callee.layout.ty.fn_sig(bx.tcx());
498         let sig = bx.tcx().normalize_erasing_late_bound_regions(
499             ty::ParamEnv::reveal_all(),
500             &sig,
501         );
502         let abi = sig.abi;
503
504         // Handle, ourselves, the intrinsics that the old codegen wanted `Expr`s for.
505         let intrinsic = match def {
506             Some(ty::InstanceDef::Intrinsic(def_id)) =>
507                 Some(bx.tcx().item_name(def_id).as_str()),
508             _ => None
509         };
510         let intrinsic = intrinsic.as_ref().map(|s| &s[..]);
511
512         if intrinsic == Some("transmute") {
513             if let Some(destination_ref) = destination.as_ref() {
514                 let &(ref dest, target) = destination_ref;
515                 self.codegen_transmute(&mut bx, &args[0], dest);
516                 helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
517                 helper.funclet_br(self, &mut bx, target);
518             } else {
519                 // If we are trying to transmute to an uninhabited type,
520                 // it is likely there is no allotted destination. In fact,
521                 // transmuting to an uninhabited type is UB, which means
522                 // we can do what we like. Here, we declare that transmuting
523                 // into an uninhabited type is impossible, so anything following
524                 // it must be unreachable.
525                 assert_eq!(bx.layout_of(sig.output()).abi, layout::Abi::Uninhabited);
526                 bx.unreachable();
527             }
528             return;
529         }
530
531         let extra_args = &args[sig.inputs().len()..];
532         let extra_args = extra_args.iter().map(|op_arg| {
533             let op_ty = op_arg.ty(self.mir, bx.tcx());
534             self.monomorphize(&op_ty)
535         }).collect::<Vec<_>>();
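        // Arguments beyond the declared inputs (e.g. the variable arguments of a
        // C-variadic call); their monomorphized types feed into the ABI computation below.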
536
537         let fn_ty = match def {
538             Some(ty::InstanceDef::Virtual(..)) => {
539                 FnType::new_vtable(&bx, sig, &extra_args)
540             }
541             Some(ty::InstanceDef::DropGlue(_, None)) => {
542                 // Empty drop glue; a no-op.
543                 let &(_, target) = destination.as_ref().unwrap();
544                 helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
545                 helper.funclet_br(self, &mut bx, target);
546                 return;
547             }
548             _ => FnType::new(&bx, sig, &extra_args)
549         };
550
551         // Emit a panic or a no-op for `panic_if_uninhabited`.
552         if intrinsic == Some("panic_if_uninhabited") {
553             let ty = instance.unwrap().substs.type_at(0);
554             let layout = bx.layout_of(ty);
555             if layout.abi.is_uninhabited() {
556                 let loc = bx.sess().source_map().lookup_char_pos(span.lo());
557                 let filename = Symbol::intern(&loc.file.name.to_string());
558                 let line = bx.const_u32(loc.line as u32);
559                 let col = bx.const_u32(loc.col.to_usize() as u32 + 1);
560
561                 let str = format!(
562                     "Attempted to instantiate uninhabited type {}",
563                     ty
564                 );
565                 let msg_str = Symbol::intern(&str);
566                 let msg_file_line_col = bx.static_panic_msg(
567                     Some(msg_str),
568                     filename,
569                     line,
570                     col,
571                     "panic_loc",
572                 );
573
574                 // Obtain the panic entry point.
575                 let def_id =
576                     common::langcall(bx.tcx(), Some(span), "", lang_items::PanicFnLangItem);
577                 let instance = ty::Instance::mono(bx.tcx(), def_id);
578                 let fn_ty = FnType::of_instance(&bx, instance);
579                 let llfn = bx.get_fn_addr(instance);
580
581                 if let Some((_, target)) = destination.as_ref() {
582                     helper.maybe_sideeffect(self.mir, &mut bx, &[*target]);
583                 }
584                 // Codegen the actual panic invoke/call.
585                 helper.do_call(
586                     self,
587                     &mut bx,
588                     fn_ty,
589                     llfn,
590                     &[msg_file_line_col],
591                     destination.as_ref().map(|(_, bb)| (ReturnDest::Nothing, *bb)),
592                     cleanup,
593                 );
594             } else {
595                 // a NOP
596                 let target = destination.as_ref().unwrap().1;
597                 helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
598                 helper.funclet_br(self, &mut bx, target);
599             }
600             return;
601         }
602
603         // The arguments we'll be passing. Plus one to account for outptr, if used.
604         let arg_count = fn_ty.args.len() + fn_ty.ret.is_indirect() as usize;
605         let mut llargs = Vec::with_capacity(arg_count);
606
607         // Prepare the return value destination
608         let ret_dest = if let Some((ref dest, _)) = *destination {
609             let is_intrinsic = intrinsic.is_some();
610             self.make_return_dest(&mut bx, dest, &fn_ty.ret, &mut llargs,
611                                   is_intrinsic)
612         } else {
613             ReturnDest::Nothing
614         };
615
616         if intrinsic.is_some() && intrinsic != Some("drop_in_place") {
617             let dest = match ret_dest {
618                 _ if fn_ty.ret.is_indirect() => llargs[0],
619                 ReturnDest::Nothing =>
620                     bx.const_undef(bx.type_ptr_to(bx.memory_ty(&fn_ty.ret))),
621                 ReturnDest::IndirectOperand(dst, _) | ReturnDest::Store(dst) =>
622                     dst.llval,
623                 ReturnDest::DirectOperand(_) =>
624                     bug!("Cannot use direct operand with an intrinsic call"),
625             };
626
627             let args: Vec<_> = args.iter().enumerate().map(|(i, arg)| {
628                 // The indices passed to simd_shuffle* in the
629                 // third argument must be constant. This is
630                 // checked by const-qualification, which also
631                 // promotes any complex rvalues to constants.
632                 if i == 2 && intrinsic.unwrap().starts_with("simd_shuffle") {
633                     match *arg {
634                         // The shuffle array argument is usually not an explicit constant,
635                         // but specified directly in the code. This means it gets promoted
636                         // and we can then extract the value by evaluating the promoted.
637                         mir::Operand::Copy(
638                             Place {
639                                 base: PlaceBase::Static(box Static {
640                                     kind: StaticKind::Promoted(promoted, _),
641                                     ty,
642                                     def_id: _,
643                                 }),
644                                 projection: box [],
645                             }
646                         ) |
647                         mir::Operand::Move(
648                             Place {
649                                 base: PlaceBase::Static(box Static {
650                                     kind: StaticKind::Promoted(promoted, _),
651                                     ty,
652                                     def_id: _,
653                                 }),
654                                 projection: box [],
655                             }
656                         ) => {
657                             let param_env = ty::ParamEnv::reveal_all();
658                             let cid = mir::interpret::GlobalId {
659                                 instance: self.instance,
660                                 promoted: Some(promoted),
661                             };
662                             let c = bx.tcx().const_eval(param_env.and(cid));
663                             let (llval, ty) = self.simd_shuffle_indices(
664                                 &bx,
665                                 terminator.source_info.span,
666                                 ty,
667                                 c,
668                             );
669                             return OperandRef {
670                                 val: Immediate(llval),
671                                 layout: bx.layout_of(ty),
672                             };
673
674                         }
675                         mir::Operand::Copy(_) |
676                         mir::Operand::Move(_) => {
677                             span_bug!(span, "shuffle indices must be constant");
678                         }
679                         mir::Operand::Constant(ref constant) => {
680                             let c = self.eval_mir_constant(constant);
681                             let (llval, ty) = self.simd_shuffle_indices(
682                                 &bx,
683                                 constant.span,
684                                 constant.literal.ty,
685                                 c,
686                             );
687                             return OperandRef {
688                                 val: Immediate(llval),
689                                 layout: bx.layout_of(ty)
690                             };
691                         }
692                     }
693                 }
694
695                 self.codegen_operand(&mut bx, arg)
696             }).collect();
697
698
699             bx.codegen_intrinsic_call(*instance.as_ref().unwrap(), &fn_ty, &args, dest,
700                                       terminator.source_info.span);
701
702             if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
703                 self.store_return(&mut bx, ret_dest, &fn_ty.ret, dst.llval);
704             }
705
706             if let Some((_, target)) = *destination {
707                 helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
708                 helper.funclet_br(self, &mut bx, target);
709             } else {
710                 bx.unreachable();
711             }
712
713             return;
714         }
715
716         // Split the rust-call tupled arguments off.
717         let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
718             let (tup, args) = args.split_last().unwrap();
719             (args, Some(tup))
720         } else {
721             (&args[..], None)
722         };
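        // For the `rust-call` ABI the trailing tuple argument is flattened: each of its
        // fields is passed separately via `codegen_arguments_untupled` below.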
723
724         'make_args: for (i, arg) in first_args.iter().enumerate() {
725             let mut op = self.codegen_operand(&mut bx, arg);
726
727             if let (0, Some(ty::InstanceDef::Virtual(_, idx))) = (i, def) {
728                 if let Pair(..) = op.val {
729                     // In the case of Rc<Self>, we need to explicitly pass a
730                     // *mut RcBox<Self> with a Scalar (not ScalarPair) ABI. This is a hack
731                     // that is understood elsewhere in the compiler as a method on
732                     // `dyn Trait`.
733                     // To get a `*mut RcBox<Self>`, we just keep unwrapping newtypes until
734                     // we get a value of a built-in pointer type
735                     'descend_newtypes: while !op.layout.ty.is_unsafe_ptr()
736                                     && !op.layout.ty.is_region_ptr()
737                     {
738                         'iter_fields: for i in 0..op.layout.fields.count() {
739                             let field = op.extract_field(&mut bx, i);
740                             if !field.layout.is_zst() {
741                                 // we found the one non-zero-sized field that is allowed
742                                 // now find *its* non-zero-sized field, or stop if it's a
743                                 // pointer
744                                 op = field;
745                                 continue 'descend_newtypes
746                             }
747                         }
748
749                         span_bug!(span, "receiver has no non-zero-sized fields {:?}", op);
750                     }
751
752                     // now that we have `*dyn Trait` or `&dyn Trait`, split it up into its
753                     // data pointer and vtable. Look up the method in the vtable, and pass
754                     // the data pointer as the first argument
755                     match op.val {
756                         Pair(data_ptr, meta) => {
757                             llfn = Some(meth::VirtualIndex::from_index(idx)
758                                 .get_fn(&mut bx, meta, &fn_ty));
759                             llargs.push(data_ptr);
760                             continue 'make_args
761                         }
762                         other => bug!("expected a Pair, got {:?}", other),
763                     }
764                 } else if let Ref(data_ptr, Some(meta), _) = op.val {
765                     // by-value dynamic dispatch
766                     llfn = Some(meth::VirtualIndex::from_index(idx)
767                         .get_fn(&mut bx, meta, &fn_ty));
768                     llargs.push(data_ptr);
769                     continue;
770                 } else {
771                     span_bug!(span, "can't codegen a virtual call on {:?}", op);
772                 }
773             }
774
775             // The callee needs to own the argument memory if we pass it
776             // by-ref, so make a local copy of non-immediate constants.
777             match (arg, op.val) {
778                 (&mir::Operand::Copy(_), Ref(_, None, _)) |
779                 (&mir::Operand::Constant(_), Ref(_, None, _)) => {
780                     let tmp = PlaceRef::alloca(&mut bx, op.layout);
781                     op.val.store(&mut bx, tmp);
782                     op.val = Ref(tmp.llval, None, tmp.align);
783                 }
784                 _ => {}
785             }
786
787             self.codegen_argument(&mut bx, op, &mut llargs, &fn_ty.args[i]);
788         }
789         if let Some(tup) = untuple {
790             self.codegen_arguments_untupled(&mut bx, tup, &mut llargs,
791                 &fn_ty.args[first_args.len()..])
792         }
793
794         let fn_ptr = match (llfn, instance) {
795             (Some(llfn), _) => llfn,
796             (None, Some(instance)) => bx.get_fn_addr(instance),
797             _ => span_bug!(span, "no llfn for call"),
798         };
799
800         if let Some((_, target)) = destination.as_ref() {
801             helper.maybe_sideeffect(self.mir, &mut bx, &[*target]);
802         }
803         helper.do_call(self, &mut bx, fn_ty, fn_ptr, &llargs,
804                        destination.as_ref().map(|&(_, target)| (ret_dest, target)),
805                        cleanup);
806     }
807 }
808
809 impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
810     pub fn codegen_block(
811         &mut self,
812         bb: mir::BasicBlock,
813     ) {
814         let mut bx = self.build_block(bb);
815         let data = &self.mir[bb];
816
817         debug!("codegen_block({:?}={:?})", bb, data);
818
819         for statement in &data.statements {
820             bx = self.codegen_statement(bx, statement);
821         }
822
823         self.codegen_terminator(bx, bb, data.terminator());
824     }
825
826     fn codegen_terminator(
827         &mut self,
828         mut bx: Bx,
829         bb: mir::BasicBlock,
830         terminator: &mir::Terminator<'tcx>
831     ) {
832         debug!("codegen_terminator: {:?}", terminator);
833
834         // Create the cleanup bundle, if needed.
835         let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);
836         let helper = TerminatorCodegenHelper {
837             bb: &bb, terminator, funclet_bb
838         };
839
840         self.set_debug_loc(&mut bx, terminator.source_info);
841         match terminator.kind {
842             mir::TerminatorKind::Resume => {
843                 self.codegen_resume_terminator(helper, bx)
844             }
845
846             mir::TerminatorKind::Abort => {
847                 bx.abort();
848                 bx.unreachable();
849             }
850
851             mir::TerminatorKind::Goto { target } => {
852                 helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
853                 helper.funclet_br(self, &mut bx, target);
854             }
855
856             mir::TerminatorKind::SwitchInt {
857                 ref discr, switch_ty, ref values, ref targets
858             } => {
859                 self.codegen_switchint_terminator(helper, bx, discr, switch_ty,
860                                                   values, targets);
861             }
862
863             mir::TerminatorKind::Return => {
864                 self.codegen_return_terminator(bx);
865             }
866
867             mir::TerminatorKind::Unreachable => {
868                 bx.unreachable();
869             }
870
871             mir::TerminatorKind::Drop { ref location, target, unwind } => {
872                 self.codegen_drop_terminator(helper, bx, location, target, unwind);
873             }
874
875             mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
876                 self.codegen_assert_terminator(helper, bx, terminator, cond,
877                                                expected, msg, target, cleanup);
878             }
879
880             mir::TerminatorKind::DropAndReplace { .. } => {
881                 bug!("undesugared DropAndReplace in codegen: {:?}", terminator);
882             }
883
884             mir::TerminatorKind::Call {
885                 ref func,
886                 ref args,
887                 ref destination,
888                 cleanup,
889                 from_hir_call: _
890             } => {
891                 self.codegen_call_terminator(helper, bx, terminator, func,
892                                              args, destination, cleanup);
893             }
894             mir::TerminatorKind::GeneratorDrop |
895             mir::TerminatorKind::Yield { .. } => bug!("generator ops in codegen"),
896             mir::TerminatorKind::FalseEdges { .. } |
897             mir::TerminatorKind::FalseUnwind { .. } => bug!("borrowck false edges in codegen"),
898         }
899     }
900
901     fn codegen_argument(
902         &mut self,
903         bx: &mut Bx,
904         op: OperandRef<'tcx, Bx::Value>,
905         llargs: &mut Vec<Bx::Value>,
906         arg: &ArgType<'tcx, Ty<'tcx>>
907     ) {
908         // Fill padding with undef value, where applicable.
909         if let Some(ty) = arg.pad {
910             llargs.push(bx.const_undef(bx.reg_backend_type(&ty)))
911         }
912
913         if arg.is_ignore() {
914             return;
915         }
916
917         if let PassMode::Pair(..) = arg.mode {
918             match op.val {
919                 Pair(a, b) => {
920                     llargs.push(a);
921                     llargs.push(b);
922                     return;
923                 }
924                 _ => bug!("codegen_argument: {:?} invalid for pair argument", op)
925             }
926         } else if arg.is_unsized_indirect() {
927             match op.val {
928                 Ref(a, Some(b), _) => {
929                     llargs.push(a);
930                     llargs.push(b);
931                     return;
932                 }
933                 _ => bug!("codegen_argument: {:?} invalid for unsized indirect argument", op)
934             }
935         }
936
937         // Force by-ref if we have to load through a cast pointer.
938         let (mut llval, align, by_ref) = match op.val {
939             Immediate(_) | Pair(..) => {
940                 match arg.mode {
941                     PassMode::Indirect(..) | PassMode::Cast(_) => {
942                         let scratch = PlaceRef::alloca(bx, arg.layout);
943                         op.val.store(bx, scratch);
944                         (scratch.llval, scratch.align, true)
945                     }
946                     _ => {
947                         (op.immediate_or_packed_pair(bx), arg.layout.align.abi, false)
948                     }
949                 }
950             }
951             Ref(llval, _, align) => {
952                 if arg.is_indirect() && align < arg.layout.align.abi {
953                     // e.g. `foo(packed.large_field)`: we can't pass the (unaligned) field
954                     // directly, so copy it into an aligned temporary. As of Rust 1.16 we
955                     // likely only pass temporaries here anyway, but better not to leave
956                     // scary latent bugs around.
956
957                     let scratch = PlaceRef::alloca(bx, arg.layout);
958                     base::memcpy_ty(bx, scratch.llval, scratch.align, llval, align,
959                                     op.layout, MemFlags::empty());
960                     (scratch.llval, scratch.align, true)
961                 } else {
962                     (llval, align, true)
963                 }
964             }
965         };
966
967         if by_ref && !arg.is_indirect() {
968             // Have to load the argument, maybe while casting it.
969             if let PassMode::Cast(ty) = arg.mode {
970                 let addr = bx.pointercast(llval, bx.type_ptr_to(
971                     bx.cast_backend_type(&ty))
972                 );
973                 llval = bx.load(addr, align.min(arg.layout.align.abi));
974             } else {
975                 // We can't use `PlaceRef::load` here because the argument
976                 // may have a type we don't treat as immediate, but the ABI
977                 // used for this call is passing it by-value. In that case,
978                 // the load would just produce `OperandValue::Ref` instead
979                 // of the `OperandValue::Immediate` we need for the call.
980                 llval = bx.load(llval, align);
981                 if let layout::Abi::Scalar(ref scalar) = arg.layout.abi {
982                     if scalar.is_bool() {
983                         bx.range_metadata(llval, 0..2);
984                     }
985                 }
986                 // We store bools as `i8` so we need to truncate to `i1`.
987                 llval = base::to_immediate(bx, llval, arg.layout);
988             }
989         }
990
991         llargs.push(llval);
992     }
993
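    /// Passes the fields of a tuple operand as individual arguments; used for the
    /// flattened trailing tuple of the `rust-call` ABI.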
994     fn codegen_arguments_untupled(
995         &mut self,
996         bx: &mut Bx,
997         operand: &mir::Operand<'tcx>,
998         llargs: &mut Vec<Bx::Value>,
999         args: &[ArgType<'tcx, Ty<'tcx>>]
1000     ) {
1001         let tuple = self.codegen_operand(bx, operand);
1002
1003         // Handle both by-ref and immediate tuples.
1004         if let Ref(llval, None, align) = tuple.val {
1005             let tuple_ptr = PlaceRef::new_sized_aligned(llval, tuple.layout, align);
1006             for i in 0..tuple.layout.fields.count() {
1007                 let field_ptr = tuple_ptr.project_field(bx, i);
1008                 let field = bx.load_operand(field_ptr);
1009                 self.codegen_argument(bx, field, llargs, &args[i]);
1010             }
1011         } else if let Ref(_, Some(_), _) = tuple.val {
1012             bug!("closure arguments must be sized")
1013         } else {
1014             // If the tuple is immediate, the elements are as well.
1015             for i in 0..tuple.layout.fields.count() {
1016                 let op = tuple.extract_field(bx, i);
1017                 self.codegen_argument(bx, op, llargs, &args[i]);
1018             }
1019         }
1020     }
1021
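    /// Returns (allocating it on first use) the per-function stack slot used to stash
    /// the landing pad's exception pointer (`*mut u8`) and selector (`i32`) across blocks.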
1022     fn get_personality_slot(
1023         &mut self,
1024         bx: &mut Bx
1025     ) -> PlaceRef<'tcx, Bx::Value> {
1026         let cx = bx.cx();
1027         if let Some(slot) = self.personality_slot {
1028             slot
1029         } else {
1030             let layout = cx.layout_of(cx.tcx().intern_tup(&[
1031                 cx.tcx().mk_mut_ptr(cx.tcx().types.u8),
1032                 cx.tcx().types.i32
1033             ]));
1034             let slot = PlaceRef::alloca(bx, layout);
1035             self.personality_slot = Some(slot);
1036             slot
1037         }
1038     }
1039
1040     /// Returns the landing-pad wrapper around the given basic block.
1041     ///
1042     /// No-op in MSVC SEH scheme.
1043     fn landing_pad_to(
1044         &mut self,
1045         target_bb: mir::BasicBlock
1046     ) -> Bx::BasicBlock {
1047         if let Some(block) = self.landing_pads[target_bb] {
1048             return block;
1049         }
1050
1051         let block = self.blocks[target_bb];
1052         let landing_pad = self.landing_pad_uncached(block);
1053         self.landing_pads[target_bb] = Some(landing_pad);
1054         landing_pad
1055     }
1056
1057     fn landing_pad_uncached(
1058         &mut self,
1059         target_bb: Bx::BasicBlock
1060     ) -> Bx::BasicBlock {
1061         if base::wants_msvc_seh(self.cx.sess()) {
1062             span_bug!(self.mir.span, "landing pad was not inserted?")
1063         }
1064
1065         let mut bx = self.new_block("cleanup");
1066
1067         let llpersonality = self.cx.eh_personality();
1068         let llretty = self.landing_pad_type();
1069         let lp = bx.landing_pad(llretty, llpersonality, 1);
1070         bx.set_cleanup(lp);
1071
1072         let slot = self.get_personality_slot(&mut bx);
1073         slot.storage_live(&mut bx);
1074         Pair(bx.extract_value(lp, 0), bx.extract_value(lp, 1)).store(&mut bx, slot);
1075
1076         bx.br(target_bb);
1077         bx.llbb()
1078     }
1079
1080     fn landing_pad_type(&self) -> Bx::Type {
1081         let cx = self.cx;
1082         cx.type_struct(&[cx.type_i8p(), cx.type_i32()], false)
1083     }
1084
1085     fn unreachable_block(
1086         &mut self
1087     ) -> Bx::BasicBlock {
1088         self.unreachable_block.unwrap_or_else(|| {
1089             let mut bx = self.new_block("unreachable");
1090             bx.unreachable();
1091             self.unreachable_block = Some(bx.llbb());
1092             bx.llbb()
1093         })
1094     }
1095
1096     pub fn new_block(&self, name: &str) -> Bx {
1097         Bx::new_block(self.cx, self.llfn, name)
1098     }
1099
1100     pub fn build_block(
1101         &self,
1102         bb: mir::BasicBlock
1103     ) -> Bx {
1104         let mut bx = Bx::with_cx(self.cx);
1105         bx.position_at_end(self.blocks[bb]);
1106         bx
1107     }
1108
1109     fn make_return_dest(
1110         &mut self,
1111         bx: &mut Bx,
1112         dest: &mir::Place<'tcx>,
1113         fn_ret: &ArgType<'tcx, Ty<'tcx>>,
1114         llargs: &mut Vec<Bx::Value>, is_intrinsic: bool
1115     ) -> ReturnDest<'tcx, Bx::Value> {
1116         // If the return is ignored, we can just return a do-nothing `ReturnDest`.
1117         if fn_ret.is_ignore() {
1118             return ReturnDest::Nothing;
1119         }
1120         let dest = if let mir::Place {
1121             base: mir::PlaceBase::Local(index),
1122             projection: box [],
1123         } = *dest {
1124             match self.locals[index] {
1125                 LocalRef::Place(dest) => dest,
1126                 LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
1127                 LocalRef::Operand(None) => {
1128                     // Handle temporary places, specifically `Operand` ones, as
1129                     // they don't have `alloca`s.
1130                     return if fn_ret.is_indirect() {
1131                         // Odd, but possible, case: we have an operand temporary,
1132                         // but the calling convention has an indirect return.
1133                         let tmp = PlaceRef::alloca(bx, fn_ret.layout);
1134                         tmp.storage_live(bx);
1135                         llargs.push(tmp.llval);
1136                         ReturnDest::IndirectOperand(tmp, index)
1137                     } else if is_intrinsic {
1138                         // Currently, intrinsics always need a location to store
1139                         // the result, so we create a temporary `alloca` for the
1140                         // result.
1141                         let tmp = PlaceRef::alloca(bx, fn_ret.layout);
1142                         tmp.storage_live(bx);
1143                         ReturnDest::IndirectOperand(tmp, index)
1144                     } else {
1145                         ReturnDest::DirectOperand(index)
1146                     };
1147                 }
1148                 LocalRef::Operand(Some(_)) => {
1149                     bug!("place local already assigned to");
1150                 }
1151             }
1152         } else {
1153             self.codegen_place(bx, &mir::PlaceRef {
1154                 base: &dest.base,
1155                 projection: &dest.projection,
1156             })
1157         };
1158         if fn_ret.is_indirect() {
1159             if dest.align < dest.layout.align.abi {
1160                 // Currently, MIR code generation does not create calls
1161                 // that store directly to fields of packed structs (in
1162                 // fact, the calls it creates write only to temps).
1163                 //
1164                 // If someone changes that, please update this code path
1165                 // to create a temporary.
1166                 span_bug!(self.mir.span, "can't directly store to unaligned value");
1167             }
1168             llargs.push(dest.llval);
1169             ReturnDest::Nothing
1170         } else {
1171             ReturnDest::Store(dest)
1172         }
1173     }
1174
1175     fn codegen_transmute(
1176         &mut self,
1177         bx: &mut Bx,
1178         src: &mir::Operand<'tcx>,
1179         dst: &mir::Place<'tcx>
1180     ) {
1181         if let mir::Place {
1182             base: mir::PlaceBase::Local(index),
1183             projection: box [],
1184         } = *dst {
1185             match self.locals[index] {
1186                 LocalRef::Place(place) => self.codegen_transmute_into(bx, src, place),
1187                 LocalRef::UnsizedPlace(_) => bug!("transmute must not involve unsized locals"),
1188                 LocalRef::Operand(None) => {
1189                     let dst_layout = bx.layout_of(self.monomorphized_place_ty(&dst.as_ref()));
1190                     assert!(!dst_layout.ty.has_erasable_regions());
1191                     let place = PlaceRef::alloca(bx, dst_layout);
1192                     place.storage_live(bx);
1193                     self.codegen_transmute_into(bx, src, place);
1194                     let op = bx.load_operand(place);
1195                     place.storage_dead(bx);
1196                     self.locals[index] = LocalRef::Operand(Some(op));
1197                 }
1198                 LocalRef::Operand(Some(op)) => {
1199                     assert!(op.layout.is_zst(),
1200                             "assigning to initialized SSA temp");
1201                 }
1202             }
1203         } else {
1204             let dst = self.codegen_place(bx, &dst.as_ref());
1205             self.codegen_transmute_into(bx, src, dst);
1206         }
1207     }
1208
1209     fn codegen_transmute_into(
1210         &mut self,
1211         bx: &mut Bx,
1212         src: &mir::Operand<'tcx>,
1213         dst: PlaceRef<'tcx, Bx::Value>
1214     ) {
1215         let src = self.codegen_operand(bx, src);
1216         let llty = bx.backend_type(src.layout);
1217         let cast_ptr = bx.pointercast(dst.llval, bx.type_ptr_to(llty));
1218         let align = src.layout.align.abi.min(dst.align);
1219         src.val.store(bx, PlaceRef::new_sized_aligned(cast_ptr, src.layout, align));
1220     }
1221
1222
1223     // Stores the return value of a function call into its final location.
1224     fn store_return(
1225         &mut self,
1226         bx: &mut Bx,
1227         dest: ReturnDest<'tcx, Bx::Value>,
1228         ret_ty: &ArgType<'tcx, Ty<'tcx>>,
1229         llval: Bx::Value
1230     ) {
1231         use self::ReturnDest::*;
1232
1233         match dest {
1234             Nothing => (),
1235             Store(dst) => bx.store_arg_ty(&ret_ty, llval, dst),
1236             IndirectOperand(tmp, index) => {
1237                 let op = bx.load_operand(tmp);
1238                 tmp.storage_dead(bx);
1239                 self.locals[index] = LocalRef::Operand(Some(op));
1240             }
1241             DirectOperand(index) => {
1242                 // If there is a cast, we have to store and reload.
1243                 let op = if let PassMode::Cast(_) = ret_ty.mode {
1244                     let tmp = PlaceRef::alloca(bx, ret_ty.layout);
1245                     tmp.storage_live(bx);
1246                     bx.store_arg_ty(&ret_ty, llval, tmp);
1247                     let op = bx.load_operand(tmp);
1248                     tmp.storage_dead(bx);
1249                     op
1250                 } else {
1251                     OperandRef::from_immediate_or_packed_pair(bx, llval, ret_ty.layout)
1252                 };
1253                 self.locals[index] = LocalRef::Operand(Some(op));
1254             }
1255         }
1256     }
1257 }
1258
1259 enum ReturnDest<'tcx, V> {
1260     // Do nothing; the return value is indirect or ignored.
1261     Nothing,
1262     // Store the return value to the pointer.
1263     Store(PlaceRef<'tcx, V>),
1264     // Store an indirect return value to an operand local place.
1265     IndirectOperand(PlaceRef<'tcx, V>, mir::Local),
1266     // Store a direct return value to an operand local place.
1267     DirectOperand(mir::Local)
1268 }