1 use super::operand::OperandRef;
2 use super::operand::OperandValue::{Immediate, Pair, Ref};
3 use super::place::PlaceRef;
4 use super::{FunctionCx, LocalRef};
5
6 use crate::base;
7 use crate::common::{self, IntPredicate};
8 use crate::meth;
9 use crate::traits::*;
10 use crate::MemFlags;
11
12 use rustc_ast as ast;
13 use rustc_hir::lang_items::LangItem;
14 use rustc_index::vec::Idx;
15 use rustc_middle::mir;
16 use rustc_middle::mir::interpret::{AllocId, ConstValue, Pointer, Scalar};
17 use rustc_middle::mir::AssertKind;
18 use rustc_middle::ty::layout::{FnAbiExt, HasTyCtxt};
19 use rustc_middle::ty::{self, Instance, Ty, TypeFoldable};
20 use rustc_span::source_map::Span;
21 use rustc_span::{sym, Symbol};
22 use rustc_target::abi::call::{ArgAbi, FnAbi, PassMode};
23 use rustc_target::abi::{self, LayoutOf};
24 use rustc_target::spec::abi::Abi;
25
26 use std::borrow::Cow;
27
28 /// Used by `FunctionCx::codegen_terminator` for emitting common patterns,
29 /// e.g., creating a basic block, calling a function, etc.
30 struct TerminatorCodegenHelper<'tcx> {
31     bb: mir::BasicBlock,
32     terminator: &'tcx mir::Terminator<'tcx>,
33     funclet_bb: Option<mir::BasicBlock>,
34 }
35
36 impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
37     /// Returns the associated funclet from `FunctionCx::funclets` for the
38     /// `funclet_bb` member if it is not `None`.
39     fn funclet<'b, Bx: BuilderMethods<'a, 'tcx>>(
40         &self,
41         fx: &'b mut FunctionCx<'a, 'tcx, Bx>,
42     ) -> Option<&'b Bx::Funclet> {
43         match self.funclet_bb {
44             Some(funcl) => fx.funclets[funcl].as_ref(),
45             None => None,
46         }
47     }
48
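    /// Returns the backend basic block for `target`, plus a flag that is `true`
    /// when the jump crosses into a different MSVC funclet and therefore has to
    /// go through a `cleanupret` instead of a plain branch.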
49     fn lltarget<Bx: BuilderMethods<'a, 'tcx>>(
50         &self,
51         fx: &mut FunctionCx<'a, 'tcx, Bx>,
52         target: mir::BasicBlock,
53     ) -> (Bx::BasicBlock, bool) {
54         let span = self.terminator.source_info.span;
55         let lltarget = fx.blocks[target];
56         let target_funclet = fx.cleanup_kinds[target].funclet_bb(target);
57         match (self.funclet_bb, target_funclet) {
58             (None, None) => (lltarget, false),
59             (Some(f), Some(t_f)) if f == t_f || !base::wants_msvc_seh(fx.cx.tcx().sess) => {
60                 (lltarget, false)
61             }
62             // jump *into* cleanup - need a landing pad on GNU-style (non-MSVC) unwinding
63             (None, Some(_)) => (fx.landing_pad_to(target), false),
64             (Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", self.terminator),
65             (Some(_), Some(_)) => (fx.landing_pad_to(target), true),
66         }
67     }
68
69     /// Create a basic block.
70     fn llblock<Bx: BuilderMethods<'a, 'tcx>>(
71         &self,
72         fx: &mut FunctionCx<'a, 'tcx, Bx>,
73         target: mir::BasicBlock,
74     ) -> Bx::BasicBlock {
75         let (lltarget, is_cleanupret) = self.lltarget(fx, target);
76         if is_cleanupret {
77             // MSVC cross-funclet jump - need a trampoline
78
79             debug!("llblock: creating cleanup trampoline for {:?}", target);
80             let name = &format!("{:?}_cleanup_trampoline_{:?}", self.bb, target);
81             let mut trampoline = fx.new_block(name);
82             trampoline.cleanup_ret(self.funclet(fx).unwrap(), Some(lltarget));
83             trampoline.llbb()
84         } else {
85             lltarget
86         }
87     }
88
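    /// Emits an unconditional jump to `target`, using a `cleanupret` rather than
    /// a plain `br` when leaving the current MSVC funclet.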
89     fn funclet_br<Bx: BuilderMethods<'a, 'tcx>>(
90         &self,
91         fx: &mut FunctionCx<'a, 'tcx, Bx>,
92         bx: &mut Bx,
93         target: mir::BasicBlock,
94     ) {
95         let (lltarget, is_cleanupret) = self.lltarget(fx, target);
96         if is_cleanupret {
97             // Micro-optimization: emit a `cleanupret` directly rather than
98             // a branch to a separate trampoline block.
99             bx.cleanup_ret(self.funclet(fx).unwrap(), Some(lltarget));
100         } else {
101             bx.br(lltarget);
102         }
103     }
104
105     /// Call `fn_ptr` of `fn_abi` with the arguments `llargs`, the optional
106     /// return destination `destination` and the cleanup block `cleanup`.
107     fn do_call<Bx: BuilderMethods<'a, 'tcx>>(
108         &self,
109         fx: &mut FunctionCx<'a, 'tcx, Bx>,
110         bx: &mut Bx,
111         fn_abi: FnAbi<'tcx, Ty<'tcx>>,
112         fn_ptr: Bx::Value,
113         llargs: &[Bx::Value],
114         destination: Option<(ReturnDest<'tcx, Bx::Value>, mir::BasicBlock)>,
115         cleanup: Option<mir::BasicBlock>,
116     ) {
117         // If there is a cleanup block and the function we're calling can unwind, then
118         // do an invoke, otherwise do a call.
119         if let Some(cleanup) = cleanup.filter(|_| fn_abi.can_unwind) {
120             let ret_bx = if let Some((_, target)) = destination {
121                 fx.blocks[target]
122             } else {
123                 fx.unreachable_block()
124             };
125             let invokeret =
126                 bx.invoke(fn_ptr, &llargs, ret_bx, self.llblock(fx, cleanup), self.funclet(fx));
127             bx.apply_attrs_callsite(&fn_abi, invokeret);
128
129             if let Some((ret_dest, target)) = destination {
130                 let mut ret_bx = fx.build_block(target);
131                 fx.set_debug_loc(&mut ret_bx, self.terminator.source_info);
132                 fx.store_return(&mut ret_bx, ret_dest, &fn_abi.ret, invokeret);
133             }
134         } else {
135             let llret = bx.call(fn_ptr, &llargs, self.funclet(fx));
136             bx.apply_attrs_callsite(&fn_abi, llret);
137             if fx.mir[self.bb].is_cleanup {
138                 // Cleanup is always the cold path. Don't inline
139                 // drop glue. Also, when there is a deeply-nested
140                 // struct, there are "symmetry" issues that cause
141                 // exponential inlining - see issue #41696.
142                 bx.do_not_inline(llret);
143             }
144
145             if let Some((ret_dest, target)) = destination {
146                 fx.store_return(bx, ret_dest, &fn_abi.ret, llret);
147                 self.funclet_br(fx, bx, target);
148             } else {
149                 bx.unreachable();
150             }
151         }
152     }
153
154     /// Generates a `sideeffect` intrinsic call if jumping to any of the
155     /// targets can form a loop.
156     fn maybe_sideeffect<Bx: BuilderMethods<'a, 'tcx>>(
157         &self,
158         mir: &'tcx mir::Body<'tcx>,
159         bx: &mut Bx,
160         targets: &[mir::BasicBlock],
161     ) {
162         if bx.tcx().sess.opts.debugging_opts.insert_sideeffect {
163             if targets.iter().any(|&target| {
164                 target <= self.bb
165                     && target.start_location().is_predecessor_of(self.bb.start_location(), mir)
166             }) {
167                 bx.sideeffect();
168             }
169         }
170     }
171 }
172
173 /// Codegen implementations for some terminator variants.
174 impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
175     /// Generates code for a `Resume` terminator.
176     fn codegen_resume_terminator(&mut self, helper: TerminatorCodegenHelper<'tcx>, mut bx: Bx) {
177         if let Some(funclet) = helper.funclet(self) {
178             bx.cleanup_ret(funclet, None);
179         } else {
180             let slot = self.get_personality_slot(&mut bx);
181             let lp0 = slot.project_field(&mut bx, 0);
182             let lp0 = bx.load_operand(lp0).immediate();
183             let lp1 = slot.project_field(&mut bx, 1);
184             let lp1 = bx.load_operand(lp1).immediate();
185             slot.storage_dead(&mut bx);
186
187             let mut lp = bx.const_undef(self.landing_pad_type());
188             lp = bx.insert_value(lp, lp0, 0);
189             lp = bx.insert_value(lp, lp1, 1);
190             bx.resume(lp);
191         }
192     }
193
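    /// Generates code for a `SwitchInt` terminator, emitting a conditional
    /// branch when there are exactly two targets and a `switch` otherwise.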
194     fn codegen_switchint_terminator(
195         &mut self,
196         helper: TerminatorCodegenHelper<'tcx>,
197         mut bx: Bx,
198         discr: &mir::Operand<'tcx>,
199         switch_ty: Ty<'tcx>,
200         values: &Cow<'tcx, [u128]>,
201         targets: &Vec<mir::BasicBlock>,
202     ) {
203         let discr = self.codegen_operand(&mut bx, &discr);
204         // `switch_ty` is redundant, sanity-check that.
205         assert_eq!(discr.layout.ty, switch_ty);
206         if targets.len() == 2 {
207             // If there are two targets, emit br instead of switch
208             let lltrue = helper.llblock(self, targets[0]);
209             let llfalse = helper.llblock(self, targets[1]);
210             if switch_ty == bx.tcx().types.bool {
211                 helper.maybe_sideeffect(self.mir, &mut bx, targets.as_slice());
212                 // Don't generate trivial icmps when switching on bool
213                 if let [0] = values[..] {
214                     bx.cond_br(discr.immediate(), llfalse, lltrue);
215                 } else {
216                     assert_eq!(&values[..], &[1]);
217                     bx.cond_br(discr.immediate(), lltrue, llfalse);
218                 }
219             } else {
220                 let switch_llty = bx.immediate_backend_type(bx.layout_of(switch_ty));
221                 let llval = bx.const_uint_big(switch_llty, values[0]);
222                 let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval);
223                 helper.maybe_sideeffect(self.mir, &mut bx, targets.as_slice());
224                 bx.cond_br(cmp, lltrue, llfalse);
225             }
226         } else {
227             helper.maybe_sideeffect(self.mir, &mut bx, targets.as_slice());
228             let (otherwise, targets) = targets.split_last().unwrap();
229             bx.switch(
230                 discr.immediate(),
231                 helper.llblock(self, *otherwise),
232                 values
233                     .iter()
234                     .zip(targets)
235                     .map(|(&value, target)| (value, helper.llblock(self, *target))),
236             );
237         }
238     }
239
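    /// Generates code for a `Return` terminator, handling C-variadic functions,
    /// uninhabited return types, and the different return-value pass modes.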
240     fn codegen_return_terminator(&mut self, mut bx: Bx) {
241         // Call `va_end` if this is the definition of a C-variadic function.
242         if self.fn_abi.c_variadic {
243             // The `VaList` "spoofed" argument is just after all the real arguments.
244             let va_list_arg_idx = self.fn_abi.args.len();
245             match self.locals[mir::Local::new(1 + va_list_arg_idx)] {
246                 LocalRef::Place(va_list) => {
247                     bx.va_end(va_list.llval);
248                 }
249                 _ => bug!("C-variadic function must have a `VaList` place"),
250             }
251         }
252         if self.fn_abi.ret.layout.abi.is_uninhabited() {
253             // Functions with uninhabited return values are marked `noreturn`,
254         // so we should make sure that we never actually return.
255             // We play it safe by using a well-defined `abort`, but we could go for immediate UB
256             // if that turns out to be helpful.
257             bx.abort();
258             // `abort` does not terminate the block, so we still need to generate
259             // an `unreachable` terminator after it.
260             bx.unreachable();
261             return;
262         }
263         let llval = match self.fn_abi.ret.mode {
264             PassMode::Ignore | PassMode::Indirect(..) => {
265                 bx.ret_void();
266                 return;
267             }
268
269             PassMode::Direct(_) | PassMode::Pair(..) => {
270                 let op = self.codegen_consume(&mut bx, mir::Place::return_place().as_ref());
271                 if let Ref(llval, _, align) = op.val {
272                     bx.load(llval, align)
273                 } else {
274                     op.immediate_or_packed_pair(&mut bx)
275                 }
276             }
277
278             PassMode::Cast(cast_ty) => {
279                 let op = match self.locals[mir::RETURN_PLACE] {
280                     LocalRef::Operand(Some(op)) => op,
281                     LocalRef::Operand(None) => bug!("use of return before def"),
282                     LocalRef::Place(cg_place) => OperandRef {
283                         val: Ref(cg_place.llval, None, cg_place.align),
284                         layout: cg_place.layout,
285                     },
286                     LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
287                 };
288                 let llslot = match op.val {
289                     Immediate(_) | Pair(..) => {
290                         let scratch = PlaceRef::alloca(&mut bx, self.fn_abi.ret.layout);
291                         op.val.store(&mut bx, scratch);
292                         scratch.llval
293                     }
294                     Ref(llval, _, align) => {
295                         assert_eq!(align, op.layout.align.abi, "return place is unaligned!");
296                         llval
297                     }
298                 };
299                 let addr = bx.pointercast(llslot, bx.type_ptr_to(bx.cast_backend_type(&cast_ty)));
300                 bx.load(addr, self.fn_abi.ret.layout.align.abi)
301             }
302         };
303         bx.ret(llval);
304     }
305
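    /// Generates code for a `Drop` terminator: a no-op if the type has no drop
    /// glue, otherwise a call to `drop_in_place` (looked up through the vtable
    /// for trait objects).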
306     fn codegen_drop_terminator(
307         &mut self,
308         helper: TerminatorCodegenHelper<'tcx>,
309         mut bx: Bx,
310         location: mir::Place<'tcx>,
311         target: mir::BasicBlock,
312         unwind: Option<mir::BasicBlock>,
313     ) {
314         let ty = location.ty(self.mir, bx.tcx()).ty;
315         let ty = self.monomorphize(&ty);
316         let drop_fn = Instance::resolve_drop_in_place(bx.tcx(), ty);
317
318         if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
319             // we don't actually need to drop anything.
320             helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
321             helper.funclet_br(self, &mut bx, target);
322             return;
323         }
324
325         let place = self.codegen_place(&mut bx, location.as_ref());
326         let (args1, args2);
327         let mut args = if let Some(llextra) = place.llextra {
328             args2 = [place.llval, llextra];
329             &args2[..]
330         } else {
331             args1 = [place.llval];
332             &args1[..]
333         };
334         let (drop_fn, fn_abi) = match ty.kind {
335             // FIXME(eddyb) perhaps move some of this logic into
336             // `Instance::resolve_drop_in_place`?
337             ty::Dynamic(..) => {
338                 let virtual_drop = Instance {
339                     def: ty::InstanceDef::Virtual(drop_fn.def_id(), 0),
340                     substs: drop_fn.substs,
341                 };
342                 let fn_abi = FnAbi::of_instance(&bx, virtual_drop, &[]);
343                 let vtable = args[1];
344                 args = &args[..1];
345                 (meth::DESTRUCTOR.get_fn(&mut bx, vtable, &fn_abi), fn_abi)
346             }
347             _ => (bx.get_fn_addr(drop_fn), FnAbi::of_instance(&bx, drop_fn, &[])),
348         };
349         helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
350         helper.do_call(
351             self,
352             &mut bx,
353             fn_abi,
354             drop_fn,
355             args,
356             Some((ReturnDest::Nothing, target)),
357             unwind,
358         );
359     }
360
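    /// Generates code for an `Assert` terminator: a conditional branch to the
    /// success target plus a cold block that calls the matching panic lang item.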
361     fn codegen_assert_terminator(
362         &mut self,
363         helper: TerminatorCodegenHelper<'tcx>,
364         mut bx: Bx,
365         terminator: &mir::Terminator<'tcx>,
366         cond: &mir::Operand<'tcx>,
367         expected: bool,
368         msg: &mir::AssertMessage<'tcx>,
369         target: mir::BasicBlock,
370         cleanup: Option<mir::BasicBlock>,
371     ) {
372         let span = terminator.source_info.span;
373         let cond = self.codegen_operand(&mut bx, cond).immediate();
374         let mut const_cond = bx.const_to_opt_u128(cond, false).map(|c| c == 1);
375
376         // This case can currently arise only from functions marked
377         // with #[rustc_inherit_overflow_checks] and inlined from
378         // another crate (mostly core::num generic/#[inline] fns),
379         // while the current crate doesn't use overflow checks.
380         // NOTE: Unlike binops, negation doesn't have its own
381         // checked operation, just a comparison with the minimum
382         // value, so we have to check for the assert message.
383         if !bx.check_overflow() {
384             if let AssertKind::OverflowNeg(_) = *msg {
385                 const_cond = Some(expected);
386             }
387         }
388
389         // Don't codegen the panic block if success is known.
390         if const_cond == Some(expected) {
391             helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
392             helper.funclet_br(self, &mut bx, target);
393             return;
394         }
395
396         // Pass the condition through llvm.expect for branch hinting.
397         let cond = bx.expect(cond, expected);
398
399         // Create the failure block and the conditional branch to it.
400         let lltarget = helper.llblock(self, target);
401         let panic_block = self.new_block("panic");
402         helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
403         if expected {
404             bx.cond_br(cond, lltarget, panic_block.llbb());
405         } else {
406             bx.cond_br(cond, panic_block.llbb(), lltarget);
407         }
408
409         // After this point, bx is the block for the call to panic.
410         bx = panic_block;
411         self.set_debug_loc(&mut bx, terminator.source_info);
412
413         // Get the location information.
414         let location = self.get_caller_location(&mut bx, span).immediate();
415
416         // Put together the arguments to the panic entry point.
417         let (lang_item, args) = match msg {
418             AssertKind::BoundsCheck { ref len, ref index } => {
419                 let len = self.codegen_operand(&mut bx, len).immediate();
420                 let index = self.codegen_operand(&mut bx, index).immediate();
421                 // It's `fn panic_bounds_check(index: usize, len: usize)`,
422                 // and `#[track_caller]` adds an implicit third argument.
423                 (LangItem::PanicBoundsCheck, vec![index, len, location])
424             }
425             _ => {
426                 let msg_str = Symbol::intern(msg.description());
427                 let msg = bx.const_str(msg_str);
428                 // It's `pub fn panic(expr: &str)`, with the wide reference being passed
429                 // as two arguments, and `#[track_caller]` adds an implicit third argument.
430                 (LangItem::Panic, vec![msg.0, msg.1, location])
431             }
432         };
433
434         // Obtain the panic entry point.
435         let def_id = common::langcall(bx.tcx(), Some(span), "", lang_item);
436         let instance = ty::Instance::mono(bx.tcx(), def_id);
437         let fn_abi = FnAbi::of_instance(&bx, instance, &[]);
438         let llfn = bx.get_fn_addr(instance);
439
440         // Codegen the actual panic invoke/call.
441         helper.do_call(self, &mut bx, fn_abi, llfn, &args, None, cleanup);
442     }
443
444     /// Returns `true` if this is indeed a panic intrinsic and codegen is done.
445     fn codegen_panic_intrinsic(
446         &mut self,
447         helper: &TerminatorCodegenHelper<'tcx>,
448         bx: &mut Bx,
449         intrinsic: Option<Symbol>,
450         instance: Option<Instance<'tcx>>,
451         span: Span,
452         destination: &Option<(mir::Place<'tcx>, mir::BasicBlock)>,
453         cleanup: Option<mir::BasicBlock>,
454     ) -> bool {
455         // Emit a panic or a no-op for `assert_*` intrinsics.
456         // These are intrinsics that compile to panics so that we can get a message
457         // which mentions the offending type, even from a const context.
458         #[derive(Debug, PartialEq)]
459         enum AssertIntrinsic {
460             Inhabited,
461             ZeroValid,
462             UninitValid,
463         }
464         let panic_intrinsic = intrinsic.and_then(|i| match i {
465             sym::assert_inhabited => Some(AssertIntrinsic::Inhabited),
466             sym::assert_zero_valid => Some(AssertIntrinsic::ZeroValid),
467             sym::assert_uninit_valid => Some(AssertIntrinsic::UninitValid),
468             _ => None,
469         });
470         if let Some(intrinsic) = panic_intrinsic {
471             use AssertIntrinsic::*;
472             let ty = instance.unwrap().substs.type_at(0);
473             let layout = bx.layout_of(ty);
474             let do_panic = match intrinsic {
475                 Inhabited => layout.abi.is_uninhabited(),
476                 // We unwrap as the error type is `!`.
477                 ZeroValid => !layout.might_permit_raw_init(bx, /*zero:*/ true).unwrap(),
478                 // We unwrap as the error type is `!`.
479                 UninitValid => !layout.might_permit_raw_init(bx, /*zero:*/ false).unwrap(),
480             };
481             if do_panic {
482                 let msg_str = if layout.abi.is_uninhabited() {
483                     // Use this error even for the other intrinsics as it is more precise.
484                     format!("attempted to instantiate uninhabited type `{}`", ty)
485                 } else if intrinsic == ZeroValid {
486                     format!("attempted to zero-initialize type `{}`, which is invalid", ty)
487                 } else {
488                     format!("attempted to leave type `{}` uninitialized, which is invalid", ty)
489                 };
490                 let msg = bx.const_str(Symbol::intern(&msg_str));
491                 let location = self.get_caller_location(bx, span).immediate();
492
493                 // Obtain the panic entry point.
494                 // FIXME: dedup this with `codegen_assert_terminator` above.
495                 let def_id = common::langcall(bx.tcx(), Some(span), "", LangItem::Panic);
496                 let instance = ty::Instance::mono(bx.tcx(), def_id);
497                 let fn_abi = FnAbi::of_instance(bx, instance, &[]);
498                 let llfn = bx.get_fn_addr(instance);
499
500                 if let Some((_, target)) = destination.as_ref() {
501                     helper.maybe_sideeffect(self.mir, bx, &[*target]);
502                 }
503                 // Codegen the actual panic invoke/call.
504                 helper.do_call(
505                     self,
506                     bx,
507                     fn_abi,
508                     llfn,
509                     &[msg.0, msg.1, location],
510                     destination.as_ref().map(|(_, bb)| (ReturnDest::Nothing, *bb)),
511                     cleanup,
512                 );
513             } else {
514                 // The intrinsic is a no-op; just branch to the target block.
515                 let target = destination.as_ref().unwrap().1;
516                 helper.maybe_sideeffect(self.mir, bx, &[target]);
517                 helper.funclet_br(self, bx, target)
518             }
519             true
520         } else {
521             false
522         }
523     }
524
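    /// Generates code for a `Call` terminator, covering intrinsics, virtual
    /// calls through a vtable, `#[track_caller]` location arguments, and plain
    /// direct or indirect calls.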
525     fn codegen_call_terminator(
526         &mut self,
527         helper: TerminatorCodegenHelper<'tcx>,
528         mut bx: Bx,
529         terminator: &mir::Terminator<'tcx>,
530         func: &mir::Operand<'tcx>,
531         args: &Vec<mir::Operand<'tcx>>,
532         destination: &Option<(mir::Place<'tcx>, mir::BasicBlock)>,
533         cleanup: Option<mir::BasicBlock>,
534         fn_span: Span,
535     ) {
536         let span = terminator.source_info.span;
537         // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
538         let callee = self.codegen_operand(&mut bx, func);
539
540         let (instance, mut llfn) = match callee.layout.ty.kind {
541             ty::FnDef(def_id, substs) => (
542                 Some(
543                     ty::Instance::resolve(bx.tcx(), ty::ParamEnv::reveal_all(), def_id, substs)
544                         .unwrap()
545                         .unwrap()
546                         .polymorphize(bx.tcx()),
547                 ),
548                 None,
549             ),
550             ty::FnPtr(_) => (None, Some(callee.immediate())),
551             _ => bug!("{} is not callable", callee.layout.ty),
552         };
553         let def = instance.map(|i| i.def);
554
555         if let Some(ty::InstanceDef::DropGlue(_, None)) = def {
556             // Empty drop glue; a no-op.
557             let &(_, target) = destination.as_ref().unwrap();
558             helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
559             helper.funclet_br(self, &mut bx, target);
560             return;
561         }
562
563         // FIXME(eddyb) avoid computing this if possible, when `instance` is
564         // available - right now `sig` is only needed for getting the `abi`
565         // and figuring out how many extra args were passed to a C-variadic `fn`.
566         let sig = callee.layout.ty.fn_sig(bx.tcx());
567         let abi = sig.abi();
568
569         // Handle intrinsics here ourselves (old codegen wanted `Expr`s for these).
570         let intrinsic = match def {
571             Some(ty::InstanceDef::Intrinsic(def_id)) => Some(bx.tcx().item_name(def_id)),
572             _ => None,
573         };
574
575         let extra_args = &args[sig.inputs().skip_binder().len()..];
576         let extra_args = extra_args
577             .iter()
578             .map(|op_arg| {
579                 let op_ty = op_arg.ty(self.mir, bx.tcx());
580                 self.monomorphize(&op_ty)
581             })
582             .collect::<Vec<_>>();
583
584         let fn_abi = match instance {
585             Some(instance) => FnAbi::of_instance(&bx, instance, &extra_args),
586             None => FnAbi::of_fn_ptr(&bx, sig, &extra_args),
587         };
588
589         if intrinsic == Some(sym::transmute) {
590             if let Some(destination_ref) = destination.as_ref() {
591                 let &(dest, target) = destination_ref;
592                 self.codegen_transmute(&mut bx, &args[0], dest);
593                 helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
594                 helper.funclet_br(self, &mut bx, target);
595             } else {
596                 // If we are trying to transmute to an uninhabited type,
597                 // it is likely there is no allotted destination. In fact,
598                 // transmuting to an uninhabited type is UB, which means
599                 // we can do what we like. Here, we declare that transmuting
600                 // into an uninhabited type is impossible, so anything following
601                 // it must be unreachable.
602                 assert_eq!(fn_abi.ret.layout.abi, abi::Abi::Uninhabited);
603                 bx.unreachable();
604             }
605             return;
606         }
607
608         if self.codegen_panic_intrinsic(
609             &helper,
610             &mut bx,
611             intrinsic,
612             instance,
613             span,
614             destination,
615             cleanup,
616         ) {
617             return;
618         }
619
620         // The arguments we'll be passing. Plus one to account for outptr, if used.
621         let arg_count = fn_abi.args.len() + fn_abi.ret.is_indirect() as usize;
622         let mut llargs = Vec::with_capacity(arg_count);
623
624         // Prepare the return value destination
625         let ret_dest = if let Some((dest, _)) = *destination {
626             let is_intrinsic = intrinsic.is_some();
627             self.make_return_dest(&mut bx, dest, &fn_abi.ret, &mut llargs, is_intrinsic)
628         } else {
629             ReturnDest::Nothing
630         };
631
632         if intrinsic == Some(sym::caller_location) {
633             if let Some((_, target)) = destination.as_ref() {
634                 let location = self.get_caller_location(&mut bx, fn_span);
635
636                 if let ReturnDest::IndirectOperand(tmp, _) = ret_dest {
637                     location.val.store(&mut bx, tmp);
638                 }
639                 self.store_return(&mut bx, ret_dest, &fn_abi.ret, location.immediate());
640
641                 helper.maybe_sideeffect(self.mir, &mut bx, &[*target]);
642                 helper.funclet_br(self, &mut bx, *target);
643             }
644             return;
645         }
646
647         if intrinsic.is_some() && intrinsic != Some(sym::drop_in_place) {
648             let intrinsic = intrinsic.unwrap();
649             let dest = match ret_dest {
650                 _ if fn_abi.ret.is_indirect() => llargs[0],
651                 ReturnDest::Nothing => {
652                     bx.const_undef(bx.type_ptr_to(bx.arg_memory_ty(&fn_abi.ret)))
653                 }
654                 ReturnDest::IndirectOperand(dst, _) | ReturnDest::Store(dst) => dst.llval,
655                 ReturnDest::DirectOperand(_) => {
656                     bug!("Cannot use direct operand with an intrinsic call")
657                 }
658             };
659
660             let args: Vec<_> = args
661                 .iter()
662                 .enumerate()
663                 .map(|(i, arg)| {
664                     // The indices passed to simd_shuffle* in the
665                     // third argument must be constant. This is
666                     // checked by const-qualification, which also
667                     // promotes any complex rvalues to constants.
668                     if i == 2 && intrinsic.as_str().starts_with("simd_shuffle") {
669                         if let mir::Operand::Constant(constant) = arg {
670                             let c = self.eval_mir_constant(constant);
671                             let (llval, ty) = self.simd_shuffle_indices(
672                                 &bx,
673                                 constant.span,
674                                 constant.literal.ty,
675                                 c,
676                             );
677                             return OperandRef { val: Immediate(llval), layout: bx.layout_of(ty) };
678                         } else {
679                             span_bug!(span, "shuffle indices must be constant");
680                         }
681                     }
682
683                     self.codegen_operand(&mut bx, arg)
684                 })
685                 .collect();
686
687             bx.codegen_intrinsic_call(
688                 *instance.as_ref().unwrap(),
689                 &fn_abi,
690                 &args,
691                 dest,
692                 terminator.source_info.span,
693             );
694
695             if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
696                 self.store_return(&mut bx, ret_dest, &fn_abi.ret, dst.llval);
697             }
698
699             if let Some((_, target)) = *destination {
700                 helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
701                 helper.funclet_br(self, &mut bx, target);
702             } else {
703                 bx.unreachable();
704             }
705
706             return;
707         }
708
709         // Split the rust-call tupled arguments off.
710         let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
711             let (tup, args) = args.split_last().unwrap();
712             (args, Some(tup))
713         } else {
714             (&args[..], None)
715         };
716
717         'make_args: for (i, arg) in first_args.iter().enumerate() {
718             let mut op = self.codegen_operand(&mut bx, arg);
719
720             if let (0, Some(ty::InstanceDef::Virtual(_, idx))) = (i, def) {
721                 if let Pair(..) = op.val {
722                     // In the case of Rc<Self>, we need to explicitly pass a
723                     // *mut RcBox<Self> with a Scalar (not ScalarPair) ABI. This is a hack
724                     // that is understood elsewhere in the compiler as a method on
725                     // `dyn Trait`.
726                     // To get a `*mut RcBox<Self>`, we just keep unwrapping newtypes until
727                     // we get a value of a built-in pointer type
728                     'descend_newtypes: while !op.layout.ty.is_unsafe_ptr()
729                         && !op.layout.ty.is_region_ptr()
730                     {
731                         for i in 0..op.layout.fields.count() {
732                             let field = op.extract_field(&mut bx, i);
733                             if !field.layout.is_zst() {
734                                 // We found the one non-zero-sized field that is allowed;
735                                 // now find *its* non-zero-sized field, or stop if it's a
736                                 // pointer.
737                                 op = field;
738                                 continue 'descend_newtypes;
739                             }
740                         }
741
742                         span_bug!(span, "receiver has no non-zero-sized fields {:?}", op);
743                     }
744
745                     // now that we have `*dyn Trait` or `&dyn Trait`, split it up into its
746                     // data pointer and vtable. Look up the method in the vtable, and pass
747                     // the data pointer as the first argument
748                     match op.val {
749                         Pair(data_ptr, meta) => {
750                             llfn = Some(
751                                 meth::VirtualIndex::from_index(idx).get_fn(&mut bx, meta, &fn_abi),
752                             );
753                             llargs.push(data_ptr);
754                             continue 'make_args;
755                         }
756                         other => bug!("expected a Pair, got {:?}", other),
757                     }
758                 } else if let Ref(data_ptr, Some(meta), _) = op.val {
759                     // by-value dynamic dispatch
760                     llfn = Some(meth::VirtualIndex::from_index(idx).get_fn(&mut bx, meta, &fn_abi));
761                     llargs.push(data_ptr);
762                     continue;
763                 } else {
764                     span_bug!(span, "can't codegen a virtual call on {:?}", op);
765                 }
766             }
767
768             // The callee needs to own the argument memory if we pass it
769             // by-ref, so make a local copy of non-immediate constants.
770             match (arg, op.val) {
771                 (&mir::Operand::Copy(_), Ref(_, None, _))
772                 | (&mir::Operand::Constant(_), Ref(_, None, _)) => {
773                     let tmp = PlaceRef::alloca(&mut bx, op.layout);
774                     op.val.store(&mut bx, tmp);
775                     op.val = Ref(tmp.llval, None, tmp.align);
776                 }
777                 _ => {}
778             }
779
780             self.codegen_argument(&mut bx, op, &mut llargs, &fn_abi.args[i]);
781         }
782         if let Some(tup) = untuple {
783             self.codegen_arguments_untupled(
784                 &mut bx,
785                 tup,
786                 &mut llargs,
787                 &fn_abi.args[first_args.len()..],
788             )
789         }
790
791         let needs_location =
792             instance.map_or(false, |i| i.def.requires_caller_location(self.cx.tcx()));
793         if needs_location {
794             assert_eq!(
795                 fn_abi.args.len(),
796                 args.len() + 1,
797                 "#[track_caller] fn's must have 1 more argument in their ABI than in their MIR",
798             );
799             let location = self.get_caller_location(&mut bx, fn_span);
800             debug!(
801                 "codegen_call_terminator({:?}): location={:?} (fn_span {:?})",
802                 terminator, location, fn_span
803             );
804
805             let last_arg = fn_abi.args.last().unwrap();
806             self.codegen_argument(&mut bx, location, &mut llargs, last_arg);
807         }
808
809         let fn_ptr = match (llfn, instance) {
810             (Some(llfn), _) => llfn,
811             (None, Some(instance)) => bx.get_fn_addr(instance),
812             _ => span_bug!(span, "no llfn for call"),
813         };
814
815         if let Some((_, target)) = destination.as_ref() {
816             helper.maybe_sideeffect(self.mir, &mut bx, &[*target]);
817         }
818         helper.do_call(
819             self,
820             &mut bx,
821             fn_abi,
822             fn_ptr,
823             &llargs,
824             destination.as_ref().map(|&(_, target)| (ret_dest, target)),
825             cleanup,
826         );
827     }
828
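    /// Generates code for an `InlineAsm` terminator by lowering each MIR asm
    /// operand to a backend operand and emitting the inline assembly.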
829     fn codegen_asm_terminator(
830         &mut self,
831         helper: TerminatorCodegenHelper<'tcx>,
832         mut bx: Bx,
833         terminator: &mir::Terminator<'tcx>,
834         template: &[ast::InlineAsmTemplatePiece],
835         operands: &[mir::InlineAsmOperand<'tcx>],
836         options: ast::InlineAsmOptions,
837         line_spans: &[Span],
838         destination: Option<mir::BasicBlock>,
839     ) {
840         let span = terminator.source_info.span;
841
842         let operands: Vec<_> = operands
843             .iter()
844             .map(|op| match *op {
845                 mir::InlineAsmOperand::In { reg, ref value } => {
846                     let value = self.codegen_operand(&mut bx, value);
847                     InlineAsmOperandRef::In { reg, value }
848                 }
849                 mir::InlineAsmOperand::Out { reg, late, ref place } => {
850                     let place = place.map(|place| self.codegen_place(&mut bx, place.as_ref()));
851                     InlineAsmOperandRef::Out { reg, late, place }
852                 }
853                 mir::InlineAsmOperand::InOut { reg, late, ref in_value, ref out_place } => {
854                     let in_value = self.codegen_operand(&mut bx, in_value);
855                     let out_place =
856                         out_place.map(|out_place| self.codegen_place(&mut bx, out_place.as_ref()));
857                     InlineAsmOperandRef::InOut { reg, late, in_value, out_place }
858                 }
859                 mir::InlineAsmOperand::Const { ref value } => {
860                     if let mir::Operand::Constant(constant) = value {
861                         let const_value = self
862                             .eval_mir_constant(constant)
863                             .unwrap_or_else(|_| span_bug!(span, "asm const cannot be resolved"));
864                         let ty = constant.literal.ty;
865                         let size = bx.layout_of(ty).size;
866                         let scalar = match const_value {
867                             // Promoted constants are evaluated into a ByRef instead of a Scalar,
868                             // but we want the scalar value here.
869                             ConstValue::ByRef { alloc, offset } => {
870                                 let ptr = Pointer::new(AllocId(0), offset);
871                                 alloc
872                                     .read_scalar(&bx, ptr, size)
873                                     .and_then(|s| s.check_init())
874                                     .unwrap_or_else(|e| {
875                                         bx.tcx().sess.span_err(
876                                             span,
877                                             &format!("Could not evaluate asm const: {}", e),
878                                         );
879
880                                         // We are erroring out, just emit a dummy constant.
881                                         Scalar::from_u64(0)
882                                     })
883                             }
884                             _ => span_bug!(span, "expected ByRef for promoted asm const"),
885                         };
886                         let value = scalar.assert_bits(size);
887                         let string = match ty.kind {
888                             ty::Uint(_) => value.to_string(),
889                             ty::Int(int_ty) => {
890                                 match int_ty.normalize(bx.tcx().sess.target.ptr_width) {
891                                     ast::IntTy::I8 => (value as i8).to_string(),
892                                     ast::IntTy::I16 => (value as i16).to_string(),
893                                     ast::IntTy::I32 => (value as i32).to_string(),
894                                     ast::IntTy::I64 => (value as i64).to_string(),
895                                     ast::IntTy::I128 => (value as i128).to_string(),
896                                     ast::IntTy::Isize => unreachable!(),
897                                 }
898                             }
899                             ty::Float(ast::FloatTy::F32) => {
900                                 f32::from_bits(value as u32).to_string()
901                             }
902                             ty::Float(ast::FloatTy::F64) => {
903                                 f64::from_bits(value as u64).to_string()
904                             }
905                             _ => span_bug!(span, "asm const has bad type {}", ty),
906                         };
907                         InlineAsmOperandRef::Const { string }
908                     } else {
909                         span_bug!(span, "asm const is not a constant");
910                     }
911                 }
912                 mir::InlineAsmOperand::SymFn { ref value } => {
913                     let literal = self.monomorphize(&value.literal);
914                     if let ty::FnDef(def_id, substs) = literal.ty.kind {
915                         let instance = ty::Instance::resolve_for_fn_ptr(
916                             bx.tcx(),
917                             ty::ParamEnv::reveal_all(),
918                             def_id,
919                             substs,
920                         )
921                         .unwrap();
922                         InlineAsmOperandRef::SymFn { instance }
923                     } else {
924                         span_bug!(span, "invalid type for asm sym (fn)");
925                     }
926                 }
927                 mir::InlineAsmOperand::SymStatic { def_id } => {
928                     InlineAsmOperandRef::SymStatic { def_id }
929                 }
930             })
931             .collect();
932
933         bx.codegen_inline_asm(template, &operands, options, line_spans);
934
935         if let Some(target) = destination {
936             helper.funclet_br(self, &mut bx, target);
937         } else {
938             bx.unreachable();
939         }
940     }
941 }
942
943 impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
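    /// Codegens all the statements of the given MIR basic block, followed by
    /// its terminator.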
944     pub fn codegen_block(&mut self, bb: mir::BasicBlock) {
945         let mut bx = self.build_block(bb);
946         let mir = self.mir;
947         let data = &mir[bb];
948
949         debug!("codegen_block({:?}={:?})", bb, data);
950
951         for statement in &data.statements {
952             bx = self.codegen_statement(bx, statement);
953         }
954
955         self.codegen_terminator(bx, bb, data.terminator());
956     }
957
958     fn codegen_terminator(
959         &mut self,
960         mut bx: Bx,
961         bb: mir::BasicBlock,
962         terminator: &'tcx mir::Terminator<'tcx>,
963     ) {
964         debug!("codegen_terminator: {:?}", terminator);
965
966         // Create the cleanup bundle, if needed.
967         let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);
968         let helper = TerminatorCodegenHelper { bb, terminator, funclet_bb };
969
970         self.set_debug_loc(&mut bx, terminator.source_info);
971         match terminator.kind {
972             mir::TerminatorKind::Resume => self.codegen_resume_terminator(helper, bx),
973
974             mir::TerminatorKind::Abort => {
975                 bx.abort();
976                 // `abort` does not terminate the block, so we still need to generate
977                 // an `unreachable` terminator after it.
978                 bx.unreachable();
979             }
980
981             mir::TerminatorKind::Goto { target } => {
982                 helper.maybe_sideeffect(self.mir, &mut bx, &[target]);
983                 helper.funclet_br(self, &mut bx, target);
984             }
985
986             mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref values, ref targets } => {
987                 self.codegen_switchint_terminator(helper, bx, discr, switch_ty, values, targets);
988             }
989
990             mir::TerminatorKind::Return => {
991                 self.codegen_return_terminator(bx);
992             }
993
994             mir::TerminatorKind::Unreachable => {
995                 bx.unreachable();
996             }
997
998             mir::TerminatorKind::Drop { place, target, unwind } => {
999                 self.codegen_drop_terminator(helper, bx, place, target, unwind);
1000             }
1001
1002             mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
1003                 self.codegen_assert_terminator(
1004                     helper, bx, terminator, cond, expected, msg, target, cleanup,
1005                 );
1006             }
1007
1008             mir::TerminatorKind::DropAndReplace { .. } => {
1009                 bug!("undesugared DropAndReplace in codegen: {:?}", terminator);
1010             }
1011
1012             mir::TerminatorKind::Call {
1013                 ref func,
1014                 ref args,
1015                 ref destination,
1016                 cleanup,
1017                 from_hir_call: _,
1018                 fn_span,
1019             } => {
1020                 self.codegen_call_terminator(
1021                     helper,
1022                     bx,
1023                     terminator,
1024                     func,
1025                     args,
1026                     destination,
1027                     cleanup,
1028                     fn_span,
1029                 );
1030             }
1031             mir::TerminatorKind::GeneratorDrop | mir::TerminatorKind::Yield { .. } => {
1032                 bug!("generator ops in codegen")
1033             }
1034             mir::TerminatorKind::FalseEdge { .. } | mir::TerminatorKind::FalseUnwind { .. } => {
1035                 bug!("borrowck false edges in codegen")
1036             }
1037
1038             mir::TerminatorKind::InlineAsm {
1039                 template,
1040                 ref operands,
1041                 options,
1042                 line_spans,
1043                 destination,
1044             } => {
1045                 self.codegen_asm_terminator(
1046                     helper,
1047                     bx,
1048                     terminator,
1049                     template,
1050                     operands,
1051                     options,
1052                     line_spans,
1053                     destination,
1054                 );
1055             }
1056         }
1057     }
1058
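    /// Lowers a single call argument into the form required by its `ArgAbi` and
    /// pushes the resulting value(s) onto `llargs`.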
1059     fn codegen_argument(
1060         &mut self,
1061         bx: &mut Bx,
1062         op: OperandRef<'tcx, Bx::Value>,
1063         llargs: &mut Vec<Bx::Value>,
1064         arg: &ArgAbi<'tcx, Ty<'tcx>>,
1065     ) {
1066         // Fill padding with undef value, where applicable.
1067         if let Some(ty) = arg.pad {
1068             llargs.push(bx.const_undef(bx.reg_backend_type(&ty)))
1069         }
1070
1071         if arg.is_ignore() {
1072             return;
1073         }
1074
1075         if let PassMode::Pair(..) = arg.mode {
1076             match op.val {
1077                 Pair(a, b) => {
1078                     llargs.push(a);
1079                     llargs.push(b);
1080                     return;
1081                 }
1082                 _ => bug!("codegen_argument: {:?} invalid for pair argument", op),
1083             }
1084         } else if arg.is_unsized_indirect() {
1085             match op.val {
1086                 Ref(a, Some(b), _) => {
1087                     llargs.push(a);
1088                     llargs.push(b);
1089                     return;
1090                 }
1091                 _ => bug!("codegen_argument: {:?} invalid for unsized indirect argument", op),
1092             }
1093         }
1094
1095         // Force by-ref if we have to load through a cast pointer.
1096         let (mut llval, align, by_ref) = match op.val {
1097             Immediate(_) | Pair(..) => match arg.mode {
1098                 PassMode::Indirect(..) | PassMode::Cast(_) => {
1099                     let scratch = PlaceRef::alloca(bx, arg.layout);
1100                     op.val.store(bx, scratch);
1101                     (scratch.llval, scratch.align, true)
1102                 }
1103                 _ => (op.immediate_or_packed_pair(bx), arg.layout.align.abi, false),
1104             },
1105             Ref(llval, _, align) => {
1106                 if arg.is_indirect() && align < arg.layout.align.abi {
1107                     // `foo(packed.large_field)`. We can't pass the (unaligned) field directly.
1108                     // At the moment (Rust 1.16) we probably only pass temporaries here, but we
1109                     // shouldn't leave scary latent bugs around either way.
1110
1111                     let scratch = PlaceRef::alloca(bx, arg.layout);
1112                     base::memcpy_ty(
1113                         bx,
1114                         scratch.llval,
1115                         scratch.align,
1116                         llval,
1117                         align,
1118                         op.layout,
1119                         MemFlags::empty(),
1120                     );
1121                     (scratch.llval, scratch.align, true)
1122                 } else {
1123                     (llval, align, true)
1124                 }
1125             }
1126         };
1127
1128         if by_ref && !arg.is_indirect() {
1129             // Have to load the argument, maybe while casting it.
1130             if let PassMode::Cast(ty) = arg.mode {
1131                 let addr = bx.pointercast(llval, bx.type_ptr_to(bx.cast_backend_type(&ty)));
1132                 llval = bx.load(addr, align.min(arg.layout.align.abi));
1133             } else {
1134                 // We can't use `PlaceRef::load` here because the argument
1135                 // may have a type we don't treat as immediate, but the ABI
1136                 // used for this call is passing it by-value. In that case,
1137                 // the load would just produce `OperandValue::Ref` instead
1138                 // of the `OperandValue::Immediate` we need for the call.
1139                 llval = bx.load(llval, align);
1140                 if let abi::Abi::Scalar(ref scalar) = arg.layout.abi {
1141                     if scalar.is_bool() {
1142                         bx.range_metadata(llval, 0..2);
1143                     }
1144                 }
1145                 // We store bools as `i8` so we need to truncate to `i1`.
1146                 llval = base::to_immediate(bx, llval, arg.layout);
1147             }
1148         }
1149
1150         llargs.push(llval);
1151     }
1152
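    /// Expands the tupled arguments of a "rust-call" ABI function into
    /// individual call arguments.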
1153     fn codegen_arguments_untupled(
1154         &mut self,
1155         bx: &mut Bx,
1156         operand: &mir::Operand<'tcx>,
1157         llargs: &mut Vec<Bx::Value>,
1158         args: &[ArgAbi<'tcx, Ty<'tcx>>],
1159     ) {
1160         let tuple = self.codegen_operand(bx, operand);
1161
1162         // Handle both by-ref and immediate tuples.
1163         if let Ref(llval, None, align) = tuple.val {
1164             let tuple_ptr = PlaceRef::new_sized_aligned(llval, tuple.layout, align);
1165             for i in 0..tuple.layout.fields.count() {
1166                 let field_ptr = tuple_ptr.project_field(bx, i);
1167                 let field = bx.load_operand(field_ptr);
1168                 self.codegen_argument(bx, field, llargs, &args[i]);
1169             }
1170         } else if let Ref(_, Some(_), _) = tuple.val {
1171             bug!("closure arguments must be sized")
1172         } else {
1173             // If the tuple is immediate, the elements are as well.
1174             for i in 0..tuple.layout.fields.count() {
1175                 let op = tuple.extract_field(bx, i);
1176                 self.codegen_argument(bx, op, llargs, &args[i]);
1177             }
1178         }
1179     }
1180
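    /// Returns the `#[track_caller]` caller location operand: either the one
    /// propagated from this function's own caller, or a constant built from `span`.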
1181     fn get_caller_location(&mut self, bx: &mut Bx, span: Span) -> OperandRef<'tcx, Bx::Value> {
1182         self.caller_location.unwrap_or_else(|| {
1183             let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span);
1184             let caller = bx.tcx().sess.source_map().lookup_char_pos(topmost.lo());
1185             let const_loc = bx.tcx().const_caller_location((
1186                 Symbol::intern(&caller.file.name.to_string()),
1187                 caller.line as u32,
1188                 caller.col_display as u32 + 1,
1189             ));
1190             OperandRef::from_const(bx, const_loc, bx.tcx().caller_location_ty())
1191         })
1192     }
1193
1194     fn get_personality_slot(&mut self, bx: &mut Bx) -> PlaceRef<'tcx, Bx::Value> {
1195         let cx = bx.cx();
1196         if let Some(slot) = self.personality_slot {
1197             slot
1198         } else {
1199             let layout = cx.layout_of(
1200                 cx.tcx().intern_tup(&[cx.tcx().mk_mut_ptr(cx.tcx().types.u8), cx.tcx().types.i32]),
1201             );
1202             let slot = PlaceRef::alloca(bx, layout);
1203             self.personality_slot = Some(slot);
1204             slot
1205         }
1206     }
1207
1208     /// Returns the landing-pad wrapper around the given basic block.
1209     ///
1210     /// No-op in MSVC SEH scheme.
1211     fn landing_pad_to(&mut self, target_bb: mir::BasicBlock) -> Bx::BasicBlock {
1212         if let Some(block) = self.landing_pads[target_bb] {
1213             return block;
1214         }
1215
1216         let block = self.blocks[target_bb];
1217         let landing_pad = self.landing_pad_uncached(block);
1218         self.landing_pads[target_bb] = Some(landing_pad);
1219         landing_pad
1220     }
1221
1222     fn landing_pad_uncached(&mut self, target_bb: Bx::BasicBlock) -> Bx::BasicBlock {
1223         if base::wants_msvc_seh(self.cx.sess()) {
1224             span_bug!(self.mir.span, "landing pad was not inserted?")
1225         }
1226
1227         let mut bx = self.new_block("cleanup");
1228
1229         let llpersonality = self.cx.eh_personality();
1230         let llretty = self.landing_pad_type();
1231         let lp = bx.landing_pad(llretty, llpersonality, 1);
1232         bx.set_cleanup(lp);
1233
1234         let slot = self.get_personality_slot(&mut bx);
1235         slot.storage_live(&mut bx);
1236         Pair(bx.extract_value(lp, 0), bx.extract_value(lp, 1)).store(&mut bx, slot);
1237
1238         bx.br(target_bb);
1239         bx.llbb()
1240     }
1241
1242     fn landing_pad_type(&self) -> Bx::Type {
1243         let cx = self.cx;
1244         cx.type_struct(&[cx.type_i8p(), cx.type_i32()], false)
1245     }
1246
1247     fn unreachable_block(&mut self) -> Bx::BasicBlock {
1248         self.unreachable_block.unwrap_or_else(|| {
1249             let mut bx = self.new_block("unreachable");
1250             bx.unreachable();
1251             self.unreachable_block = Some(bx.llbb());
1252             bx.llbb()
1253         })
1254     }
1255
1256     pub fn new_block(&self, name: &str) -> Bx {
1257         Bx::new_block(self.cx, self.llfn, name)
1258     }
1259
1260     pub fn build_block(&self, bb: mir::BasicBlock) -> Bx {
1261         let mut bx = Bx::with_cx(self.cx);
1262         bx.position_at_end(self.blocks[bb]);
1263         bx
1264     }
1265
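    /// Determines where the return value of the call should be written, pushing
    /// an out-pointer onto `llargs` when the return is indirect.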
1266     fn make_return_dest(
1267         &mut self,
1268         bx: &mut Bx,
1269         dest: mir::Place<'tcx>,
1270         fn_ret: &ArgAbi<'tcx, Ty<'tcx>>,
1271         llargs: &mut Vec<Bx::Value>,
1272         is_intrinsic: bool,
1273     ) -> ReturnDest<'tcx, Bx::Value> {
1274         // If the return is ignored, we can just return a do-nothing `ReturnDest`.
1275         if fn_ret.is_ignore() {
1276             return ReturnDest::Nothing;
1277         }
1278         let dest = if let Some(index) = dest.as_local() {
1279             match self.locals[index] {
1280                 LocalRef::Place(dest) => dest,
1281                 LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
1282                 LocalRef::Operand(None) => {
1283                     // Handle temporary places, specifically `Operand` ones, as
1284                     // they don't have `alloca`s.
1285                     return if fn_ret.is_indirect() {
1286                         // Odd, but possible, case: we have an operand temporary,
1287                         // but the calling convention requires an indirect return.
1288                         let tmp = PlaceRef::alloca(bx, fn_ret.layout);
1289                         tmp.storage_live(bx);
1290                         llargs.push(tmp.llval);
1291                         ReturnDest::IndirectOperand(tmp, index)
1292                     } else if is_intrinsic {
1293                         // Currently, intrinsics always need a location to store
1294                         // the result, so we create a temporary `alloca` for the
1295                         // result.
1296                         let tmp = PlaceRef::alloca(bx, fn_ret.layout);
1297                         tmp.storage_live(bx);
1298                         ReturnDest::IndirectOperand(tmp, index)
1299                     } else {
1300                         ReturnDest::DirectOperand(index)
1301                     };
1302                 }
1303                 LocalRef::Operand(Some(_)) => {
1304                     bug!("place local already assigned to");
1305                 }
1306             }
1307         } else {
1308             self.codegen_place(
1309                 bx,
1310                 mir::PlaceRef { local: dest.local, projection: &dest.projection },
1311             )
1312         };
1313         if fn_ret.is_indirect() {
1314             if dest.align < dest.layout.align.abi {
1315                 // Currently, MIR code generation does not create calls
1316                 // that store directly to fields of packed structs (in
1317                 // fact, the calls it creates write only to temps).
1318                 //
1319                 // If someone changes that, please update this code path
1320                 // to create a temporary.
1321                 span_bug!(self.mir.span, "can't directly store to unaligned value");
1322             }
1323             llargs.push(dest.llval);
1324             ReturnDest::Nothing
1325         } else {
1326             ReturnDest::Store(dest)
1327         }
1328     }
1329
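    /// Generates code for a `transmute` intrinsic call into the destination
    /// place, handling operand locals that have no backing `alloca`.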
1330     fn codegen_transmute(&mut self, bx: &mut Bx, src: &mir::Operand<'tcx>, dst: mir::Place<'tcx>) {
1331         if let Some(index) = dst.as_local() {
1332             match self.locals[index] {
1333                 LocalRef::Place(place) => self.codegen_transmute_into(bx, src, place),
1334                 LocalRef::UnsizedPlace(_) => bug!("transmute must not involve unsized locals"),
1335                 LocalRef::Operand(None) => {
1336                     let dst_layout = bx.layout_of(self.monomorphized_place_ty(dst.as_ref()));
1337                     assert!(!dst_layout.ty.has_erasable_regions());
1338                     let place = PlaceRef::alloca(bx, dst_layout);
1339                     place.storage_live(bx);
1340                     self.codegen_transmute_into(bx, src, place);
1341                     let op = bx.load_operand(place);
1342                     place.storage_dead(bx);
1343                     self.locals[index] = LocalRef::Operand(Some(op));
1344                     self.debug_introduce_local(bx, index);
1345                 }
1346                 LocalRef::Operand(Some(op)) => {
1347                     assert!(op.layout.is_zst(), "assigning to initialized SSA temp");
1348                 }
1349             }
1350         } else {
1351             let dst = self.codegen_place(bx, dst.as_ref());
1352             self.codegen_transmute_into(bx, src, dst);
1353         }
1354     }
1355
1356     fn codegen_transmute_into(
1357         &mut self,
1358         bx: &mut Bx,
1359         src: &mir::Operand<'tcx>,
1360         dst: PlaceRef<'tcx, Bx::Value>,
1361     ) {
1362         let src = self.codegen_operand(bx, src);
1363         let llty = bx.backend_type(src.layout);
1364         let cast_ptr = bx.pointercast(dst.llval, bx.type_ptr_to(llty));
1365         let align = src.layout.align.abi.min(dst.align);
1366         src.val.store(bx, PlaceRef::new_sized_aligned(cast_ptr, src.layout, align));
1367     }
1368
1369     /// Stores the return value of a function call into its final location.
1370     fn store_return(
1371         &mut self,
1372         bx: &mut Bx,
1373         dest: ReturnDest<'tcx, Bx::Value>,
1374         ret_abi: &ArgAbi<'tcx, Ty<'tcx>>,
1375         llval: Bx::Value,
1376     ) {
1377         use self::ReturnDest::*;
1378
1379         match dest {
1380             Nothing => (),
1381             Store(dst) => bx.store_arg(&ret_abi, llval, dst),
1382             IndirectOperand(tmp, index) => {
1383                 let op = bx.load_operand(tmp);
1384                 tmp.storage_dead(bx);
1385                 self.locals[index] = LocalRef::Operand(Some(op));
1386                 self.debug_introduce_local(bx, index);
1387             }
1388             DirectOperand(index) => {
1389                 // If there is a cast, we have to store and reload.
1390                 let op = if let PassMode::Cast(_) = ret_abi.mode {
1391                     let tmp = PlaceRef::alloca(bx, ret_abi.layout);
1392                     tmp.storage_live(bx);
1393                     bx.store_arg(&ret_abi, llval, tmp);
1394                     let op = bx.load_operand(tmp);
1395                     tmp.storage_dead(bx);
1396                     op
1397                 } else {
1398                     OperandRef::from_immediate_or_packed_pair(bx, llval, ret_abi.layout)
1399                 };
1400                 self.locals[index] = LocalRef::Operand(Some(op));
1401                 self.debug_introduce_local(bx, index);
1402             }
1403         }
1404     }
1405 }
1406
1407 enum ReturnDest<'tcx, V> {
1408     // Do nothing; the return value is indirect or ignored.
1409     Nothing,
1410     // Store the return value to the pointer.
1411     Store(PlaceRef<'tcx, V>),
1412     // Store an indirect return value to an operand local place.
1413     IndirectOperand(PlaceRef<'tcx, V>, mir::Local),
1414     // Store a direct return value to an operand local place.
1415     DirectOperand(mir::Local),
1416 }