// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use rustc::middle::lang_items;
use rustc::ty::{self, Ty, TypeFoldable};
use rustc::ty::layout::{self, LayoutOf, HasTyCtxt};
use rustc::mir;
use rustc::mir::interpret::EvalErrorKind;
use abi::{Abi, FnType, PassMode};
use rustc_target::abi::call::ArgType;
use base;
use builder::MemFlags;
use common;
use rustc_codegen_ssa::common::IntPredicate;
use meth;
use monomorphize;

use interfaces::*;

use syntax::symbol::Symbol;
use syntax_pos::Pos;

use super::{FunctionCx, LocalRef};
use super::place::PlaceRef;
use super::operand::OperandRef;
use super::operand::OperandValue::{Pair, Ref, Immediate};

impl<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
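    /// Codegens `bb`: each of its statements in order, then its terminator.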
    pub fn codegen_block(
        &mut self,
        bb: mir::BasicBlock,
    ) {
        let mut bx = self.build_block(bb);
        let data = &self.mir[bb];

        debug!("codegen_block({:?}={:?})", bb, data);

        for statement in &data.statements {
            bx = self.codegen_statement(bx, statement);
        }

        self.codegen_terminator(bx, bb, data.terminator());
    }

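    /// Codegens the terminator of `bb`, dispatching on the MIR
    /// `TerminatorKind` to emit branches, switches, calls, drops, asserts,
    /// returns, and unwind/cleanup edges.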
    fn codegen_terminator(
        &mut self,
        mut bx: Bx,
        bb: mir::BasicBlock,
        terminator: &mir::Terminator<'tcx>
    ) {
        debug!("codegen_terminator: {:?}", terminator);

        // Create the cleanup bundle, if needed.
        let tcx = self.cx.tcx();
        let span = terminator.source_info.span;
        let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);

        // HACK(eddyb) force the right lifetimes, NLL can't figure them out.
        fn funclet_closure_factory<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>(
            funclet_bb: Option<mir::BasicBlock>
        ) -> impl for<'b> Fn(
            &'b FunctionCx<'a, 'tcx, Bx>,
        ) -> Option<&'b Bx::Funclet> {
            move |this| {
                match funclet_bb {
                    Some(funclet_bb) => this.funclets[funclet_bb].as_ref(),
                    None => None,
                }
            }
        }
        let funclet = funclet_closure_factory(funclet_bb);

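        // Resolves a MIR target block to a backend block; the second element
        // is `true` when the edge crosses from one cleanup funclet into
        // another (MSVC SEH), in which case it must be taken via `cleanupret`
        // rather than a plain branch.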
        let lltarget = |this: &mut Self, target: mir::BasicBlock| {
            let lltarget = this.blocks[target];
            let target_funclet = this.cleanup_kinds[target].funclet_bb(target);
            match (funclet_bb, target_funclet) {
                (None, None) => (lltarget, false),
                (Some(f), Some(t_f))
                    if f == t_f || !base::wants_msvc_seh(tcx.sess)
                    => (lltarget, false),
                (None, Some(_)) => {
                    // jump *into* cleanup - need a landing pad if GNU
                    (this.landing_pad_to(target), false)
                }
                (Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", terminator),
                (Some(_), Some(_)) => {
                    (this.landing_pad_to(target), true)
                }
            }
        };

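        // Like `lltarget`, but materializes a trampoline block holding the
        // `cleanupret` when one is required, so callers always get a plain
        // block they can branch to.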
        let llblock = |this: &mut Self, target: mir::BasicBlock| {
            let (lltarget, is_cleanupret) = lltarget(this, target);
            if is_cleanupret {
                // MSVC cross-funclet jump - need a trampoline

                debug!("llblock: creating cleanup trampoline for {:?}", target);
                let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
                let trampoline = this.new_block(name);
                trampoline.cleanup_ret(funclet(this).unwrap(), Some(lltarget));
                trampoline.llbb()
            } else {
                lltarget
            }
        };

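        // Branches to `target`, emitting a `cleanupret` directly (instead of
        // going through a trampoline) when the edge leaves the current funclet.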
        let funclet_br =
            |this: &mut Self, bx: &Bx, target: mir::BasicBlock| {
                let (lltarget, is_cleanupret) = lltarget(this, target);
                if is_cleanupret {
                    // micro-optimization: generate a `ret` rather than a jump
                    // to a trampoline.
                    bx.cleanup_ret(funclet(this).unwrap(), Some(lltarget));
                } else {
                    bx.br(lltarget);
                }
            };

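        // Emits the call itself: an `invoke` when there is a cleanup block to
        // unwind to, a plain `call` otherwise. Applies the call-site ABI
        // attributes and stores the return value into `destination`; with no
        // destination the call is followed by `unreachable`.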
        let do_call = |
            this: &mut Self,
            bx: &Bx,
            fn_ty: FnType<'tcx, Ty<'tcx>>,
            fn_ptr: Bx::Value,
            llargs: &[Bx::Value],
            destination: Option<(ReturnDest<'tcx, Bx::Value>, mir::BasicBlock)>,
            cleanup: Option<mir::BasicBlock>
        | {
            if let Some(cleanup) = cleanup {
                let ret_bx = if let Some((_, target)) = destination {
                    this.blocks[target]
                } else {
                    this.unreachable_block()
                };
                let invokeret = bx.invoke(fn_ptr,
                                          &llargs,
                                          ret_bx,
                                          llblock(this, cleanup),
                                          funclet(this));
                bx.apply_attrs_callsite(&fn_ty, invokeret);

                if let Some((ret_dest, target)) = destination {
                    let ret_bx = this.build_block(target);
                    this.set_debug_loc(&ret_bx, terminator.source_info);
                    this.store_return(&ret_bx, ret_dest, &fn_ty.ret, invokeret);
                }
            } else {
                let llret = bx.call(fn_ptr, &llargs, funclet(this));
                bx.apply_attrs_callsite(&fn_ty, llret);
                if this.mir[bb].is_cleanup {
                    // Cleanup is always the cold path. Don't inline
                    // drop glue. Also, when there is a deeply-nested
                    // struct, there are "symmetry" issues that cause
                    // exponential inlining - see issue #41696.
                    bx.do_not_inline(llret);
                }

                if let Some((ret_dest, target)) = destination {
                    this.store_return(bx, ret_dest, &fn_ty.ret, llret);
                    funclet_br(this, bx, target);
                } else {
                    bx.unreachable();
                }
            }
        };

        self.set_debug_loc(&bx, terminator.source_info);
        match terminator.kind {
            mir::TerminatorKind::Resume => {
                if let Some(funclet) = funclet(self) {
                    bx.cleanup_ret(funclet, None);
                } else {
                    let slot = self.get_personality_slot(&bx);
                    let lp0 = bx.load_operand(slot.project_field(&bx, 0)).immediate();
                    let lp1 = bx.load_operand(slot.project_field(&bx, 1)).immediate();
                    slot.storage_dead(&bx);

                    if !bx.cx().sess().target.target.options.custom_unwind_resume {
                        let mut lp = bx.cx().const_undef(self.landing_pad_type());
                        lp = bx.insert_value(lp, lp0, 0);
                        lp = bx.insert_value(lp, lp1, 1);
                        bx.resume(lp);
                    } else {
                        bx.call(bx.cx().eh_unwind_resume(), &[lp0], funclet(self));
                        bx.unreachable();
                    }
                }
            }

            mir::TerminatorKind::Abort => {
                // Call core::intrinsics::abort()
                let fnname = bx.cx().get_intrinsic(&("llvm.trap"));
                bx.call(fnname, &[], None);
                bx.unreachable();
            }

            mir::TerminatorKind::Goto { target } => {
                funclet_br(self, &bx, target);
            }

            mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref values, ref targets } => {
                let discr = self.codegen_operand(&bx, discr);
                if targets.len() == 2 {
                    // If there are two targets, emit br instead of switch
                    let lltrue = llblock(self, targets[0]);
                    let llfalse = llblock(self, targets[1]);
                    if switch_ty == bx.tcx().types.bool {
                        // Don't generate trivial icmps when switching on bool
                        if let [0] = values[..] {
                            bx.cond_br(discr.immediate(), llfalse, lltrue);
                        } else {
                            assert_eq!(&values[..], &[1]);
                            bx.cond_br(discr.immediate(), lltrue, llfalse);
                        }
                    } else {
                        let switch_llty = bx.cx().immediate_backend_type(
                            bx.cx().layout_of(switch_ty)
                        );
                        let llval = bx.cx().const_uint_big(switch_llty, values[0]);
                        let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval);
                        bx.cond_br(cmp, lltrue, llfalse);
                    }
                } else {
                    let (otherwise, targets) = targets.split_last().unwrap();
                    let switch = bx.switch(discr.immediate(),
                                           llblock(self, *otherwise),
                                           values.len());
                    let switch_llty = bx.cx().immediate_backend_type(
                        bx.cx().layout_of(switch_ty)
                    );
                    for (&value, target) in values.iter().zip(targets) {
                        let llval = bx.cx().const_uint_big(switch_llty, value);
                        let llbb = llblock(self, *target);
                        bx.add_case(switch, llval, llbb)
                    }
                }
            }

            mir::TerminatorKind::Return => {
                let llval = match self.fn_ty.ret.mode {
                    PassMode::Ignore | PassMode::Indirect(..) => {
                        bx.ret_void();
                        return;
                    }

                    PassMode::Direct(_) | PassMode::Pair(..) => {
                        let op = self.codegen_consume(&bx, &mir::Place::Local(mir::RETURN_PLACE));
                        if let Ref(llval, _, align) = op.val {
                            bx.load(llval, align)
                        } else {
                            op.immediate_or_packed_pair(&bx)
                        }
                    }

                    PassMode::Cast(cast_ty) => {
                        let op = match self.locals[mir::RETURN_PLACE] {
                            LocalRef::Operand(Some(op)) => op,
                            LocalRef::Operand(None) => bug!("use of return before def"),
                            LocalRef::Place(cg_place) => {
                                OperandRef {
                                    val: Ref(cg_place.llval, None, cg_place.align),
                                    layout: cg_place.layout
                                }
                            }
                            LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                        };
                        let llslot = match op.val {
                            Immediate(_) | Pair(..) => {
                                let scratch = PlaceRef::alloca(&bx, self.fn_ty.ret.layout, "ret");
                                op.val.store(&bx, scratch);
                                scratch.llval
                            }
                            Ref(llval, _, align) => {
                                assert_eq!(align.abi(), op.layout.align.abi(),
                                           "return place is unaligned!");
                                llval
                            }
                        };
                        bx.load(
                            bx.pointercast(llslot, bx.cx().type_ptr_to(
                                bx.cx().cast_backend_type(&cast_ty)
                            )),
                            self.fn_ty.ret.layout.align)
                    }
                };
                bx.ret(llval);
            }

            mir::TerminatorKind::Unreachable => {
                bx.unreachable();
            }

            mir::TerminatorKind::Drop { ref location, target, unwind } => {
                let ty = location.ty(self.mir, bx.tcx()).to_ty(bx.tcx());
                let ty = self.monomorphize(&ty);
                let drop_fn = monomorphize::resolve_drop_in_place(bx.cx().tcx(), ty);

                if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
                    // we don't actually need to drop anything.
                    funclet_br(self, &bx, target);
                    return
                }

                let place = self.codegen_place(&bx, location);
                let (args1, args2);
                let mut args = if let Some(llextra) = place.llextra {
                    args2 = [place.llval, llextra];
                    &args2[..]
                } else {
                    args1 = [place.llval];
                    &args1[..]
                };
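                // Trait objects find their destructor through the vtable;
                // everything else resolves concrete drop glue directly.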
                let (drop_fn, fn_ty) = match ty.sty {
                    ty::Dynamic(..) => {
                        let sig = drop_fn.fn_sig(tcx);
                        let sig = tcx.normalize_erasing_late_bound_regions(
                            ty::ParamEnv::reveal_all(),
                            &sig,
                        );
                        let fn_ty = bx.cx().new_vtable(sig, &[]);
                        let vtable = args[1];
                        args = &args[..1];
                        (meth::DESTRUCTOR.get_fn(&bx, vtable, &fn_ty), fn_ty)
                    }
                    _ => {
                        (bx.cx().get_fn(drop_fn),
                         bx.cx().fn_type_of_instance(&drop_fn))
                    }
                };
                do_call(self, &bx, fn_ty, drop_fn, args,
                        Some((ReturnDest::Nothing, target)),
                        unwind);
            }

            mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
                let cond = self.codegen_operand(&bx, cond).immediate();
                let mut const_cond = bx.cx().const_to_opt_u128(cond, false).map(|c| c == 1);

                // This case can currently arise only from functions marked
                // with #[rustc_inherit_overflow_checks] and inlined from
                // another crate (mostly core::num generic/#[inline] fns),
                // while the current crate doesn't use overflow checks.
                // NOTE: Unlike binops, negation doesn't have its own
                // checked operation, just a comparison with the minimum
                // value, so we have to check for the assert message.
                if !bx.cx().check_overflow() {
                    if let mir::interpret::EvalErrorKind::OverflowNeg = *msg {
                        const_cond = Some(expected);
                    }
                }

                // Don't codegen the panic block if success is known.
                if const_cond == Some(expected) {
                    funclet_br(self, &bx, target);
                    return;
                }

                // Pass the condition through llvm.expect for branch hinting.
                let expect = bx.cx().get_intrinsic(&"llvm.expect.i1");
                let cond = bx.call(expect, &[cond, bx.cx().const_bool(expected)], None);

                // Create the failure block and the conditional branch to it.
                let lltarget = llblock(self, target);
                let panic_block = self.new_block("panic");
                if expected {
                    bx.cond_br(cond, lltarget, panic_block.llbb());
                } else {
                    bx.cond_br(cond, panic_block.llbb(), lltarget);
                }

                // After this point, bx is the block for the call to panic.
                bx = panic_block;
                self.set_debug_loc(&bx, terminator.source_info);

                // Get the location information.
                let loc = bx.cx().sess().source_map().lookup_char_pos(span.lo());
                let filename = Symbol::intern(&loc.file.name.to_string()).as_str();
                let filename = bx.cx().const_str_slice(filename);
                let line = bx.cx().const_u32(loc.line as u32);
                let col = bx.cx().const_u32(loc.col.to_usize() as u32 + 1);
                let align = tcx.data_layout.aggregate_align
                    .max(tcx.data_layout.i32_align)
                    .max(tcx.data_layout.pointer_align);

                // Put together the arguments to the panic entry point.
                let (lang_item, args) = match *msg {
                    EvalErrorKind::BoundsCheck { ref len, ref index } => {
                        let len = self.codegen_operand(&bx, len).immediate();
                        let index = self.codegen_operand(&bx, index).immediate();

                        let file_line_col = bx.cx().const_struct(&[filename, line, col], false);
                        let file_line_col = bx.cx().static_addr_of(
                            file_line_col,
                            align,
                            Some("panic_bounds_check_loc")
                        );
                        (lang_items::PanicBoundsCheckFnLangItem,
                         vec![file_line_col, index, len])
                    }
                    _ => {
                        let str = msg.description();
                        let msg_str = Symbol::intern(str).as_str();
                        let msg_str = bx.cx().const_str_slice(msg_str);
                        let msg_file_line_col = bx.cx().const_struct(
                            &[msg_str, filename, line, col],
                            false
                        );
                        let msg_file_line_col = bx.cx().static_addr_of(
                            msg_file_line_col,
                            align,
                            Some("panic_loc")
                        );
                        (lang_items::PanicFnLangItem,
                         vec![msg_file_line_col])
                    }
                };

                // Obtain the panic entry point.
                let def_id = common::langcall(bx.tcx(), Some(span), "", lang_item);
                let instance = ty::Instance::mono(bx.tcx(), def_id);
                let fn_ty = bx.cx().fn_type_of_instance(&instance);
                let llfn = bx.cx().get_fn(instance);

                // Codegen the actual panic invoke/call.
                do_call(self, &bx, fn_ty, llfn, &args, None, cleanup);
            }

            mir::TerminatorKind::DropAndReplace { .. } => {
                bug!("undesugared DropAndReplace in codegen: {:?}", terminator);
            }

            mir::TerminatorKind::Call {
                ref func,
                ref args,
                ref destination,
                cleanup,
                from_hir_call: _
            } => {
                // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
                let callee = self.codegen_operand(&bx, func);

                let (instance, mut llfn) = match callee.layout.ty.sty {
                    ty::FnDef(def_id, substs) => {
                        (Some(ty::Instance::resolve(bx.cx().tcx(),
                                                    ty::ParamEnv::reveal_all(),
                                                    def_id,
                                                    substs).unwrap()),
                         None)
                    }
                    ty::FnPtr(_) => {
                        (None, Some(callee.immediate()))
                    }
                    _ => bug!("{} is not callable", callee.layout.ty)
                };
                let def = instance.map(|i| i.def);
                let sig = callee.layout.ty.fn_sig(bx.tcx());
                let sig = bx.tcx().normalize_erasing_late_bound_regions(
                    ty::ParamEnv::reveal_all(),
                    &sig,
                );
                let abi = sig.abi;

                // Handle ourselves the intrinsics that the old codegen wanted `Expr`s for.
                let intrinsic = match def {
                    Some(ty::InstanceDef::Intrinsic(def_id))
                        => Some(bx.tcx().item_name(def_id).as_str()),
                    _ => None
                };
                let intrinsic = intrinsic.as_ref().map(|s| &s[..]);

                if intrinsic == Some("transmute") {
                    if let Some(destination_ref) = destination.as_ref() {
                        let &(ref dest, target) = destination_ref;
                        self.codegen_transmute(&bx, &args[0], dest);
                        funclet_br(self, &bx, target);
                    } else {
                        // If we are trying to transmute to an uninhabited type,
                        // it is likely there is no allotted destination. In fact,
                        // transmuting to an uninhabited type is UB, which means
                        // we can do what we like. Here, we declare that transmuting
                        // into an uninhabited type is impossible, so anything following
                        // it must be unreachable.
                        assert_eq!(bx.cx().layout_of(sig.output()).abi, layout::Abi::Uninhabited);
                        bx.unreachable();
                    }
                    return;
                }

                let extra_args = &args[sig.inputs().len()..];
                let extra_args = extra_args.iter().map(|op_arg| {
                    let op_ty = op_arg.ty(self.mir, bx.tcx());
                    self.monomorphize(&op_ty)
                }).collect::<Vec<_>>();

                let fn_ty = match def {
                    Some(ty::InstanceDef::Virtual(..)) => {
                        bx.cx().new_vtable(sig, &extra_args)
                    }
                    Some(ty::InstanceDef::DropGlue(_, None)) => {
                        // empty drop glue - a nop.
                        let &(_, target) = destination.as_ref().unwrap();
                        funclet_br(self, &bx, target);
                        return;
                    }
                    _ => bx.cx().new_fn_type(sig, &extra_args)
                };

                // emit a panic instead of instantiating an uninhabited type
                if (intrinsic == Some("init") || intrinsic == Some("uninit")) &&
                    fn_ty.ret.layout.abi.is_uninhabited()
                {
                    let loc = bx.cx().sess().source_map().lookup_char_pos(span.lo());
                    let filename = Symbol::intern(&loc.file.name.to_string()).as_str();
                    let filename = bx.cx().const_str_slice(filename);
                    let line = bx.cx().const_u32(loc.line as u32);
                    let col = bx.cx().const_u32(loc.col.to_usize() as u32 + 1);
                    let align = tcx.data_layout.aggregate_align
                        .max(tcx.data_layout.i32_align)
                        .max(tcx.data_layout.pointer_align);

                    let str = format!(
                        "Attempted to instantiate uninhabited type {} using mem::{}",
                        sig.output(),
                        if intrinsic == Some("init") { "zeroed" } else { "uninitialized" }
                    );
                    let msg_str = Symbol::intern(&str).as_str();
                    let msg_str = bx.cx().const_str_slice(msg_str);
                    let msg_file_line_col = bx.cx().const_struct(
                        &[msg_str, filename, line, col],
                        false,
                    );
                    let msg_file_line_col = bx.cx().static_addr_of(
                        msg_file_line_col,
                        align,
                        Some("panic_loc"),
                    );

                    // Obtain the panic entry point.
                    let def_id =
                        common::langcall(bx.tcx(), Some(span), "", lang_items::PanicFnLangItem);
                    let instance = ty::Instance::mono(bx.tcx(), def_id);
                    let fn_ty = bx.cx().fn_type_of_instance(&instance);
                    let llfn = bx.cx().get_fn(instance);

                    // Codegen the actual panic invoke/call.
                    do_call(
                        self,
                        &bx,
                        fn_ty,
                        llfn,
                        &[msg_file_line_col],
                        destination.as_ref().map(|(_, bb)| (ReturnDest::Nothing, *bb)),
                        cleanup,
                    );
                    return;
                }

                // The arguments we'll be passing. Plus one to account for outptr, if used.
                let arg_count = fn_ty.args.len() + fn_ty.ret.is_indirect() as usize;
                let mut llargs = Vec::with_capacity(arg_count);

                // Prepare the return value destination
                let ret_dest = if let Some((ref dest, _)) = *destination {
                    let is_intrinsic = intrinsic.is_some();
                    self.make_return_dest(&bx, dest, &fn_ty.ret, &mut llargs,
                                          is_intrinsic)
                } else {
                    ReturnDest::Nothing
                };

                if intrinsic.is_some() && intrinsic != Some("drop_in_place") {
                    let dest = match ret_dest {
                        _ if fn_ty.ret.is_indirect() => llargs[0],
                        ReturnDest::Nothing => {
                            bx.cx().const_undef(bx.cx().type_ptr_to(bx.memory_ty(&fn_ty.ret)))
                        }
                        ReturnDest::IndirectOperand(dst, _) |
                        ReturnDest::Store(dst) => dst.llval,
                        ReturnDest::DirectOperand(_) =>
                            bug!("Cannot use direct operand with an intrinsic call")
                    };

                    let args: Vec<_> = args.iter().enumerate().map(|(i, arg)| {
                        // The indices passed to simd_shuffle* in the
                        // third argument must be constant. This is
                        // checked by const-qualification, which also
                        // promotes any complex rvalues to constants.
                        if i == 2 && intrinsic.unwrap().starts_with("simd_shuffle") {
                            match *arg {
                                // The shuffle array argument is usually not an explicit constant,
                                // but specified directly in the code. This means it gets promoted
                                // and we can then extract the value by evaluating the promoted.
                                mir::Operand::Copy(mir::Place::Promoted(box(index, ty))) |
                                mir::Operand::Move(mir::Place::Promoted(box(index, ty))) => {
                                    let param_env = ty::ParamEnv::reveal_all();
                                    let cid = mir::interpret::GlobalId {
                                        instance: self.instance,
                                        promoted: Some(index),
                                    };
                                    let c = bx.tcx().const_eval(param_env.and(cid));
                                    let (llval, ty) = self.simd_shuffle_indices(
                                        &bx,
                                        terminator.source_info.span,
                                        ty,
                                        c,
                                    );
                                    return OperandRef {
                                        val: Immediate(llval),
                                        layout: bx.cx().layout_of(ty),
                                    };
                                },
                                mir::Operand::Copy(_) |
                                mir::Operand::Move(_) => {
                                    span_bug!(span, "shuffle indices must be constant");
                                }
                                mir::Operand::Constant(ref constant) => {
                                    let c = self.eval_mir_constant(&bx, constant);
                                    let (llval, ty) = self.simd_shuffle_indices(
                                        &bx,
                                        constant.span,
                                        constant.ty,
                                        c,
                                    );
                                    return OperandRef {
                                        val: Immediate(llval),
                                        layout: bx.cx().layout_of(ty)
                                    };
                                }
                            }
                        }

                        self.codegen_operand(&bx, arg)
                    }).collect();

                    let callee_ty = instance.as_ref().unwrap().ty(bx.cx().tcx());
                    bx.codegen_intrinsic_call(callee_ty, &fn_ty, &args, dest,
                                              terminator.source_info.span);

                    if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
                        self.store_return(&bx, ret_dest, &fn_ty.ret, dst.llval);
                    }

                    if let Some((_, target)) = *destination {
                        funclet_br(self, &bx, target);
                    } else {
                        bx.unreachable();
                    }

                    return;
                }

                // Split the rust-call tupled arguments off.
                let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
                    let (tup, args) = args.split_last().unwrap();
                    (args, Some(tup))
                } else {
                    (&args[..], None)
                };

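                // Codegen each argument in ABI order. The first argument of a
                // virtual call is the receiver and needs special handling: it
                // is split into a data pointer and a vtable, and the method
                // pointer is looked up in the vtable.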
                'make_args: for (i, arg) in first_args.iter().enumerate() {
                    let mut op = self.codegen_operand(&bx, arg);

                    if let (0, Some(ty::InstanceDef::Virtual(_, idx))) = (i, def) {
                        if let Pair(..) = op.val {
                            // In the case of Rc<Self>, we need to explicitly pass a
                            // *mut RcBox<Self> with a Scalar (not ScalarPair) ABI. This is a hack
                            // that is understood elsewhere in the compiler as a method on
                            // `dyn Trait`.
                            // To get a `*mut RcBox<Self>`, we just keep unwrapping newtypes until
                            // we get a value of a built-in pointer type
                            'descend_newtypes: while !op.layout.ty.is_unsafe_ptr()
                                            && !op.layout.ty.is_region_ptr()
                            {
                                'iter_fields: for i in 0..op.layout.fields.count() {
                                    let field = op.extract_field(&bx, i);
                                    if !field.layout.is_zst() {
                                        // we found the one non-zero-sized field that is allowed
                                        // now find *its* non-zero-sized field, or stop if it's a
                                        // pointer
                                        op = field;
                                        continue 'descend_newtypes
                                    }
                                }

                                span_bug!(span, "receiver has no non-zero-sized fields {:?}", op);
                            }

                            // now that we have `*dyn Trait` or `&dyn Trait`, split it up into its
                            // data pointer and vtable. Look up the method in the vtable, and pass
                            // the data pointer as the first argument
                            match op.val {
                                Pair(data_ptr, meta) => {
                                    llfn = Some(meth::VirtualIndex::from_index(idx)
                                        .get_fn(&bx, meta, &fn_ty));
                                    llargs.push(data_ptr);
                                    continue 'make_args
                                }
                                other => bug!("expected a Pair, got {:?}", other)
                            }
                        } else if let Ref(data_ptr, Some(meta), _) = op.val {
                            // by-value dynamic dispatch
                            llfn = Some(meth::VirtualIndex::from_index(idx)
                                .get_fn(&bx, meta, &fn_ty));
                            llargs.push(data_ptr);
                            continue;
                        } else {
                            span_bug!(span, "can't codegen a virtual call on {:?}", op);
                        }
                    }

                    // The callee needs to own the argument memory if we pass it
                    // by-ref, so make a local copy of non-immediate constants.
                    match (arg, op.val) {
                        (&mir::Operand::Copy(_), Ref(_, None, _)) |
                        (&mir::Operand::Constant(_), Ref(_, None, _)) => {
                            let tmp = PlaceRef::alloca(&bx, op.layout, "const");
                            op.val.store(&bx, tmp);
                            op.val = Ref(tmp.llval, None, tmp.align);
                        }
                        _ => {}
                    }

                    self.codegen_argument(&bx, op, &mut llargs, &fn_ty.args[i]);
                }
                if let Some(tup) = untuple {
                    self.codegen_arguments_untupled(&bx, tup, &mut llargs,
                        &fn_ty.args[first_args.len()..])
                }

                let fn_ptr = match (llfn, instance) {
                    (Some(llfn), _) => llfn,
                    (None, Some(instance)) => bx.cx().get_fn(instance),
                    _ => span_bug!(span, "no llfn for call"),
                };

                do_call(self, &bx, fn_ty, fn_ptr, &llargs,
                        destination.as_ref().map(|&(_, target)| (ret_dest, target)),
                        cleanup);
            }
            mir::TerminatorKind::GeneratorDrop |
            mir::TerminatorKind::Yield { .. } => bug!("generator ops in codegen"),
            mir::TerminatorKind::FalseEdges { .. } |
            mir::TerminatorKind::FalseUnwind { .. } => bug!("borrowck false edges in codegen"),
        }
    }

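    /// Lowers one operand into the argument list of a call, following the
    /// ABI's `PassMode` for it: pair and unsized-indirect arguments expand
    /// into two values, while indirect and cast modes may spill the value to
    /// a temporary alloca first.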
    fn codegen_argument(
        &mut self,
        bx: &Bx,
        op: OperandRef<'tcx, Bx::Value>,
        llargs: &mut Vec<Bx::Value>,
        arg: &ArgType<'tcx, Ty<'tcx>>
    ) {
        // Fill padding with undef value, where applicable.
        if let Some(ty) = arg.pad {
            llargs.push(bx.cx().const_undef(bx.cx().reg_backend_type(&ty)))
        }

        if arg.is_ignore() {
            return;
        }

        if let PassMode::Pair(..) = arg.mode {
            match op.val {
                Pair(a, b) => {
                    llargs.push(a);
                    llargs.push(b);
                    return;
                }
                _ => bug!("codegen_argument: {:?} invalid for pair argument", op)
            }
        } else if arg.is_unsized_indirect() {
            match op.val {
                Ref(a, Some(b), _) => {
                    llargs.push(a);
                    llargs.push(b);
                    return;
                }
                _ => bug!("codegen_argument: {:?} invalid for unsized indirect argument", op)
            }
        }

        // Force by-ref if we have to load through a cast pointer.
        let (mut llval, align, by_ref) = match op.val {
            Immediate(_) | Pair(..) => {
                match arg.mode {
                    PassMode::Indirect(..) | PassMode::Cast(_) => {
                        let scratch = PlaceRef::alloca(bx, arg.layout, "arg");
                        op.val.store(bx, scratch);
                        (scratch.llval, scratch.align, true)
                    }
                    _ => {
                        (op.immediate_or_packed_pair(bx), arg.layout.align, false)
                    }
                }
            }
            Ref(llval, _, align) => {
                if arg.is_indirect() && align.abi() < arg.layout.align.abi() {
                    // `foo(packed.large_field)`. We can't pass the (unaligned) field
                    // directly, so copy it into a properly aligned temporary. At the
                    // moment (Rust 1.16) we probably only pass temporaries here anyway,
                    // but copying avoids leaving scary latent bugs around.
                    let scratch = PlaceRef::alloca(bx, arg.layout, "arg");
                    base::memcpy_ty(bx, scratch.llval, scratch.align, llval, align,
                                    op.layout, MemFlags::empty());
                    (scratch.llval, scratch.align, true)
                } else {
                    (llval, align, true)
                }
            }
        };

        if by_ref && !arg.is_indirect() {
            // Have to load the argument, maybe while casting it.
            if let PassMode::Cast(ty) = arg.mode {
                llval = bx.load(bx.pointercast(llval, bx.cx().type_ptr_to(
                    bx.cx().cast_backend_type(&ty))
                ), align.min(arg.layout.align));
            } else {
                // We can't use `PlaceRef::load` here because the argument
                // may have a type we don't treat as immediate, but the ABI
                // used for this call is passing it by-value. In that case,
                // the load would just produce `OperandValue::Ref` instead
                // of the `OperandValue::Immediate` we need for the call.
                llval = bx.load(llval, align);
                if let layout::Abi::Scalar(ref scalar) = arg.layout.abi {
                    if scalar.is_bool() {
                        bx.range_metadata(llval, 0..2);
                    }
                }
                // We store bools as i8 so we need to truncate to i1.
                llval = base::to_immediate(bx, llval, arg.layout);
            }
        }

        llargs.push(llval);
    }

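    /// Expands the tupled arguments of a "rust-call" ABI function (e.g. a
    /// closure call) into individual call arguments.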
    fn codegen_arguments_untupled(
        &mut self,
        bx: &Bx,
        operand: &mir::Operand<'tcx>,
        llargs: &mut Vec<Bx::Value>,
        args: &[ArgType<'tcx, Ty<'tcx>>]
    ) {
        let tuple = self.codegen_operand(bx, operand);

        // Handle both by-ref and immediate tuples.
        if let Ref(llval, None, align) = tuple.val {
            let tuple_ptr = PlaceRef::new_sized(llval, tuple.layout, align);
            for i in 0..tuple.layout.fields.count() {
                let field_ptr = tuple_ptr.project_field(bx, i);
                self.codegen_argument(bx, bx.load_operand(field_ptr), llargs, &args[i]);
            }
        } else if let Ref(_, Some(_), _) = tuple.val {
            bug!("closure arguments must be sized")
        } else {
            // If the tuple is immediate, the elements are as well.
            for i in 0..tuple.layout.fields.count() {
                let op = tuple.extract_field(bx, i);
                self.codegen_argument(bx, op, llargs, &args[i]);
            }
        }
    }

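    /// Returns the (lazily created) stack slot that holds the exception
    /// pointer and selector produced by a landing pad, typed `(*mut u8, i32)`.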
    fn get_personality_slot(
        &mut self,
        bx: &Bx
    ) -> PlaceRef<'tcx, Bx::Value> {
        let cx = bx.cx();
        if let Some(slot) = self.personality_slot {
            slot
        } else {
            let layout = cx.layout_of(cx.tcx().intern_tup(&[
                cx.tcx().mk_mut_ptr(cx.tcx().types.u8),
                cx.tcx().types.i32
            ]));
            let slot = PlaceRef::alloca(bx, layout, "personalityslot");
            self.personality_slot = Some(slot);
            slot
        }
    }

    /// Return the landingpad wrapper around the given basic block
    ///
    /// No-op in MSVC SEH scheme.
    fn landing_pad_to(
        &mut self,
        target_bb: mir::BasicBlock
    ) -> Bx::BasicBlock {
        if let Some(block) = self.landing_pads[target_bb] {
            return block;
        }

        let block = self.blocks[target_bb];
        let landing_pad = self.landing_pad_uncached(block);
        self.landing_pads[target_bb] = Some(landing_pad);
        landing_pad
    }

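    /// Creates a `cleanup` landing pad (GNU-style unwinding only) that spills
    /// the exception pointer/selector pair into the personality slot before
    /// branching on to `target_bb`.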
    fn landing_pad_uncached(
        &mut self,
        target_bb: Bx::BasicBlock
    ) -> Bx::BasicBlock {
        if base::wants_msvc_seh(self.cx.sess()) {
            span_bug!(self.mir.span, "landing pad was not inserted?")
        }

        let bx = self.new_block("cleanup");

        let llpersonality = self.cx.eh_personality();
        let llretty = self.landing_pad_type();
        let lp = bx.landing_pad(llretty, llpersonality, 1);
        bx.set_cleanup(lp);

        let slot = self.get_personality_slot(&bx);
        slot.storage_live(&bx);
        Pair(bx.extract_value(lp, 0), bx.extract_value(lp, 1)).store(&bx, slot);

        bx.br(target_bb);
        bx.llbb()
    }

    fn landing_pad_type(&self) -> Bx::Type {
        let cx = self.cx;
        cx.type_struct(&[cx.type_i8p(), cx.type_i32()], false)
    }

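    /// Returns a shared block containing only `unreachable`, creating it on
    /// first use.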
    fn unreachable_block(
        &mut self
    ) -> Bx::BasicBlock {
        self.unreachable_block.unwrap_or_else(|| {
            let bx = self.new_block("unreachable");
            bx.unreachable();
            self.unreachable_block = Some(bx.llbb());
            bx.llbb()
        })
    }

    pub fn new_block(&self, name: &str) -> Bx {
        Bx::new_block(self.cx, self.llfn, name)
    }

    pub fn build_block(
        &self,
        bb: mir::BasicBlock
    ) -> Bx {
        let bx = Bx::with_cx(self.cx);
        bx.position_at_end(self.blocks[bb]);
        bx
    }

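    /// Decides where a call's return value will be written: pushes an out
    /// pointer onto `llargs` for indirect returns, and otherwise describes
    /// the destination place or operand local via `ReturnDest`.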
    fn make_return_dest(
        &mut self,
        bx: &Bx,
        dest: &mir::Place<'tcx>,
        fn_ret: &ArgType<'tcx, Ty<'tcx>>,
        llargs: &mut Vec<Bx::Value>, is_intrinsic: bool
    ) -> ReturnDest<'tcx, Bx::Value> {
        // If the return is ignored, we can just return a do-nothing ReturnDest
        if fn_ret.is_ignore() {
            return ReturnDest::Nothing;
        }
        let dest = if let mir::Place::Local(index) = *dest {
            match self.locals[index] {
                LocalRef::Place(dest) => dest,
                LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                LocalRef::Operand(None) => {
                    // Handle temporary places, specifically Operand ones, as
                    // they don't have allocas
                    return if fn_ret.is_indirect() {
                        // Odd, but possible, case: we have an operand temporary,
                        // but the calling convention has an indirect return.
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout, "tmp_ret");
                        tmp.storage_live(bx);
                        llargs.push(tmp.llval);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else if is_intrinsic {
                        // Currently, intrinsics always need a location to store
                        // the result, so we create a temporary alloca for the
                        // result.
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout, "tmp_ret");
                        tmp.storage_live(bx);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else {
                        ReturnDest::DirectOperand(index)
                    };
                }
                LocalRef::Operand(Some(_)) => {
                    bug!("place local already assigned to");
                }
            }
        } else {
            self.codegen_place(bx, dest)
        };
        if fn_ret.is_indirect() {
            if dest.align.abi() < dest.layout.align.abi() {
                // Currently, MIR code generation does not create calls
                // that store directly to fields of packed structs (in
                // fact, the calls it creates write only to temps).
                //
                // If someone changes that, please update this code path
                // to create a temporary.
                span_bug!(self.mir.span, "can't directly store to unaligned value");
            }
            llargs.push(dest.llval);
            ReturnDest::Nothing
        } else {
            ReturnDest::Store(dest)
        }
    }

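    /// Codegens a `transmute` into `dst`, materializing a temporary alloca
    /// when the destination is an operand local with no backing memory.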
    fn codegen_transmute(
        &mut self,
        bx: &Bx,
        src: &mir::Operand<'tcx>,
        dst: &mir::Place<'tcx>
    ) {
        if let mir::Place::Local(index) = *dst {
            match self.locals[index] {
                LocalRef::Place(place) => self.codegen_transmute_into(bx, src, place),
                LocalRef::UnsizedPlace(_) => bug!("transmute must not involve unsized locals"),
                LocalRef::Operand(None) => {
                    let dst_layout = bx.cx().layout_of(self.monomorphized_place_ty(dst));
                    assert!(!dst_layout.ty.has_erasable_regions());
                    let place = PlaceRef::alloca(bx, dst_layout, "transmute_temp");
                    place.storage_live(bx);
                    self.codegen_transmute_into(bx, src, place);
                    let op = bx.load_operand(place);
                    place.storage_dead(bx);
                    self.locals[index] = LocalRef::Operand(Some(op));
                }
                LocalRef::Operand(Some(op)) => {
                    assert!(op.layout.is_zst(),
                            "assigning to initialized SSA temp");
                }
            }
        } else {
            let dst = self.codegen_place(bx, dst);
            self.codegen_transmute_into(bx, src, dst);
        }
    }

    fn codegen_transmute_into(
        &mut self,
        bx: &Bx,
        src: &mir::Operand<'tcx>,
        dst: PlaceRef<'tcx, Bx::Value>
    ) {
        let src = self.codegen_operand(bx, src);
        let llty = bx.cx().backend_type(src.layout);
        let cast_ptr = bx.pointercast(dst.llval, bx.cx().type_ptr_to(llty));
        let align = src.layout.align.min(dst.layout.align);
        src.val.store(bx, PlaceRef::new_sized(cast_ptr, src.layout, align));
    }

    // Stores the return value of a function call into its final location.
    fn store_return(
        &mut self,
        bx: &Bx,
        dest: ReturnDest<'tcx, Bx::Value>,
        ret_ty: &ArgType<'tcx, Ty<'tcx>>,
        llval: Bx::Value
    ) {
        use self::ReturnDest::*;

        match dest {
            Nothing => (),
            Store(dst) => bx.store_arg_ty(&ret_ty, llval, dst),
            IndirectOperand(tmp, index) => {
                let op = bx.load_operand(tmp);
                tmp.storage_dead(bx);
                self.locals[index] = LocalRef::Operand(Some(op));
            }
            DirectOperand(index) => {
                // If there is a cast, we have to store and reload.
                let op = if let PassMode::Cast(_) = ret_ty.mode {
                    let tmp = PlaceRef::alloca(bx, ret_ty.layout, "tmp_ret");
                    tmp.storage_live(bx);
                    bx.store_arg_ty(&ret_ty, llval, tmp);
                    let op = bx.load_operand(tmp);
                    tmp.storage_dead(bx);
                    op
                } else {
                    OperandRef::from_immediate_or_packed_pair(bx, llval, ret_ty.layout)
                };
                self.locals[index] = LocalRef::Operand(Some(op));
            }
        }
    }
}

enum ReturnDest<'tcx, V> {
    // Do nothing, the return value is indirect or ignored.
    Nothing,
    // Store the return value to the pointer.
    Store(PlaceRef<'tcx, V>),
    // Stores an indirect return value to an operand local place.
    IndirectOperand(PlaceRef<'tcx, V>, mir::Local),
    // Stores a direct return value to an operand local place.
    DirectOperand(mir::Local)
}