compiler/rustc_codegen_ssa/src/mir/block.rs
1 use super::operand::OperandRef;
2 use super::operand::OperandValue::{Immediate, Pair, Ref};
3 use super::place::PlaceRef;
4 use super::{FunctionCx, LocalRef};
5
6 use crate::base;
7 use crate::common::{self, IntPredicate};
8 use crate::meth;
9 use crate::traits::*;
10 use crate::MemFlags;
11
12 use rustc_ast as ast;
13 use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
14 use rustc_hir::lang_items::LangItem;
15 use rustc_index::vec::Idx;
16 use rustc_middle::mir::{self, AssertKind, SwitchTargets};
17 use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf};
18 use rustc_middle::ty::print::{with_no_trimmed_paths, with_no_visible_paths};
19 use rustc_middle::ty::{self, Instance, Ty, TypeVisitable};
20 use rustc_span::source_map::Span;
21 use rustc_span::{sym, Symbol};
22 use rustc_symbol_mangling::typeid::typeid_for_fnabi;
23 use rustc_target::abi::call::{ArgAbi, FnAbi, PassMode, Reg};
24 use rustc_target::abi::{self, HasDataLayout, WrappingRange};
25 use rustc_target::spec::abi::Abi;
26
27 /// Used by `FunctionCx::codegen_terminator` for emitting common patterns
28 /// e.g., creating a basic block, calling a function, etc.
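 /// `bb` is the MIR block whose terminator is being lowered; `funclet_bb` is
 /// the cleanup block owning the funclet that `bb` belongs to, if any.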
29 struct TerminatorCodegenHelper<'tcx> {
30     bb: mir::BasicBlock,
31     terminator: &'tcx mir::Terminator<'tcx>,
32     funclet_bb: Option<mir::BasicBlock>,
33 }
34
35 impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
36     /// Returns the appropriate `Funclet` for the current funclet, if on MSVC,
37     /// either previously cached or newly created by `landing_pad_for`.
38     fn funclet<'b, Bx: BuilderMethods<'a, 'tcx>>(
39         &self,
40         fx: &'b mut FunctionCx<'a, 'tcx, Bx>,
41     ) -> Option<&'b Bx::Funclet> {
42         let funclet_bb = self.funclet_bb?;
43         if base::wants_msvc_seh(fx.cx.tcx().sess) {
44             // If `landing_pad_for` hasn't been called yet to create the `Funclet`,
45             // it has to be now. This may not seem necessary, as RPO should lead
46             // to all the unwind edges being visited (and so to `landing_pad_for`
47             // getting called for them), before building any of the blocks inside
48             // the funclet itself - however, if MIR contains edges that end up not
49             // being needed in the LLVM IR after monomorphization, the funclet may
50             // be unreachable, and we don't yet have a way to skip building it in
51             // such an eventuality (which may be a better solution than this).
52             if fx.funclets[funclet_bb].is_none() {
53                 fx.landing_pad_for(funclet_bb);
54             }
55
56             Some(
57                 fx.funclets[funclet_bb]
58                     .as_ref()
59                     .expect("landing_pad_for didn't also create funclets entry"),
60             )
61         } else {
62             None
63         }
64     }
65
66     /// Get a basic block (creating it if necessary), possibly with a landing
67     /// pad next to it.
68     fn llbb_with_landing_pad<Bx: BuilderMethods<'a, 'tcx>>(
69         &self,
70         fx: &mut FunctionCx<'a, 'tcx, Bx>,
71         target: mir::BasicBlock,
72     ) -> (Bx::BasicBlock, bool) {
73         let span = self.terminator.source_info.span;
74         let lltarget = fx.llbb(target);
75         let target_funclet = fx.cleanup_kinds[target].funclet_bb(target);
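        // The `bool` in the result is true when this is an MSVC cross-funclet
        // jump, i.e. the branch must be a `cleanupret` rather than a plain `br`.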
76         match (self.funclet_bb, target_funclet) {
77             (None, None) => (lltarget, false),
78             // jump *into* cleanup - need a landing pad if GNU, cleanup pad if MSVC
79             (None, Some(_)) => (fx.landing_pad_for(target), false),
80             (Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", self.terminator),
81             (Some(f), Some(t_f)) => {
82                 if f == t_f || !base::wants_msvc_seh(fx.cx.tcx().sess) {
83                     (lltarget, false)
84                 } else {
85                     (fx.landing_pad_for(target), true)
86                 }
87             }
88         }
89     }
90
91     /// Get a basic block (creating it if necessary), possibly routed through a
92     /// landing pad or an MSVC cleanup trampoline when the edge requires it.
93     fn llbb_with_cleanup<Bx: BuilderMethods<'a, 'tcx>>(
94         &self,
95         fx: &mut FunctionCx<'a, 'tcx, Bx>,
96         target: mir::BasicBlock,
97     ) -> Bx::BasicBlock {
98         let (lltarget, is_cleanupret) = self.llbb_with_landing_pad(fx, target);
99         if is_cleanupret {
100             // MSVC cross-funclet jump - need a trampoline
101             debug_assert!(base::wants_msvc_seh(fx.cx.tcx().sess));
102             debug!("llbb_with_cleanup: creating cleanup trampoline for {:?}", target);
103             let name = &format!("{:?}_cleanup_trampoline_{:?}", self.bb, target);
104             let trampoline_llbb = Bx::append_block(fx.cx, fx.llfn, name);
105             let mut trampoline_bx = Bx::build(fx.cx, trampoline_llbb);
106             trampoline_bx.cleanup_ret(self.funclet(fx).unwrap(), Some(lltarget));
107             trampoline_llbb
108         } else {
109             lltarget
110         }
111     }
112
113     fn funclet_br<Bx: BuilderMethods<'a, 'tcx>>(
114         &self,
115         fx: &mut FunctionCx<'a, 'tcx, Bx>,
116         bx: &mut Bx,
117         target: mir::BasicBlock,
118     ) {
119         let (lltarget, is_cleanupret) = self.llbb_with_landing_pad(fx, target);
120         if is_cleanupret {
121             // MSVC micro-optimization: generate a `ret` rather than a jump
122             // to a trampoline.
123             debug_assert!(base::wants_msvc_seh(fx.cx.tcx().sess));
124             bx.cleanup_ret(self.funclet(fx).unwrap(), Some(lltarget));
125         } else {
126             bx.br(lltarget);
127         }
128     }
129
130     /// Call `fn_ptr`, whose ABI is `fn_abi`, with the arguments `llargs`, the
131     /// optional return destination `destination`, and the cleanup block `cleanup`.
132     fn do_call<Bx: BuilderMethods<'a, 'tcx>>(
133         &self,
134         fx: &mut FunctionCx<'a, 'tcx, Bx>,
135         bx: &mut Bx,
136         fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,
137         fn_ptr: Bx::Value,
138         llargs: &[Bx::Value],
139         destination: Option<(ReturnDest<'tcx, Bx::Value>, mir::BasicBlock)>,
140         cleanup: Option<mir::BasicBlock>,
141         copied_constant_arguments: &[PlaceRef<'tcx, <Bx as BackendTypes>::Value>],
142     ) {
143         // If there is a cleanup block and the function we're calling can unwind, then
144         // do an invoke, otherwise do a call.
145         let fn_ty = bx.fn_decl_backend_type(&fn_abi);
146
147         let unwind_block = if let Some(cleanup) = cleanup.filter(|_| fn_abi.can_unwind) {
148             Some(self.llbb_with_cleanup(fx, cleanup))
149         } else if fx.mir[self.bb].is_cleanup
150             && fn_abi.can_unwind
151             && !base::wants_msvc_seh(fx.cx.tcx().sess)
152         {
153             // Exceptions must not propagate out of the execution of a cleanup (doing so
154             // can cause undefined behaviour). We insert a double unwind guard for
155             // functions that can potentially unwind to protect against this.
156             //
157             // This is not necessary for SEH which does not use successive unwinding
158             // like Itanium EH. EH frames in SEH are different from normal function
159             // frames and SEH will abort automatically if an exception tries to
160             // propagate out from cleanup.
161             Some(fx.double_unwind_guard())
162         } else {
163             None
164         };
165
166         if let Some(unwind_block) = unwind_block {
167             let ret_llbb = if let Some((_, target)) = destination {
168                 fx.llbb(target)
169             } else {
170                 fx.unreachable_block()
171             };
172             let invokeret = bx.invoke(
173                 fn_ty,
174                 Some(&fn_abi),
175                 fn_ptr,
176                 &llargs,
177                 ret_llbb,
178                 unwind_block,
179                 self.funclet(fx),
180             );
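            // As on the plain `call` path below, calls issued from cleanup blocks
            // are cold and must not be inlined (see issue #41696).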
181             if fx.mir[self.bb].is_cleanup {
182                 bx.do_not_inline(invokeret);
183             }
184
185             if let Some((ret_dest, target)) = destination {
186                 bx.switch_to_block(fx.llbb(target));
187                 fx.set_debug_loc(bx, self.terminator.source_info);
188                 for tmp in copied_constant_arguments {
189                     bx.lifetime_end(tmp.llval, tmp.layout.size);
190                 }
191                 fx.store_return(bx, ret_dest, &fn_abi.ret, invokeret);
192             }
193         } else {
194             let llret = bx.call(fn_ty, Some(&fn_abi), fn_ptr, &llargs, self.funclet(fx));
195             if fx.mir[self.bb].is_cleanup {
196                 // Cleanup is always the cold path. Don't inline
197                 // drop glue. Also, when there is a deeply-nested
198                 // struct, there are "symmetry" issues that cause
199                 // exponential inlining - see issue #41696.
200                 bx.do_not_inline(llret);
201             }
202
203             if let Some((ret_dest, target)) = destination {
204                 for tmp in copied_constant_arguments {
205                     bx.lifetime_end(tmp.llval, tmp.layout.size);
206                 }
207                 fx.store_return(bx, ret_dest, &fn_abi.ret, llret);
208                 self.funclet_br(fx, bx, target);
209             } else {
210                 bx.unreachable();
211             }
212         }
213     }
214
215     /// Generates inline assembly with optional `destination` and `cleanup`.
216     fn do_inlineasm<Bx: BuilderMethods<'a, 'tcx>>(
217         &self,
218         fx: &mut FunctionCx<'a, 'tcx, Bx>,
219         bx: &mut Bx,
220         template: &[InlineAsmTemplatePiece],
221         operands: &[InlineAsmOperandRef<'tcx, Bx>],
222         options: InlineAsmOptions,
223         line_spans: &[Span],
224         destination: Option<mir::BasicBlock>,
225         cleanup: Option<mir::BasicBlock>,
226         instance: Instance<'_>,
227     ) {
228         if let Some(cleanup) = cleanup {
229             let ret_llbb = if let Some(target) = destination {
230                 fx.llbb(target)
231             } else {
232                 fx.unreachable_block()
233             };
234
235             bx.codegen_inline_asm(
236                 template,
237                 &operands,
238                 options,
239                 line_spans,
240                 instance,
241                 Some((ret_llbb, self.llbb_with_cleanup(fx, cleanup), self.funclet(fx))),
242             );
243         } else {
244             bx.codegen_inline_asm(template, &operands, options, line_spans, instance, None);
245
246             if let Some(target) = destination {
247                 self.funclet_br(fx, bx, target);
248             } else {
249                 bx.unreachable();
250             }
251         }
252     }
253 }
254
255 /// Codegen implementations for some terminator variants.
256 impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
257     /// Generates code for a `Resume` terminator.
258     fn codegen_resume_terminator(&mut self, helper: TerminatorCodegenHelper<'tcx>, mut bx: Bx) {
259         if let Some(funclet) = helper.funclet(self) {
260             bx.cleanup_ret(funclet, None);
261         } else {
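            // Not using MSVC funclets: reload the two values the landing pad stored
            // in the personality slot (exception pointer and selector), rebuild the
            // landing-pad aggregate, and re-raise it with `resume`.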
262             let slot = self.get_personality_slot(&mut bx);
263             let lp0 = slot.project_field(&mut bx, 0);
264             let lp0 = bx.load_operand(lp0).immediate();
265             let lp1 = slot.project_field(&mut bx, 1);
266             let lp1 = bx.load_operand(lp1).immediate();
267             slot.storage_dead(&mut bx);
268
269             let mut lp = bx.const_undef(self.landing_pad_type());
270             lp = bx.insert_value(lp, lp0, 0);
271             lp = bx.insert_value(lp, lp1, 1);
272             bx.resume(lp);
273         }
274     }
275
276     fn codegen_switchint_terminator(
277         &mut self,
278         helper: TerminatorCodegenHelper<'tcx>,
279         mut bx: Bx,
280         discr: &mir::Operand<'tcx>,
281         switch_ty: Ty<'tcx>,
282         targets: &SwitchTargets,
283     ) {
284         let discr = self.codegen_operand(&mut bx, &discr);
285         // `switch_ty` is redundant; sanity-check that it matches the operand's type.
286         assert_eq!(discr.layout.ty, switch_ty);
287         let mut target_iter = targets.iter();
288         if target_iter.len() == 1 {
289             // If there are two targets (one conditional, one fallback), emit `br` instead of `switch`.
290             let (test_value, target) = target_iter.next().unwrap();
291             let lltrue = helper.llbb_with_cleanup(self, target);
292             let llfalse = helper.llbb_with_cleanup(self, targets.otherwise());
293             if switch_ty == bx.tcx().types.bool {
294                 // Don't generate trivial icmps when switching on bool
295                 match test_value {
296                     0 => bx.cond_br(discr.immediate(), llfalse, lltrue),
297                     1 => bx.cond_br(discr.immediate(), lltrue, llfalse),
298                     _ => bug!(),
299                 }
300             } else {
301                 let switch_llty = bx.immediate_backend_type(bx.layout_of(switch_ty));
302                 let llval = bx.const_uint_big(switch_llty, test_value);
303                 let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval);
304                 bx.cond_br(cmp, lltrue, llfalse);
305             }
306         } else {
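            // More than one value target: emit a real `switch`, with the
            // `otherwise` block as the default destination.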
307             bx.switch(
308                 discr.immediate(),
309                 helper.llbb_with_cleanup(self, targets.otherwise()),
310                 target_iter.map(|(value, target)| (value, helper.llbb_with_cleanup(self, target))),
311             );
312         }
313     }
314
315     fn codegen_return_terminator(&mut self, mut bx: Bx) {
316         // Call `va_end` if this is the definition of a C-variadic function.
317         if self.fn_abi.c_variadic {
318             // The `VaList` "spoofed" argument is just after all the real arguments.
319             let va_list_arg_idx = self.fn_abi.args.len();
320             match self.locals[mir::Local::new(1 + va_list_arg_idx)] {
321                 LocalRef::Place(va_list) => {
322                     bx.va_end(va_list.llval);
323                 }
324                 _ => bug!("C-variadic function must have a `VaList` place"),
325             }
326         }
327         if self.fn_abi.ret.layout.abi.is_uninhabited() {
328             // Functions with uninhabited return values are marked `noreturn`,
329             // so we should make sure that we never actually return.
330             // We play it safe by using a well-defined `abort`, but we could go for immediate UB
331             // if that turns out to be helpful.
332             bx.abort();
333             // `abort` does not terminate the block, so we still need to generate
334             // an `unreachable` terminator after it.
335             bx.unreachable();
336             return;
337         }
338         let llval = match &self.fn_abi.ret.mode {
339             PassMode::Ignore | PassMode::Indirect { .. } => {
340                 bx.ret_void();
341                 return;
342             }
343
344             PassMode::Direct(_) | PassMode::Pair(..) => {
345                 let op = self.codegen_consume(&mut bx, mir::Place::return_place().as_ref());
346                 if let Ref(llval, _, align) = op.val {
347                     bx.load(bx.backend_type(op.layout), llval, align)
348                 } else {
349                     op.immediate_or_packed_pair(&mut bx)
350                 }
351             }
352
353             PassMode::Cast(cast_ty, _) => {
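                // The ABI wants the return value reinterpreted as `cast_ty`, so make
                // sure it lives in memory and reload it through a pointer to that type.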
354                 let op = match self.locals[mir::RETURN_PLACE] {
355                     LocalRef::Operand(Some(op)) => op,
356                     LocalRef::Operand(None) => bug!("use of return before def"),
357                     LocalRef::Place(cg_place) => OperandRef {
358                         val: Ref(cg_place.llval, None, cg_place.align),
359                         layout: cg_place.layout,
360                     },
361                     LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
362                 };
363                 let llslot = match op.val {
364                     Immediate(_) | Pair(..) => {
365                         let scratch = PlaceRef::alloca(&mut bx, self.fn_abi.ret.layout);
366                         op.val.store(&mut bx, scratch);
367                         scratch.llval
368                     }
369                     Ref(llval, _, align) => {
370                         assert_eq!(align, op.layout.align.abi, "return place is unaligned!");
371                         llval
372                     }
373                 };
374                 let ty = bx.cast_backend_type(cast_ty);
375                 let addr = bx.pointercast(llslot, bx.type_ptr_to(ty));
376                 bx.load(ty, addr, self.fn_abi.ret.layout.align.abi)
377             }
378         };
379         bx.ret(llval);
380     }
381
382     #[tracing::instrument(level = "trace", skip(self, helper, bx))]
383     fn codegen_drop_terminator(
384         &mut self,
385         helper: TerminatorCodegenHelper<'tcx>,
386         mut bx: Bx,
387         location: mir::Place<'tcx>,
388         target: mir::BasicBlock,
389         unwind: Option<mir::BasicBlock>,
390     ) {
391         let ty = location.ty(self.mir, bx.tcx()).ty;
392         let ty = self.monomorphize(ty);
393         let drop_fn = Instance::resolve_drop_in_place(bx.tcx(), ty);
394
395         if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
396             // we don't actually need to drop anything.
397             helper.funclet_br(self, &mut bx, target);
398             return;
399         }
400
401         let place = self.codegen_place(&mut bx, location.as_ref());
402         let (args1, args2);
403         let mut args = if let Some(llextra) = place.llextra {
404             args2 = [place.llval, llextra];
405             &args2[..]
406         } else {
407             args1 = [place.llval];
408             &args1[..]
409         };
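        // `args` now holds the pointer being dropped plus, for unsized places,
        // the extra vtable or length word carried in `llextra`.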
410         let (drop_fn, fn_abi) = match ty.kind() {
411             // FIXME(eddyb) perhaps move some of this logic into
412             // `Instance::resolve_drop_in_place`?
413             ty::Dynamic(_, _, ty::Dyn) => {
414                 // IN THIS ARM, WE HAVE:
415                 // ty = *mut (dyn Trait)
416                 // which is: exists<T> ( *mut T,    Vtable<T: Trait> )
417                 //                       args[0]    args[1]
418                 //
419                 // args = ( Data, Vtable )
420                 //                  |
421                 //                  v
422                 //                /-------\
423                 //                | ...   |
424                 //                \-------/
425                 //
426                 let virtual_drop = Instance {
427                     def: ty::InstanceDef::Virtual(drop_fn.def_id(), 0),
428                     substs: drop_fn.substs,
429                 };
430                 debug!("ty = {:?}", ty);
431                 debug!("drop_fn = {:?}", drop_fn);
432                 debug!("args = {:?}", args);
433                 let fn_abi = bx.fn_abi_of_instance(virtual_drop, ty::List::empty());
434                 let vtable = args[1];
435                 // Truncate vtable off of args list
436                 args = &args[..1];
437                 (
438                     meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_DROPINPLACE)
439                         .get_fn(&mut bx, vtable, ty, &fn_abi),
440                     fn_abi,
441                 )
442             }
443             ty::Dynamic(_, _, ty::DynStar) => {
444                 // IN THIS ARM, WE HAVE:
445                 // ty = *mut (dyn* Trait)
446                 // which is: *mut exists<T: sizeof(T) == sizeof(usize)> (T, Vtable<T: Trait>)
447                 //
448                 // args = [ * ]
449                 //          |
450                 //          v
451                 //      ( Data, Vtable )
452                 //                |
453                 //                v
454                 //              /-------\
455                 //              | ...   |
456                 //              \-------/
457                 //
458                 //
459                 // WE CAN CONVERT THIS INTO THE ABOVE LOGIC BY DOING
460                 //
461                 // data = &(*args[0]).0    // gives a pointer to Data above (really the same pointer)
462                 // vtable = (*args[0]).1   // loads the vtable out
463                 // (data, vtable)          // an equivalent Rust `*mut dyn Trait`
464                 //
465                 // SO THEN WE CAN USE THE ABOVE CODE.
466                 let virtual_drop = Instance {
467                     def: ty::InstanceDef::Virtual(drop_fn.def_id(), 0),
468                     substs: drop_fn.substs,
469                 };
470                 debug!("ty = {:?}", ty);
471                 debug!("drop_fn = {:?}", drop_fn);
472                 debug!("args = {:?}", args);
473                 let fn_abi = bx.fn_abi_of_instance(virtual_drop, ty::List::empty());
474                 let data = args[0];
475                 let data_ty = bx.cx().backend_type(place.layout);
476                 let vtable_ptr =
477                     bx.gep(data_ty, data, &[bx.cx().const_i32(0), bx.cx().const_i32(1)]);
478                 let vtable = bx.load(bx.type_i8p(), vtable_ptr, abi::Align::ONE);
479                 // Truncate vtable off of args list
480                 args = &args[..1];
481                 debug!("args' = {:?}", args);
482                 (
483                     meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_DROPINPLACE)
484                         .get_fn(&mut bx, vtable, ty, &fn_abi),
485                     fn_abi,
486                 )
487             }
488             _ => (bx.get_fn_addr(drop_fn), bx.fn_abi_of_instance(drop_fn, ty::List::empty())),
489         };
490         helper.do_call(
491             self,
492             &mut bx,
493             fn_abi,
494             drop_fn,
495             args,
496             Some((ReturnDest::Nothing, target)),
497             unwind,
498             &[],
499         );
500     }
501
502     fn codegen_assert_terminator(
503         &mut self,
504         helper: TerminatorCodegenHelper<'tcx>,
505         mut bx: Bx,
506         terminator: &mir::Terminator<'tcx>,
507         cond: &mir::Operand<'tcx>,
508         expected: bool,
509         msg: &mir::AssertMessage<'tcx>,
510         target: mir::BasicBlock,
511         cleanup: Option<mir::BasicBlock>,
512     ) {
513         let span = terminator.source_info.span;
514         let cond = self.codegen_operand(&mut bx, cond).immediate();
515         let mut const_cond = bx.const_to_opt_u128(cond, false).map(|c| c == 1);
516
517         // This case can currently arise only from functions marked
518         // with #[rustc_inherit_overflow_checks] and inlined from
519         // another crate (mostly core::num generic/#[inline] fns),
520         // while the current crate doesn't use overflow checks.
521         // NOTE: Unlike binops, negation doesn't have its own
522         // checked operation, just a comparison with the minimum
523         // value, so we have to check for the assert message.
524         if !bx.check_overflow() {
525             if let AssertKind::OverflowNeg(_) = *msg {
526                 const_cond = Some(expected);
527             }
528         }
529
530         // Don't codegen the panic block if success is known.
531         if const_cond == Some(expected) {
532             helper.funclet_br(self, &mut bx, target);
533             return;
534         }
535
536         // Pass the condition through llvm.expect for branch hinting.
537         let cond = bx.expect(cond, expected);
538
539         // Create the failure block and the conditional branch to it.
540         let lltarget = helper.llbb_with_cleanup(self, target);
541         let panic_block = bx.append_sibling_block("panic");
542         if expected {
543             bx.cond_br(cond, lltarget, panic_block);
544         } else {
545             bx.cond_br(cond, panic_block, lltarget);
546         }
547
548         // After this point, bx is the block for the call to panic.
549         bx.switch_to_block(panic_block);
550         self.set_debug_loc(&mut bx, terminator.source_info);
551
552         // Get the location information.
553         let location = self.get_caller_location(&mut bx, terminator.source_info).immediate();
554
555         // Put together the arguments to the panic entry point.
556         let (lang_item, args) = match msg {
557             AssertKind::BoundsCheck { ref len, ref index } => {
558                 let len = self.codegen_operand(&mut bx, len).immediate();
559                 let index = self.codegen_operand(&mut bx, index).immediate();
560                 // It's `fn panic_bounds_check(index: usize, len: usize)`,
561                 // and `#[track_caller]` adds an implicit third argument.
562                 (LangItem::PanicBoundsCheck, vec![index, len, location])
563             }
564             _ => {
565                 let msg = bx.const_str(msg.description());
566                 // It's `pub fn panic(expr: &str)`, with the wide reference being passed
567                 // as two arguments, and `#[track_caller]` adds an implicit third argument.
568                 (LangItem::Panic, vec![msg.0, msg.1, location])
569             }
570         };
571
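        // Resolve the chosen panic lang item to a callable value and its ABI.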
572         let (fn_abi, llfn) = common::build_langcall(&bx, Some(span), lang_item);
573
574         // Codegen the actual panic invoke/call.
575         helper.do_call(self, &mut bx, fn_abi, llfn, &args, None, cleanup, &[]);
576     }
577
578     fn codegen_abort_terminator(
579         &mut self,
580         helper: TerminatorCodegenHelper<'tcx>,
581         mut bx: Bx,
582         terminator: &mir::Terminator<'tcx>,
583     ) {
584         let span = terminator.source_info.span;
585         self.set_debug_loc(&mut bx, terminator.source_info);
586
587         // Obtain the panic entry point.
588         let (fn_abi, llfn) = common::build_langcall(&bx, Some(span), LangItem::PanicNoUnwind);
589
590         // Codegen the actual panic invoke/call.
591         helper.do_call(self, &mut bx, fn_abi, llfn, &[], None, None, &[]);
592     }
593
594     /// Returns `true` if this is indeed a panic intrinsic and codegen is done.
595     fn codegen_panic_intrinsic(
596         &mut self,
597         helper: &TerminatorCodegenHelper<'tcx>,
598         bx: &mut Bx,
599         intrinsic: Option<Symbol>,
600         instance: Option<Instance<'tcx>>,
601         source_info: mir::SourceInfo,
602         target: Option<mir::BasicBlock>,
603         cleanup: Option<mir::BasicBlock>,
604     ) -> bool {
605         // Emit a panic or a no-op for `assert_*` intrinsics.
606         // These are intrinsics that compile to panics so that we can get a message
607         // which mentions the offending type, even from a const context.
608         #[derive(Debug, PartialEq)]
609         enum AssertIntrinsic {
610             Inhabited,
611             ZeroValid,
612             UninitValid,
613         }
614         let panic_intrinsic = intrinsic.and_then(|i| match i {
615             sym::assert_inhabited => Some(AssertIntrinsic::Inhabited),
616             sym::assert_zero_valid => Some(AssertIntrinsic::ZeroValid),
617             sym::assert_uninit_valid => Some(AssertIntrinsic::UninitValid),
618             _ => None,
619         });
620         if let Some(intrinsic) = panic_intrinsic {
621             use AssertIntrinsic::*;
622
623             let ty = instance.unwrap().substs.type_at(0);
624             let layout = bx.layout_of(ty);
625             let do_panic = match intrinsic {
626                 Inhabited => layout.abi.is_uninhabited(),
627                 ZeroValid => !bx.tcx().permits_zero_init(layout),
628                 UninitValid => !bx.tcx().permits_uninit_init(layout),
629             };
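            // If the check passes (the type is inhabited, or the requested kind of
            // initialization is valid), the intrinsic compiles down to nothing but
            // a branch to `target`.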
630             if do_panic {
631                 let msg_str = with_no_visible_paths!({
632                     with_no_trimmed_paths!({
633                         if layout.abi.is_uninhabited() {
634                             // Use this error even for the other intrinsics as it is more precise.
635                             format!("attempted to instantiate uninhabited type `{}`", ty)
636                         } else if intrinsic == ZeroValid {
637                             format!("attempted to zero-initialize type `{}`, which is invalid", ty)
638                         } else {
639                             format!(
640                                 "attempted to leave type `{}` uninitialized, which is invalid",
641                                 ty
642                             )
643                         }
644                     })
645                 });
646                 let msg = bx.const_str(&msg_str);
647                 let location = self.get_caller_location(bx, source_info).immediate();
648
649                 // Obtain the panic entry point.
650                 let (fn_abi, llfn) =
651                     common::build_langcall(bx, Some(source_info.span), LangItem::Panic);
652
653                 // Codegen the actual panic invoke/call.
654                 helper.do_call(
655                     self,
656                     bx,
657                     fn_abi,
658                     llfn,
659                     &[msg.0, msg.1, location],
660                     target.as_ref().map(|bb| (ReturnDest::Nothing, *bb)),
661                     cleanup,
662                     &[],
663                 );
664             } else {
665                 // a NOP
666                 let target = target.unwrap();
667                 helper.funclet_br(self, bx, target)
668             }
669             true
670         } else {
671             false
672         }
673     }
674
675     fn codegen_call_terminator(
676         &mut self,
677         helper: TerminatorCodegenHelper<'tcx>,
678         mut bx: Bx,
679         terminator: &mir::Terminator<'tcx>,
680         func: &mir::Operand<'tcx>,
681         args: &[mir::Operand<'tcx>],
682         destination: mir::Place<'tcx>,
683         target: Option<mir::BasicBlock>,
684         cleanup: Option<mir::BasicBlock>,
685         fn_span: Span,
686     ) {
687         let source_info = terminator.source_info;
688         let span = source_info.span;
689
690         // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
691         let callee = self.codegen_operand(&mut bx, func);
692
693         let (instance, mut llfn) = match *callee.layout.ty.kind() {
694             ty::FnDef(def_id, substs) => (
695                 Some(
696                     ty::Instance::resolve(bx.tcx(), ty::ParamEnv::reveal_all(), def_id, substs)
697                         .unwrap()
698                         .unwrap()
699                         .polymorphize(bx.tcx()),
700                 ),
701                 None,
702             ),
703             ty::FnPtr(_) => (None, Some(callee.immediate())),
704             _ => bug!("{} is not callable", callee.layout.ty),
705         };
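        // `instance` is known for direct calls to fn items; a bare fn pointer only
        // gives us a value (`llfn`) to call indirectly.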
706         let def = instance.map(|i| i.def);
707
708         if let Some(ty::InstanceDef::DropGlue(_, None)) = def {
709             // Empty drop glue; a no-op.
710             let target = target.unwrap();
711             helper.funclet_br(self, &mut bx, target);
712             return;
713         }
714
715         // FIXME(eddyb) avoid computing this if possible, when `instance` is
716         // available - right now `sig` is only needed for getting the `abi`
717         // and figuring out how many extra args were passed to a C-variadic `fn`.
718         let sig = callee.layout.ty.fn_sig(bx.tcx());
719         let abi = sig.abi();
720
721         // Identify intrinsic calls; they are handled specially below instead of as ordinary calls.
722         let intrinsic = match def {
723             Some(ty::InstanceDef::Intrinsic(def_id)) => Some(bx.tcx().item_name(def_id)),
724             _ => None,
725         };
726
727         let extra_args = &args[sig.inputs().skip_binder().len()..];
728         let extra_args = bx.tcx().mk_type_list(extra_args.iter().map(|op_arg| {
729             let op_ty = op_arg.ty(self.mir, bx.tcx());
730             self.monomorphize(op_ty)
731         }));
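        // Types of any arguments passed beyond the declared signature; these arise
        // for C-variadic calls and are folded into the computed `fn_abi` below.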
732
733         let fn_abi = match instance {
734             Some(instance) => bx.fn_abi_of_instance(instance, extra_args),
735             None => bx.fn_abi_of_fn_ptr(sig, extra_args),
736         };
737
738         if intrinsic == Some(sym::transmute) {
739             if let Some(target) = target {
740                 self.codegen_transmute(&mut bx, &args[0], destination);
741                 helper.funclet_br(self, &mut bx, target);
742             } else {
743                 // If we are trying to transmute to an uninhabited type,
744                 // it is likely there is no allotted destination. In fact,
745                 // transmuting to an uninhabited type is UB, which means
746                 // we can do what we like. Here, we declare that transmuting
747                 // into an uninhabited type is impossible, so anything following
748                 // it must be unreachable.
749                 assert_eq!(fn_abi.ret.layout.abi, abi::Abi::Uninhabited);
750                 bx.unreachable();
751             }
752             return;
753         }
754
755         if self.codegen_panic_intrinsic(
756             &helper,
757             &mut bx,
758             intrinsic,
759             instance,
760             source_info,
761             target,
762             cleanup,
763         ) {
764             return;
765         }
766
767         // The arguments we'll be passing, plus one to account for the indirect return pointer ("outptr"), if used.
768         let arg_count = fn_abi.args.len() + fn_abi.ret.is_indirect() as usize;
769         let mut llargs = Vec::with_capacity(arg_count);
770
771         // Prepare the return value destination
772         let ret_dest = if target.is_some() {
773             let is_intrinsic = intrinsic.is_some();
774             self.make_return_dest(&mut bx, destination, &fn_abi.ret, &mut llargs, is_intrinsic)
775         } else {
776             ReturnDest::Nothing
777         };
778
779         if intrinsic == Some(sym::caller_location) {
780             if let Some(target) = target {
781                 let location = self
782                     .get_caller_location(&mut bx, mir::SourceInfo { span: fn_span, ..source_info });
783
784                 if let ReturnDest::IndirectOperand(tmp, _) = ret_dest {
785                     location.val.store(&mut bx, tmp);
786                 }
787                 self.store_return(&mut bx, ret_dest, &fn_abi.ret, location.immediate());
788                 helper.funclet_br(self, &mut bx, target);
789             }
790             return;
791         }
792
793         match intrinsic {
794             None | Some(sym::drop_in_place) => {}
795             Some(sym::copy_nonoverlapping) => unreachable!(),
796             Some(intrinsic) => {
797                 let dest = match ret_dest {
798                     _ if fn_abi.ret.is_indirect() => llargs[0],
799                     ReturnDest::Nothing => {
800                         bx.const_undef(bx.type_ptr_to(bx.arg_memory_ty(&fn_abi.ret)))
801                     }
802                     ReturnDest::IndirectOperand(dst, _) | ReturnDest::Store(dst) => dst.llval,
803                     ReturnDest::DirectOperand(_) => {
804                         bug!("Cannot use direct operand with an intrinsic call")
805                     }
806                 };
807
808                 let args: Vec<_> = args
809                     .iter()
810                     .enumerate()
811                     .map(|(i, arg)| {
812                         // The indices passed to simd_shuffle* in the
813                         // third argument must be constant. This is
814                         // checked by const-qualification, which also
815                         // promotes any complex rvalues to constants.
816                         if i == 2 && intrinsic.as_str().starts_with("simd_shuffle") {
817                             if let mir::Operand::Constant(constant) = arg {
818                                 let c = self.eval_mir_constant(constant);
819                                 let (llval, ty) = self.simd_shuffle_indices(
820                                     &bx,
821                                     constant.span,
822                                     self.monomorphize(constant.ty()),
823                                     c,
824                                 );
825                                 return OperandRef {
826                                     val: Immediate(llval),
827                                     layout: bx.layout_of(ty),
828                                 };
829                             } else {
830                                 span_bug!(span, "shuffle indices must be constant");
831                             }
832                         }
833
834                         self.codegen_operand(&mut bx, arg)
835                     })
836                     .collect();
837
838                 Self::codegen_intrinsic_call(
839                     &mut bx,
840                     *instance.as_ref().unwrap(),
841                     &fn_abi,
842                     &args,
843                     dest,
844                     span,
845                 );
846
847                 if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
848                     self.store_return(&mut bx, ret_dest, &fn_abi.ret, dst.llval);
849                 }
850
851                 if let Some(target) = target {
852                     helper.funclet_br(self, &mut bx, target);
853                 } else {
854                     bx.unreachable();
855                 }
856
857                 return;
858             }
859         }
860
861         // Split the rust-call tupled arguments off.
862         let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
863             let (tup, args) = args.split_last().unwrap();
864             (args, Some(tup))
865         } else {
866             (args, None)
867         };
868
869         let mut copied_constant_arguments = vec![];
870         'make_args: for (i, arg) in first_args.iter().enumerate() {
871             let mut op = self.codegen_operand(&mut bx, arg);
872
873             if let (0, Some(ty::InstanceDef::Virtual(_, idx))) = (i, def) {
874                 match op.val {
875                     Pair(data_ptr, meta) => {
876                         // In the case of Rc<Self>, we need to explicitly pass a
877                         // *mut RcBox<Self> with a Scalar (not ScalarPair) ABI. This is a hack
878                         // that is understood elsewhere in the compiler as a method on
879                         // `dyn Trait`.
880                         // To get a `*mut RcBox<Self>`, we just keep unwrapping newtypes until
881                         // we get a value of a built-in pointer type
882                         'descend_newtypes: while !op.layout.ty.is_unsafe_ptr()
883                             && !op.layout.ty.is_region_ptr()
884                         {
885                             for i in 0..op.layout.fields.count() {
886                                 let field = op.extract_field(&mut bx, i);
887                                 if !field.layout.is_zst() {
888                                     // We found the one non-zero-sized field that is allowed;
889                                     // now find *its* non-zero-sized field, or stop if it's a
890                                     // pointer.
891                                     op = field;
892                                     continue 'descend_newtypes;
893                                 }
894                             }
895
896                             span_bug!(span, "receiver has no non-zero-sized fields {:?}", op);
897                         }
898
899                         // now that we have `*dyn Trait` or `&dyn Trait`, split it up into its
900                         // data pointer and vtable. Look up the method in the vtable, and pass
901                         // the data pointer as the first argument
902                         llfn = Some(meth::VirtualIndex::from_index(idx).get_fn(
903                             &mut bx,
904                             meta,
905                             op.layout.ty,
906                             &fn_abi,
907                         ));
908                         llargs.push(data_ptr);
909                         continue 'make_args;
910                     }
911                     Ref(data_ptr, Some(meta), _) => {
912                         // by-value dynamic dispatch
913                         llfn = Some(meth::VirtualIndex::from_index(idx).get_fn(
914                             &mut bx,
915                             meta,
916                             op.layout.ty,
917                             &fn_abi,
918                         ));
919                         llargs.push(data_ptr);
920                         continue;
921                     }
922                     Immediate(_) => {
923                         let ty::Ref(_, ty, _) = op.layout.ty.kind() else {
924                             span_bug!(span, "can't codegen a virtual call on {:#?}", op);
925                         };
926                         if !ty.is_dyn_star() {
927                             span_bug!(span, "can't codegen a virtual call on {:#?}", op);
928                         }
929                         // FIXME(dyn-star): Make sure this is done on a &dyn* receiver
930                         let place = op.deref(bx.cx());
931                         let data_ptr = place.project_field(&mut bx, 0);
932                         let meta_ptr = place.project_field(&mut bx, 1);
933                         let meta = bx.load_operand(meta_ptr);
934                         llfn = Some(meth::VirtualIndex::from_index(idx).get_fn(
935                             &mut bx,
936                             meta.immediate(),
937                             op.layout.ty,
938                             &fn_abi,
939                         ));
940                         llargs.push(data_ptr.llval);
941                         continue;
942                     }
943                     _ => {
944                         span_bug!(span, "can't codegen a virtual call on {:#?}", op);
945                     }
946                 }
947             }
948
949             // The callee needs to own the argument memory if we pass it
950             // by-ref, so make a local copy of non-immediate constants.
951             match (arg, op.val) {
952                 (&mir::Operand::Copy(_), Ref(_, None, _))
953                 | (&mir::Operand::Constant(_), Ref(_, None, _)) => {
954                     let tmp = PlaceRef::alloca(&mut bx, op.layout);
955                     bx.lifetime_start(tmp.llval, tmp.layout.size);
956                     op.val.store(&mut bx, tmp);
957                     op.val = Ref(tmp.llval, None, tmp.align);
958                     copied_constant_arguments.push(tmp);
959                 }
960                 _ => {}
961             }
962
963             self.codegen_argument(&mut bx, op, &mut llargs, &fn_abi.args[i]);
964         }
965         let num_untupled = untuple.map(|tup| {
966             self.codegen_arguments_untupled(
967                 &mut bx,
968                 tup,
969                 &mut llargs,
970                 &fn_abi.args[first_args.len()..],
971             )
972         });
973
974         let needs_location =
975             instance.map_or(false, |i| i.def.requires_caller_location(self.cx.tcx()));
976         if needs_location {
977             let mir_args = if let Some(num_untupled) = num_untupled {
978                 first_args.len() + num_untupled
979             } else {
980                 args.len()
981             };
982             assert_eq!(
983                 fn_abi.args.len(),
984                 mir_args + 1,
985                 "#[track_caller] fn's must have 1 more argument in their ABI than in their MIR: {:?} {:?} {:?}",
986                 instance,
987                 fn_span,
988                 fn_abi,
989             );
990             let location =
991                 self.get_caller_location(&mut bx, mir::SourceInfo { span: fn_span, ..source_info });
992             debug!(
993                 "codegen_call_terminator({:?}): location={:?} (fn_span {:?})",
994                 terminator, location, fn_span
995             );
996
997             let last_arg = fn_abi.args.last().unwrap();
998             self.codegen_argument(&mut bx, location, &mut llargs, last_arg);
999         }
1000
1001         let (is_indirect_call, fn_ptr) = match (llfn, instance) {
1002             (Some(llfn), _) => (true, llfn),
1003             (None, Some(instance)) => (false, bx.get_fn_addr(instance)),
1004             _ => span_bug!(span, "no llfn for call"),
1005         };
1006
1007         // For backends that support CFI using type membership (i.e., testing whether a given
1008         // pointer is associated with a type identifier), guard indirect calls with a type test.
1009         if bx.tcx().sess.is_sanitizer_cfi_enabled() && is_indirect_call {
1010             // Emit type metadata and checks.
1011             // FIXME(rcvalle): Add support for generalized identifiers.
1012             // FIXME(rcvalle): Create distinct unnamed MDNodes for internal identifiers.
1013             let typeid = typeid_for_fnabi(bx.tcx(), fn_abi);
1014             let typeid_metadata = self.cx.typeid_metadata(typeid);
1015
1016             // Test whether the function pointer is associated with the type identifier.
1017             let cond = bx.type_test(fn_ptr, typeid_metadata);
1018             let bb_pass = bx.append_sibling_block("type_test.pass");
1019             let bb_fail = bx.append_sibling_block("type_test.fail");
1020             bx.cond_br(cond, bb_pass, bb_fail);
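            // The pass block performs the call as usual; the fail block traps by
            // aborting and is then terminated with `unreachable`.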
1021
1022             bx.switch_to_block(bb_pass);
1023             helper.do_call(
1024                 self,
1025                 &mut bx,
1026                 fn_abi,
1027                 fn_ptr,
1028                 &llargs,
1029                 target.as_ref().map(|&target| (ret_dest, target)),
1030                 cleanup,
1031                 &copied_constant_arguments,
1032             );
1033
1034             bx.switch_to_block(bb_fail);
1035             bx.abort();
1036             bx.unreachable();
1037
1038             return;
1039         }
1040
1041         helper.do_call(
1042             self,
1043             &mut bx,
1044             fn_abi,
1045             fn_ptr,
1046             &llargs,
1047             target.as_ref().map(|&target| (ret_dest, target)),
1048             cleanup,
1049             &copied_constant_arguments,
1050         );
1051     }
1052
1053     fn codegen_asm_terminator(
1054         &mut self,
1055         helper: TerminatorCodegenHelper<'tcx>,
1056         mut bx: Bx,
1057         terminator: &mir::Terminator<'tcx>,
1058         template: &[ast::InlineAsmTemplatePiece],
1059         operands: &[mir::InlineAsmOperand<'tcx>],
1060         options: ast::InlineAsmOptions,
1061         line_spans: &[Span],
1062         destination: Option<mir::BasicBlock>,
1063         cleanup: Option<mir::BasicBlock>,
1064         instance: Instance<'_>,
1065     ) {
1066         let span = terminator.source_info.span;
1067
1068         let operands: Vec<_> = operands
1069             .iter()
1070             .map(|op| match *op {
1071                 mir::InlineAsmOperand::In { reg, ref value } => {
1072                     let value = self.codegen_operand(&mut bx, value);
1073                     InlineAsmOperandRef::In { reg, value }
1074                 }
1075                 mir::InlineAsmOperand::Out { reg, late, ref place } => {
1076                     let place = place.map(|place| self.codegen_place(&mut bx, place.as_ref()));
1077                     InlineAsmOperandRef::Out { reg, late, place }
1078                 }
1079                 mir::InlineAsmOperand::InOut { reg, late, ref in_value, ref out_place } => {
1080                     let in_value = self.codegen_operand(&mut bx, in_value);
1081                     let out_place =
1082                         out_place.map(|out_place| self.codegen_place(&mut bx, out_place.as_ref()));
1083                     InlineAsmOperandRef::InOut { reg, late, in_value, out_place }
1084                 }
1085                 mir::InlineAsmOperand::Const { ref value } => {
1086                     let const_value = self
1087                         .eval_mir_constant(value)
1088                         .unwrap_or_else(|_| span_bug!(span, "asm const cannot be resolved"));
1089                     let string = common::asm_const_to_str(
1090                         bx.tcx(),
1091                         span,
1092                         const_value,
1093                         bx.layout_of(value.ty()),
1094                     );
1095                     InlineAsmOperandRef::Const { string }
1096                 }
1097                 mir::InlineAsmOperand::SymFn { ref value } => {
1098                     let literal = self.monomorphize(value.literal);
1099                     if let ty::FnDef(def_id, substs) = *literal.ty().kind() {
1100                         let instance = ty::Instance::resolve_for_fn_ptr(
1101                             bx.tcx(),
1102                             ty::ParamEnv::reveal_all(),
1103                             def_id,
1104                             substs,
1105                         )
1106                         .unwrap();
1107                         InlineAsmOperandRef::SymFn { instance }
1108                     } else {
1109                         span_bug!(span, "invalid type for asm sym (fn)");
1110                     }
1111                 }
1112                 mir::InlineAsmOperand::SymStatic { def_id } => {
1113                     InlineAsmOperandRef::SymStatic { def_id }
1114                 }
1115             })
1116             .collect();
1117
1118         helper.do_inlineasm(
1119             self,
1120             &mut bx,
1121             template,
1122             &operands,
1123             options,
1124             line_spans,
1125             destination,
1126             cleanup,
1127             instance,
1128         );
1129     }
1130 }
1131
1132 impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
1133     pub fn codegen_block(&mut self, bb: mir::BasicBlock) {
1134         let llbb = self.llbb(bb);
1135         let mut bx = Bx::build(self.cx, llbb);
1136         let mir = self.mir;
1137         let data = &mir[bb];
1138
1139         debug!("codegen_block({:?}={:?})", bb, data);
1140
1141         for statement in &data.statements {
1142             bx = self.codegen_statement(bx, statement);
1143         }
1144
1145         self.codegen_terminator(bx, bb, data.terminator());
1146     }
1147
1148     fn codegen_terminator(
1149         &mut self,
1150         mut bx: Bx,
1151         bb: mir::BasicBlock,
1152         terminator: &'tcx mir::Terminator<'tcx>,
1153     ) {
1154         debug!("codegen_terminator: {:?}", terminator);
1155
1156         // Create the cleanup bundle, if needed.
1157         let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);
1158         let helper = TerminatorCodegenHelper { bb, terminator, funclet_bb };
1159
1160         self.set_debug_loc(&mut bx, terminator.source_info);
1161         match terminator.kind {
1162             mir::TerminatorKind::Resume => self.codegen_resume_terminator(helper, bx),
1163
1164             mir::TerminatorKind::Abort => {
1165                 self.codegen_abort_terminator(helper, bx, terminator);
1166             }
1167
1168             mir::TerminatorKind::Goto { target } => {
1169                 helper.funclet_br(self, &mut bx, target);
1170             }
1171
1172             mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref targets } => {
1173                 self.codegen_switchint_terminator(helper, bx, discr, switch_ty, targets);
1174             }
1175
1176             mir::TerminatorKind::Return => {
1177                 self.codegen_return_terminator(bx);
1178             }
1179
1180             mir::TerminatorKind::Unreachable => {
1181                 bx.unreachable();
1182             }
1183
1184             mir::TerminatorKind::Drop { place, target, unwind } => {
1185                 self.codegen_drop_terminator(helper, bx, place, target, unwind);
1186             }
1187
1188             mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
1189                 self.codegen_assert_terminator(
1190                     helper, bx, terminator, cond, expected, msg, target, cleanup,
1191                 );
1192             }
1193
1194             mir::TerminatorKind::DropAndReplace { .. } => {
1195                 bug!("undesugared DropAndReplace in codegen: {:?}", terminator);
1196             }
1197
1198             mir::TerminatorKind::Call {
1199                 ref func,
1200                 ref args,
1201                 destination,
1202                 target,
1203                 cleanup,
1204                 from_hir_call: _,
1205                 fn_span,
1206             } => {
1207                 self.codegen_call_terminator(
1208                     helper,
1209                     bx,
1210                     terminator,
1211                     func,
1212                     args,
1213                     destination,
1214                     target,
1215                     cleanup,
1216                     fn_span,
1217                 );
1218             }
1219             mir::TerminatorKind::GeneratorDrop | mir::TerminatorKind::Yield { .. } => {
1220                 bug!("generator ops in codegen")
1221             }
1222             mir::TerminatorKind::FalseEdge { .. } | mir::TerminatorKind::FalseUnwind { .. } => {
1223                 bug!("borrowck false edges in codegen")
1224             }
1225
1226             mir::TerminatorKind::InlineAsm {
1227                 template,
1228                 ref operands,
1229                 options,
1230                 line_spans,
1231                 destination,
1232                 cleanup,
1233             } => {
1234                 self.codegen_asm_terminator(
1235                     helper,
1236                     bx,
1237                     terminator,
1238                     template,
1239                     operands,
1240                     options,
1241                     line_spans,
1242                     destination,
1243                     cleanup,
1244                     self.instance,
1245                 );
1246             }
1247         }
1248     }
1249
1250     fn codegen_argument(
1251         &mut self,
1252         bx: &mut Bx,
1253         op: OperandRef<'tcx, Bx::Value>,
1254         llargs: &mut Vec<Bx::Value>,
1255         arg: &ArgAbi<'tcx, Ty<'tcx>>,
1256     ) {
1257         match arg.mode {
1258             PassMode::Ignore => return,
1259             PassMode::Cast(_, true) => {
1260                 // Fill padding with undef value, where applicable.
1261                 llargs.push(bx.const_undef(bx.reg_backend_type(&Reg::i32())));
1262             }
1263             PassMode::Pair(..) => match op.val {
1264                 Pair(a, b) => {
1265                     llargs.push(a);
1266                     llargs.push(b);
1267                     return;
1268                 }
1269                 _ => bug!("codegen_argument: {:?} invalid for pair argument", op),
1270             },
1271             PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => match op.val {
1272                 Ref(a, Some(b), _) => {
1273                     llargs.push(a);
1274                     llargs.push(b);
1275                     return;
1276                 }
1277                 _ => bug!("codegen_argument: {:?} invalid for unsized indirect argument", op),
1278             },
1279             _ => {}
1280         }
1281
1282         // Force by-ref if we have to load through a cast pointer.
1283         let (mut llval, align, by_ref) = match op.val {
1284             Immediate(_) | Pair(..) => match arg.mode {
1285                 PassMode::Indirect { .. } | PassMode::Cast(..) => {
1286                     let scratch = PlaceRef::alloca(bx, arg.layout);
1287                     op.val.store(bx, scratch);
1288                     (scratch.llval, scratch.align, true)
1289                 }
1290                 _ => (op.immediate_or_packed_pair(bx), arg.layout.align.abi, false),
1291             },
1292             Ref(llval, _, align) => {
1293                 if arg.is_indirect() && align < arg.layout.align.abi {
1294                     // `foo(packed.large_field)`: we can't pass the (unaligned) field directly,
1295                     // so copy it into a properly aligned temporary first. As of Rust 1.16 only
1296                     // temporaries should reach this point, but the copy guards against latent bugs.
1297
1298                     let scratch = PlaceRef::alloca(bx, arg.layout);
1299                     base::memcpy_ty(
1300                         bx,
1301                         scratch.llval,
1302                         scratch.align,
1303                         llval,
1304                         align,
1305                         op.layout,
1306                         MemFlags::empty(),
1307                     );
1308                     (scratch.llval, scratch.align, true)
1309                 } else {
1310                     (llval, align, true)
1311                 }
1312             }
1313         };
1314
1315         if by_ref && !arg.is_indirect() {
1316             // Have to load the argument, maybe while casting it.
1317             if let PassMode::Cast(ty, _) = &arg.mode {
1318                 let llty = bx.cast_backend_type(ty);
1319                 let addr = bx.pointercast(llval, bx.type_ptr_to(llty));
1320                 llval = bx.load(llty, addr, align.min(arg.layout.align.abi));
1321             } else {
1322                 // We can't use `PlaceRef::load` here because the argument
1323                 // may have a type we don't treat as immediate, but the ABI
1324                 // used for this call is passing it by-value. In that case,
1325                 // the load would just produce `OperandValue::Ref` instead
1326                 // of the `OperandValue::Immediate` we need for the call.
1327                 llval = bx.load(bx.backend_type(arg.layout), llval, align);
1328                 if let abi::Abi::Scalar(scalar) = arg.layout.abi {
1329                     if scalar.is_bool() {
1330                         bx.range_metadata(llval, WrappingRange { start: 0, end: 1 });
1331                     }
1332                 }
1333                 // We store bools as `i8` so we need to truncate to `i1`.
1334                 llval = bx.to_immediate(llval, arg.layout);
1335             }
1336         }
1337
1338         llargs.push(llval);
1339     }
1340
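         /// Expands a tuple operand into individual call arguments, handling both by-ref
         /// and immediate tuples. This is how the tupled arguments of a closure call (e.g.
         /// `Fn::call(&f, (a, b))`) are spread back out for the actual call. Returns the
         /// number of tuple fields.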
1341     fn codegen_arguments_untupled(
1342         &mut self,
1343         bx: &mut Bx,
1344         operand: &mir::Operand<'tcx>,
1345         llargs: &mut Vec<Bx::Value>,
1346         args: &[ArgAbi<'tcx, Ty<'tcx>>],
1347     ) -> usize {
1348         let tuple = self.codegen_operand(bx, operand);
1349
1350         // Handle both by-ref and immediate tuples.
1351         if let Ref(llval, None, align) = tuple.val {
1352             let tuple_ptr = PlaceRef::new_sized_aligned(llval, tuple.layout, align);
1353             for i in 0..tuple.layout.fields.count() {
1354                 let field_ptr = tuple_ptr.project_field(bx, i);
1355                 let field = bx.load_operand(field_ptr);
1356                 self.codegen_argument(bx, field, llargs, &args[i]);
1357             }
1358         } else if let Ref(_, Some(_), _) = tuple.val {
1359             bug!("closure arguments must be sized")
1360         } else {
1361             // If the tuple is immediate, the elements are as well.
1362             for i in 0..tuple.layout.fields.count() {
1363                 let op = tuple.extract_field(bx, i);
1364                 self.codegen_argument(bx, op, llargs, &args[i]);
1365             }
1366         }
1367         tuple.layout.fields.count()
1368     }
1369
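         /// Produces the caller-location value that `#[track_caller]` code observes via
         /// `Location::caller()`, walking up inlined `SourceScope`s so that MIR inlining
         /// does not change which caller gets reported.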
1370     fn get_caller_location(
1371         &mut self,
1372         bx: &mut Bx,
1373         mut source_info: mir::SourceInfo,
1374     ) -> OperandRef<'tcx, Bx::Value> {
1375         let tcx = bx.tcx();
1376
1377         let mut span_to_caller_location = |span: Span| {
1378             let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span);
1379             let caller = tcx.sess.source_map().lookup_char_pos(topmost.lo());
1380             let const_loc = tcx.const_caller_location((
1381                 Symbol::intern(&caller.file.name.prefer_remapped().to_string_lossy()),
1382                 caller.line as u32,
1383                 caller.col_display as u32 + 1,
1384             ));
1385             OperandRef::from_const(bx, const_loc, bx.tcx().caller_location_ty())
1386         };
1387
1388         // Walk up the `SourceScope`s, in case some of them are from MIR inlining.
1389         // If so, the starting `source_info.span` is in the innermost inlined
1390         // function, and will be replaced with outer callsite spans as long
1391         // as the inlined functions were `#[track_caller]`.
1392         loop {
1393             let scope_data = &self.mir.source_scopes[source_info.scope];
1394
1395             if let Some((callee, callsite_span)) = scope_data.inlined {
1396                 // Stop inside the most nested non-`#[track_caller]` function,
1397                 // before ever reaching its caller (which is irrelevant).
1398                 if !callee.def.requires_caller_location(tcx) {
1399                     return span_to_caller_location(source_info.span);
1400                 }
1401                 source_info.span = callsite_span;
1402             }
1403
1404             // Skip past all of the parents with `inlined: None`.
1405             match scope_data.inlined_parent_scope {
1406                 Some(parent) => source_info.scope = parent,
1407                 None => break,
1408             }
1409         }
1410
1411         // No inlined `SourceScope`s, or all of them were `#[track_caller]`.
1412         self.caller_location.unwrap_or_else(|| span_to_caller_location(source_info.span))
1413     }
1414
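         /// Returns the stack slot (allocating it on first use) in which landing pads stash
         /// the exception pointer and selector they receive, typed as `(*mut u8, i32)`.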
1415     fn get_personality_slot(&mut self, bx: &mut Bx) -> PlaceRef<'tcx, Bx::Value> {
1416         let cx = bx.cx();
1417         if let Some(slot) = self.personality_slot {
1418             slot
1419         } else {
1420             let layout = cx.layout_of(
1421                 cx.tcx().intern_tup(&[cx.tcx().mk_mut_ptr(cx.tcx().types.u8), cx.tcx().types.i32]),
1422             );
1423             let slot = PlaceRef::alloca(bx, layout);
1424             self.personality_slot = Some(slot);
1425             slot
1426         }
1427     }
1428
1429     /// Returns the landing/cleanup pad wrapper around the given basic block.
1430     // FIXME(eddyb) rename this to `eh_pad_for`.
1431     fn landing_pad_for(&mut self, bb: mir::BasicBlock) -> Bx::BasicBlock {
1432         if let Some(landing_pad) = self.landing_pads[bb] {
1433             return landing_pad;
1434         }
1435
1436         let landing_pad = self.landing_pad_for_uncached(bb);
1437         self.landing_pads[bb] = Some(landing_pad);
1438         landing_pad
1439     }
1440
1441     // FIXME(eddyb) rename this to `eh_pad_for_uncached`.
1442     fn landing_pad_for_uncached(&mut self, bb: mir::BasicBlock) -> Bx::BasicBlock {
1443         let llbb = self.llbb(bb);
1444         if base::wants_msvc_seh(self.cx.sess()) {
1445             let funclet;
1446             let ret_llbb;
1447             match self.mir[bb].terminator.as_ref().map(|t| &t.kind) {
1448                 // This is a basic block from which we abort the program,
1449                 // notably in an `extern` function. These basic blocks are inserted
1450                 // to assert that `extern` functions really do not panic, and to
1451                 // abort the process if one does.
1452                 //
1453                 // On MSVC these are tricky though (where we're doing funclets). If
1454                 // we were to do a cleanuppad (like below) the normal functions like
1455                 // `longjmp` would trigger the abort logic, terminating the
1456                 // program. Instead we insert the equivalent of `catch(...)` for C++
1457                 // which magically doesn't trigger when `longjmp` flies over this
1458                 // frame.
1459                 //
1460                 // Lots more discussion can be found on #48251 but this codegen is
1461                 // modeled after clang's for:
1462                 //
1463                 //      try {
1464                 //          foo();
1465                 //      } catch (...) {
1466                 //          bar();
1467                 //      }
1468                 Some(&mir::TerminatorKind::Abort) => {
1469                     let cs_llbb =
1470                         Bx::append_block(self.cx, self.llfn, &format!("cs_funclet{:?}", bb));
1471                     let cp_llbb =
1472                         Bx::append_block(self.cx, self.llfn, &format!("cp_funclet{:?}", bb));
1473                     ret_llbb = cs_llbb;
1474
1475                     let mut cs_bx = Bx::build(self.cx, cs_llbb);
1476                     let cs = cs_bx.catch_switch(None, None, &[cp_llbb]);
1477
1478                     // The "null" here is actually an RTTI type descriptor for the
1479                     // C++ personality function, but `catch (...)` has no type so
1480                     // it's null. The 64 is a bitfield value marking this as a
1481                     // catch-all block.
1482                     let mut cp_bx = Bx::build(self.cx, cp_llbb);
1483                     let null = cp_bx.const_null(
1484                         cp_bx.type_i8p_ext(cp_bx.cx().data_layout().instruction_address_space),
1485                     );
1486                     let sixty_four = cp_bx.const_i32(64);
1487                     funclet = cp_bx.catch_pad(cs, &[null, sixty_four, null]);
1488                     cp_bx.br(llbb);
1489                 }
1490                 _ => {
1491                     let cleanup_llbb =
1492                         Bx::append_block(self.cx, self.llfn, &format!("funclet_{:?}", bb));
1493                     ret_llbb = cleanup_llbb;
1494                     let mut cleanup_bx = Bx::build(self.cx, cleanup_llbb);
1495                     funclet = cleanup_bx.cleanup_pad(None, &[]);
1496                     cleanup_bx.br(llbb);
1497                 }
1498             }
1499             self.funclets[bb] = Some(funclet);
1500             ret_llbb
1501         } else {
1502             let cleanup_llbb = Bx::append_block(self.cx, self.llfn, "cleanup");
1503             let mut cleanup_bx = Bx::build(self.cx, cleanup_llbb);
1504
1505             let llpersonality = self.cx.eh_personality();
1506             let llretty = self.landing_pad_type();
1507             let lp = cleanup_bx.cleanup_landing_pad(llretty, llpersonality);
1508
1509             let slot = self.get_personality_slot(&mut cleanup_bx);
1510             slot.storage_live(&mut cleanup_bx);
1511             Pair(cleanup_bx.extract_value(lp, 0), cleanup_bx.extract_value(lp, 1))
1512                 .store(&mut cleanup_bx, slot);
1513
1514             cleanup_bx.br(llbb);
1515             cleanup_llbb
1516         }
1517     }
1518
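         /// The `{ i8*, i32 }` aggregate produced by a landing pad: the exception pointer
         /// plus the selector value.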
1519     fn landing_pad_type(&self) -> Bx::Type {
1520         let cx = self.cx;
1521         cx.type_struct(&[cx.type_i8p(), cx.type_i32()], false)
1522     }
1523
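         /// Returns (creating and caching it on first use) a block containing nothing but
         /// an `unreachable` terminator.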
1524     fn unreachable_block(&mut self) -> Bx::BasicBlock {
1525         self.unreachable_block.unwrap_or_else(|| {
1526             let llbb = Bx::append_block(self.cx, self.llfn, "unreachable");
1527             let mut bx = Bx::build(self.cx, llbb);
1528             bx.unreachable();
1529             self.unreachable_block = Some(llbb);
1530             llbb
1531         })
1532     }
1533
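         /// Returns (creating and caching it on first use) the landing pad used to guard
         /// against double unwinds: instead of continuing to unwind, it calls the
         /// `panic_no_unwind` lang item, which aborts.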
1534     fn double_unwind_guard(&mut self) -> Bx::BasicBlock {
1535         self.double_unwind_guard.unwrap_or_else(|| {
1536             assert!(!base::wants_msvc_seh(self.cx.sess()));
1537
1538             let llbb = Bx::append_block(self.cx, self.llfn, "abort");
1539             let mut bx = Bx::build(self.cx, llbb);
1540             self.set_debug_loc(&mut bx, mir::SourceInfo::outermost(self.mir.span));
1541
1542             let llpersonality = self.cx.eh_personality();
1543             let llretty = self.landing_pad_type();
1544             bx.cleanup_landing_pad(llretty, llpersonality);
1545
1546             let (fn_abi, fn_ptr) = common::build_langcall(&bx, None, LangItem::PanicNoUnwind);
1547             let fn_ty = bx.fn_decl_backend_type(&fn_abi);
1548
1549             let llret = bx.call(fn_ty, Some(&fn_abi), fn_ptr, &[], None);
1550             bx.do_not_inline(llret);
1551
1552             bx.unreachable();
1553
1554             self.double_unwind_guard = Some(llbb);
1555             llbb
1556         })
1557     }
1558
1559     /// Get the backend `BasicBlock` for a MIR `BasicBlock`, either already
1560     /// cached in `self.cached_llbbs`, or created on demand (and cached).
1561     // FIXME(eddyb) rename `llbb` and other `ll`-prefixed things to use a
1562     // more backend-agnostic prefix such as `cg` (i.e. this would be `cgbb`).
1563     pub fn llbb(&mut self, bb: mir::BasicBlock) -> Bx::BasicBlock {
1564         self.cached_llbbs[bb].unwrap_or_else(|| {
1565             // FIXME(eddyb) only name the block if `fewer_names` is `false`.
1566             let llbb = Bx::append_block(self.cx, self.llfn, &format!("{:?}", bb));
1567             self.cached_llbbs[bb] = Some(llbb);
1568             llbb
1569         })
1570     }
1571
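         /// Decides where a call's return value will go: nowhere (ignored returns), into an
         /// existing place, or into a temporary for an operand local. When the ABI returns
         /// indirectly, the destination pointer is pushed onto `llargs` instead.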
1572     fn make_return_dest(
1573         &mut self,
1574         bx: &mut Bx,
1575         dest: mir::Place<'tcx>,
1576         fn_ret: &ArgAbi<'tcx, Ty<'tcx>>,
1577         llargs: &mut Vec<Bx::Value>,
1578         is_intrinsic: bool,
1579     ) -> ReturnDest<'tcx, Bx::Value> {
1580         // If the return is ignored, we can just return a do-nothing `ReturnDest`.
1581         if fn_ret.is_ignore() {
1582             return ReturnDest::Nothing;
1583         }
1584         let dest = if let Some(index) = dest.as_local() {
1585             match self.locals[index] {
1586                 LocalRef::Place(dest) => dest,
1587                 LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
1588                 LocalRef::Operand(None) => {
1589                     // Handle temporary places, specifically `Operand` ones, as
1590                     // they don't have `alloca`s.
1591                     return if fn_ret.is_indirect() {
1592                         // Odd, but possible, case: we have an operand temporary,
1593                         // but the calling convention has an indirect return.
1594                         let tmp = PlaceRef::alloca(bx, fn_ret.layout);
1595                         tmp.storage_live(bx);
1596                         llargs.push(tmp.llval);
1597                         ReturnDest::IndirectOperand(tmp, index)
1598                     } else if is_intrinsic {
1599                         // Currently, intrinsics always need a location to store
1600                         // the result, so we create a temporary `alloca` for the
1601                         // result.
1602                         let tmp = PlaceRef::alloca(bx, fn_ret.layout);
1603                         tmp.storage_live(bx);
1604                         ReturnDest::IndirectOperand(tmp, index)
1605                     } else {
1606                         ReturnDest::DirectOperand(index)
1607                     };
1608                 }
1609                 LocalRef::Operand(Some(_)) => {
1610                     bug!("place local already assigned to");
1611                 }
1612             }
1613         } else {
1614             self.codegen_place(
1615                 bx,
1616                 mir::PlaceRef { local: dest.local, projection: &dest.projection },
1617             )
1618         };
1619         if fn_ret.is_indirect() {
1620             if dest.align < dest.layout.align.abi {
1621                 // Currently, MIR code generation does not create calls
1622                 // that store directly to fields of packed structs (in
1623                 // fact, the calls it creates write only to temps).
1624                 //
1625                 // If someone changes that, please update this code path
1626                 // to create a temporary.
1627                 span_bug!(self.mir.span, "can't directly store to unaligned value");
1628             }
1629             llargs.push(dest.llval);
1630             ReturnDest::Nothing
1631         } else {
1632             ReturnDest::Store(dest)
1633         }
1634     }
1635
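         /// Codegens `transmute(src)` into the place `dst`. A destination that is an
         /// operand local without an `alloca` gets a temporary slot, which is then loaded
         /// back into the operand.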
1636     fn codegen_transmute(&mut self, bx: &mut Bx, src: &mir::Operand<'tcx>, dst: mir::Place<'tcx>) {
1637         if let Some(index) = dst.as_local() {
1638             match self.locals[index] {
1639                 LocalRef::Place(place) => self.codegen_transmute_into(bx, src, place),
1640                 LocalRef::UnsizedPlace(_) => bug!("transmute must not involve unsized locals"),
1641                 LocalRef::Operand(None) => {
1642                     let dst_layout = bx.layout_of(self.monomorphized_place_ty(dst.as_ref()));
1643                     assert!(!dst_layout.ty.has_erasable_regions());
1644                     let place = PlaceRef::alloca(bx, dst_layout);
1645                     place.storage_live(bx);
1646                     self.codegen_transmute_into(bx, src, place);
1647                     let op = bx.load_operand(place);
1648                     place.storage_dead(bx);
1649                     self.locals[index] = LocalRef::Operand(Some(op));
1650                     self.debug_introduce_local(bx, index);
1651                 }
1652                 LocalRef::Operand(Some(op)) => {
1653                     assert!(op.layout.is_zst(), "assigning to initialized SSA temp");
1654                 }
1655             }
1656         } else {
1657             let dst = self.codegen_place(bx, dst.as_ref());
1658             self.codegen_transmute_into(bx, src, dst);
1659         }
1660     }
1661
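         /// Stores `src` into `dst` reinterpreted as `src`'s type: scalar-to-scalar
         /// transmutes (between two pointers or two non-pointers) become plain bitcasts,
         /// everything else goes through a pointer cast to `src`'s backend type.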
1662     fn codegen_transmute_into(
1663         &mut self,
1664         bx: &mut Bx,
1665         src: &mir::Operand<'tcx>,
1666         dst: PlaceRef<'tcx, Bx::Value>,
1667     ) {
1668         let src = self.codegen_operand(bx, src);
1669
1670         // Special-case transmutes between scalars as simple bitcasts.
1671         match (src.layout.abi, dst.layout.abi) {
1672             (abi::Abi::Scalar(src_scalar), abi::Abi::Scalar(dst_scalar)) => {
1673                 // HACK(eddyb) LLVM doesn't like `bitcast`s between pointers and non-pointers.
1674                 if (src_scalar.primitive() == abi::Pointer)
1675                     == (dst_scalar.primitive() == abi::Pointer)
1676                 {
1677                     assert_eq!(src.layout.size, dst.layout.size);
1678
1679                     // NOTE(eddyb) the `from_immediate` and `to_immediate_scalar`
1680                     // conversions allow handling `bool`s the same as `u8`s.
1681                     let src = bx.from_immediate(src.immediate());
1682                     let src_as_dst = bx.bitcast(src, bx.backend_type(dst.layout));
1683                     Immediate(bx.to_immediate_scalar(src_as_dst, dst_scalar)).store(bx, dst);
1684                     return;
1685                 }
1686             }
1687             _ => {}
1688         }
1689
1690         let llty = bx.backend_type(src.layout);
1691         let cast_ptr = bx.pointercast(dst.llval, bx.type_ptr_to(llty));
1692         let align = src.layout.align.abi.min(dst.align);
1693         src.val.store(bx, PlaceRef::new_sized_aligned(cast_ptr, src.layout, align));
1694     }
1695
1696     /// Stores the return value of a function call into its final location.
1697     fn store_return(
1698         &mut self,
1699         bx: &mut Bx,
1700         dest: ReturnDest<'tcx, Bx::Value>,
1701         ret_abi: &ArgAbi<'tcx, Ty<'tcx>>,
1702         llval: Bx::Value,
1703     ) {
1704         use self::ReturnDest::*;
1705
1706         match dest {
1707             Nothing => (),
1708             Store(dst) => bx.store_arg(&ret_abi, llval, dst),
1709             IndirectOperand(tmp, index) => {
1710                 let op = bx.load_operand(tmp);
1711                 tmp.storage_dead(bx);
1712                 self.locals[index] = LocalRef::Operand(Some(op));
1713                 self.debug_introduce_local(bx, index);
1714             }
1715             DirectOperand(index) => {
1716                 // If there is a cast, we have to store and reload.
1717                 let op = if let PassMode::Cast(..) = ret_abi.mode {
1718                     let tmp = PlaceRef::alloca(bx, ret_abi.layout);
1719                     tmp.storage_live(bx);
1720                     bx.store_arg(&ret_abi, llval, tmp);
1721                     let op = bx.load_operand(tmp);
1722                     tmp.storage_dead(bx);
1723                     op
1724                 } else {
1725                     OperandRef::from_immediate_or_packed_pair(bx, llval, ret_abi.layout)
1726                 };
1727                 self.locals[index] = LocalRef::Operand(Some(op));
1728                 self.debug_introduce_local(bx, index);
1729             }
1730         }
1731     }
1732 }
1733
1734 enum ReturnDest<'tcx, V> {
1735     // Do nothing; the return value is indirect or ignored.
1736     Nothing,
1737     // Store the return value to the pointer.
1738     Store(PlaceRef<'tcx, V>),
1739     // Store an indirect return value to an operand local place.
1740     IndirectOperand(PlaceRef<'tcx, V>, mir::Local),
1741     // Store a direct return value to an operand local place.
1742     DirectOperand(mir::Local),
1743 }