1 use crate::util::patch::MirPatch;
3 use rustc_hir::lang_items;
4 use rustc_index::vec::Idx;
5 use rustc_middle::mir::*;
6 use rustc_middle::traits::Reveal;
7 use rustc_middle::ty::subst::SubstsRef;
8 use rustc_middle::ty::util::IntTypeExt;
9 use rustc_middle::ty::{self, Ty, TyCtxt};
10 use rustc_target::abi::VariantIdx;
13 use std::convert::TryInto;
/// Whether the place guarded by a drop flag is currently initialized.
15 #[derive(Debug, PartialEq, Eq, Copy, Clone)]
16 pub enum DropFlagState {
17 Present, // i.e., initialized
18 Absent, // i.e., deinitialized or "moved"
// Maps the flag state to the boolean stored in the drop-flag local:
// `true` iff the value is still initialized and must be dropped.
22 pub fn value(self) -> bool {
24 DropFlagState::Present => true,
25 DropFlagState::Absent => false,
/// How much of a path's drop-flag state an operation affects
/// (the variants are elided from this excerpt; `Shallow` and `Deep`
/// are used below — TODO confirm against the full file).
39 pub enum DropFlagMode {
44 #[derive(Copy, Clone, Debug)]
// True only while we are already on the unwind (cleanup) path; blocks
// created then must themselves be marked `is_cleanup`.
51 fn is_cleanup(self) -> bool {
53 Unwind::To(..) => false,
54 Unwind::InCleanup => true,
// The unwind target to put in a terminator, or `None` when already in
// cleanup (a cleanup block has no further unwind edge).
58 fn into_option(self) -> Option<BasicBlock> {
60 Unwind::To(bb) => Some(bb),
61 Unwind::InCleanup => None,
// Applies `f` to the unwind target block, if there is one;
// `InCleanup` is left untouched.
65 fn map<F>(self, f: F) -> Self
67 F: FnOnce(BasicBlock) -> BasicBlock,
70 Unwind::To(bb) => Unwind::To(f(bb)),
71 Unwind::InCleanup => Unwind::InCleanup,
/// The interface drop elaboration uses to query move-path information,
/// manipulate drop flags, and record MIR edits via a `MirPatch`.
76 pub trait DropElaborator<'a, 'tcx>: fmt::Debug {
/// An identifier for a (move-)path being dropped.
77 type Path: Copy + fmt::Debug;
// Accessors for the patch under construction and ambient context.
79 fn patch(&mut self) -> &mut MirPatch<'tcx>;
80 fn body(&self) -> &'a Body<'tcx>;
81 fn tcx(&self) -> TyCtxt<'tcx>;
82 fn param_env(&self) -> ty::ParamEnv<'tcx>;
// Drop-flag queries/updates for a given path.
84 fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;
85 fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>>;
86 fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode);
// Subpath navigation; `None` means the subpath is untracked
// and is dropped under its parent's flag.
88 fn field_subpath(&self, path: Self::Path, field: Field) -> Option<Self::Path>;
89 fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path>;
90 fn downcast_subpath(&self, path: Self::Path, variant: VariantIdx) -> Option<Self::Path>;
91 fn array_subpath(&self, path: Self::Path, index: u32, size: u32) -> Option<Self::Path>;
/// Bundles the state threaded through the construction of one elaborated
/// drop: the elaborator, the source span, and (elided here) the place,
/// path, success block, and unwind action.
95 struct DropCtxt<'l, 'b, 'tcx, D>
97 D: DropElaborator<'b, 'tcx>,
99 elaborator: &'l mut D,
101 source_info: SourceInfo,
/// Entry point: builds a `DropCtxt` from the given pieces and elaborates
/// the drop terminator at `bb` (see `DropCtxt::elaborate_drop`).
109 pub fn elaborate_drop<'b, 'tcx, D>(
111 source_info: SourceInfo,
118 D: DropElaborator<'b, 'tcx>,
121 DropCtxt { elaborator, source_info, place, path, succ, unwind }.elaborate_drop(bb)
124 impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
126 D: DropElaborator<'b, 'tcx>,
// The type of `place` in the body being patched.
129 fn place_ty(&self, place: Place<'tcx>) -> Ty<'tcx> {
130 place.ty(self.elaborator.body(), self.tcx()).ty
// Convenience forwarder to the elaborator's `TyCtxt`.
133 fn tcx(&self) -> TyCtxt<'tcx> {
134 self.elaborator.tcx()
137 /// This elaborates a single drop instruction, located at `bb`, and
140 /// The elaborated drop checks the drop flags to only drop what
143 /// In addition, the relevant drop flags also need to be cleared
144 /// to avoid double-drops. However, in the middle of a complex
145 /// drop, one must avoid clearing some of the flags before they
146 /// are read, as that would cause a memory leak.
148 /// In particular, when dropping an ADT, multiple fields may be
149 /// joined together under the `rest` subpath. They are all controlled
150 /// by the primary drop flag, but only the last rest-field dropped
151 /// should clear it (and it must also not clear anything else).
153 // FIXME: I think we should just control the flags externally,
154 // and then we do not need this machinery.
155 pub fn elaborate_drop(&mut self, bb: BasicBlock) {
156 debug!("elaborate_drop({:?}, {:?})", bb, self);
157 let style = self.elaborator.drop_style(self.path, DropFlagMode::Deep);
158 debug!("elaborate_drop({:?}, {:?}): live - {:?}", bb, self, style);
// Dead path (arm label elided in this excerpt): the value is never
// initialized here, so the drop becomes a plain goto to `succ`.
163 .patch_terminator(bb, TerminatorKind::Goto { target: self.succ });
// Definitely initialized: drop unconditionally, clearing the flag
// at the terminator location first so we cannot double-drop.
165 DropStyle::Static => {
166 let loc = self.terminator_loc(bb);
167 self.elaborator.clear_drop_flag(loc, self.path, DropFlagMode::Deep);
168 self.elaborator.patch().patch_terminator(
170 TerminatorKind::Drop {
171 location: self.place,
173 unwind: self.unwind.into_option(),
// Maybe initialized: emit a drop guarded by the drop flag, which
// also deeply resets the flag (`complete_drop`).
177 DropStyle::Conditional => {
178 let unwind = self.unwind; // FIXME(#43234)
179 let succ = self.succ;
180 let drop_bb = self.complete_drop(Some(DropFlagMode::Deep), succ, unwind);
183 .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
// Remaining arm (label elided; presumably `DropStyle::Open` — TODO
// confirm): partially moved-out value, needs a field-by-field drop.
186 let drop_bb = self.open_drop();
189 .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
194 /// Returns the place and move path for each field of `variant`,
195 /// (the move path is `None` if the field is a rest field).
196 fn move_paths_for_fields(
198 base_place: Place<'tcx>,
199 variant_path: D::Path,
200 variant: &'tcx ty::VariantDef,
201 substs: SubstsRef<'tcx>,
202 ) -> Vec<(Place<'tcx>, Option<D::Path>)> {
208 let field = Field::new(i);
209 let subpath = self.elaborator.field_subpath(variant_path, field);
210 let tcx = self.tcx();
// Drop glue runs post-monomorphization, so the param-env must reveal
// everything; normalization below relies on that.
212 assert_eq!(self.elaborator.param_env().reveal, Reveal::All);
214 tcx.normalize_erasing_regions(self.elaborator.param_env(), f.ty(tcx, substs));
215 (tcx.mk_place_field(base_place.clone(), field, field_ty), subpath)
// Builds the drop for one subfield. A `Some(path)` field has its own
// drop flag and gets a fully elaborated drop; a `None` ("rest") field
// is dropped conditionally on the *parent* flag instead.
223 path: Option<D::Path>,
227 if let Some(path) = path {
228 debug!("drop_subpath: for std field {:?}", place);
231 elaborator: self.elaborator,
232 source_info: self.source_info,
238 .elaborated_drop_block()
240 debug!("drop_subpath: for rest field {:?}", place);
243 elaborator: self.elaborator,
244 source_info: self.source_info,
248 // Using `self.path` here to condition the drop on
249 // our own drop flag.
// `None` mode: the parent's flag must not be cleared here — only the
// last rest-field drop may clear it (see `elaborate_drop`'s docs).
252 .complete_drop(None, succ, unwind)
256 /// Creates one-half of the drop ladder for a list of fields, and return
257 /// the list of steps in it in reverse order, with the first step
258 /// dropping 0 fields and so on.
260 /// `unwind_ladder` is such a list of steps in reverse order,
261 /// which is called if the matching step of the drop glue panics.
264 unwind_ladder: &[Unwind],
265 mut succ: BasicBlock,
266 fields: &[(Place<'tcx>, Option<D::Path>)],
267 ) -> Vec<BasicBlock> {
// Each step drops one more (reversed) field, chaining into the block
// built for the previous step; `succ` accumulates the chain.
270 .chain(fields.iter().rev().zip(unwind_ladder).map(|(&(place, path), &unwind_succ)| {
271 succ = self.drop_subpath(place, path, succ, unwind_succ);
// Builds the (succ, unwind) pair that a drop ladder should end at:
// both edges first shallowly reset the "master" drop flag.
277 fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind) {
278 // Clear the "master" drop flag at the end. This is needed
279 // because the "master" drop protects the ADT's discriminant,
280 // which is invalidated after the ADT is dropped.
281 let (succ, unwind) = (self.succ, self.unwind); // FIXME(#43234)
283 self.drop_flag_reset_block(DropFlagMode::Shallow, succ, unwind),
284 unwind.map(|unwind| {
285 self.drop_flag_reset_block(DropFlagMode::Shallow, unwind, Unwind::InCleanup)
290 /// Creates a full drop ladder, consisting of 2 connected half-drop-ladders
292 /// For example, with 3 fields, the drop ladder is
295 /// ELAB(drop location.0 [target=.d1, unwind=.c1])
297 /// ELAB(drop location.1 [target=.d2, unwind=.c2])
299 /// ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`])
301 /// ELAB(drop location.1 [target=.c2])
303 /// ELAB(drop location.2 [target=`self.unwind`])
305 /// NOTE: this does not clear the master drop flag, so you need
306 /// to point succ/unwind on a `drop_ladder_bottom`.
309 fields: Vec<(Place<'tcx>, Option<D::Path>)>,
312 ) -> (BasicBlock, Unwind) {
313 debug!("drop_ladder({:?}, {:?})", self, fields);
// Fields whose type needs no drop are skipped entirely.
315 let mut fields = fields;
316 fields.retain(|&(place, _)| {
317 self.place_ty(place).needs_drop(self.tcx(), self.elaborator.param_env())
320 debug!("drop_ladder - fields needing drop: {:?}", fields);
// First build the cleanup half-ladder (all steps `InCleanup`), then the
// normal half-ladder whose steps unwind into the matching cleanup step.
322 let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
323 let unwind_ladder: Vec<_> = if let Unwind::To(target) = unwind {
324 let halfladder = self.drop_halfladder(&unwind_ladder, target, &fields);
325 halfladder.into_iter().map(Unwind::To).collect()
330 let normal_ladder = self.drop_halfladder(&unwind_ladder, succ, &fields);
// The last entries are the ladder entry points (drop all fields).
332 (*normal_ladder.last().unwrap(), *unwind_ladder.last().unwrap())
// Open-drops a tuple-like sequence of types: one ladder step per element,
// ending at a `drop_ladder_bottom`.
335 fn open_drop_for_tuple(&mut self, tys: &[Ty<'tcx>]) -> BasicBlock {
336 debug!("open_drop_for_tuple({:?}, {:?})", self, tys);
343 self.tcx().mk_place_field(self.place.clone(), Field::new(i), ty),
344 self.elaborator.field_subpath(self.path, Field::new(i)),
349 let (succ, unwind) = self.drop_ladder_bottom();
350 self.drop_ladder(fields, succ, unwind).0
// Open-drops a `Box`: drop the interior `*place` first, then free the
// allocation via `box_free` (both on the normal and the unwind path).
353 fn open_drop_for_box(&mut self, adt: &'tcx ty::AdtDef, substs: SubstsRef<'tcx>) -> BasicBlock {
354 debug!("open_drop_for_box({:?}, {:?}, {:?})", self, adt, substs);
356 let interior = self.tcx().mk_place_deref(self.place.clone());
357 let interior_path = self.elaborator.deref_subpath(self.path);
359 let succ = self.succ; // FIXME(#43234)
360 let unwind = self.unwind;
361 let succ = self.box_free_block(adt, substs, succ, unwind);
363 self.unwind.map(|unwind| self.box_free_block(adt, substs, unwind, Unwind::InCleanup));
365 self.drop_subpath(interior, interior_path, succ, unwind_succ)
368 fn open_drop_for_adt(&mut self, adt: &'tcx ty::AdtDef, substs: SubstsRef<'tcx>) -> BasicBlock {
369 debug!("open_drop_for_adt({:?}, {:?}, {:?})", self, adt, substs);
// An uninhabited ADT (no variants) can never exist, so the drop is
// replaced by an `Unreachable` block.
370 if adt.variants.is_empty() {
371 return self.elaborator.patch().new_block(BasicBlockData {
373 terminator: Some(Terminator {
374 source_info: self.source_info,
375 kind: TerminatorKind::Unreachable,
377 is_cleanup: self.unwind.is_cleanup(),
// Unions and `ManuallyDrop` never drop their contents.
382 adt.is_union() || Some(adt.did) == self.tcx().lang_items().manually_drop();
383 let contents_drop = if skip_contents {
384 (self.succ, self.unwind)
386 self.open_drop_for_adt_contents(adt, substs)
// If the ADT has a `Drop` impl, call it before dropping the contents.
389 if adt.has_dtor(self.tcx()) {
390 self.destructor_call_block(contents_drop)
// Drops the fields of an ADT: a plain ladder for a single-variant type
// (visible here as variant 0), otherwise a discriminant switch
// (`open_drop_for_multivariant`).
396 fn open_drop_for_adt_contents(
398 adt: &'tcx ty::AdtDef,
399 substs: SubstsRef<'tcx>,
400 ) -> (BasicBlock, Unwind) {
401 let (succ, unwind) = self.drop_ladder_bottom();
403 let fields = self.move_paths_for_fields(
406 &adt.variants[VariantIdx::new(0)],
409 self.drop_ladder(fields, succ, unwind)
411 self.open_drop_for_multivariant(adt, substs, succ, unwind)
// Drops a multi-variant enum: builds one drop ladder per variant whose
// move path is tracked, plus an "otherwise" arm for untracked variants,
// then switches on the discriminant (separately on the unwind path).
415 fn open_drop_for_multivariant(
417 adt: &'tcx ty::AdtDef,
418 substs: SubstsRef<'tcx>,
421 ) -> (BasicBlock, Unwind) {
422 let mut values = Vec::with_capacity(adt.variants.len());
423 let mut normal_blocks = Vec::with_capacity(adt.variants.len());
// No separate unwind switch is needed if we are already in cleanup.
424 let mut unwind_blocks =
425 if unwind.is_cleanup() { None } else { Some(Vec::with_capacity(adt.variants.len())) };
427 let mut have_otherwise_with_drop_glue = false;
428 let mut have_otherwise = false;
429 let tcx = self.tcx();
431 for (variant_index, discr) in adt.discriminants(tcx) {
432 let variant = &adt.variants[variant_index];
433 let subpath = self.elaborator.downcast_subpath(self.path, variant_index);
435 if let Some(variant_path) = subpath {
436 let base_place = tcx.mk_place_elem(
438 ProjectionElem::Downcast(Some(variant.ident.name), variant_index),
440 let fields = self.move_paths_for_fields(base_place, variant_path, &variant, substs);
441 values.push(discr.val);
442 if let Unwind::To(unwind) = unwind {
443 // We can't use the half-ladder from the original
444 // drop ladder, because this breaks the
445 // "funclet can't have 2 successor funclets"
446 // requirement from MSVC:
448 // switch unwind-switch
450 // v1.0 v2.0 v2.0-unwind v1.0-unwind
452 // v1.1-unwind v2.1-unwind |
454 // \-------------------------------/
456 // Create a duplicate half-ladder to avoid that. We
457 // could technically only do this on MSVC, but I
458 // want to minimize the divergence between MSVC
461 let unwind_blocks = unwind_blocks.as_mut().unwrap();
462 let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
463 let halfladder = self.drop_halfladder(&unwind_ladder, unwind, &fields);
464 unwind_blocks.push(halfladder.last().cloned().unwrap());
466 let (normal, _) = self.drop_ladder(fields, succ, unwind);
467 normal_blocks.push(normal);
// Untracked variant: it falls into the switch's otherwise arm.
469 have_otherwise = true;
471 let param_env = self.elaborator.param_env();
472 let have_field_with_drop_glue = variant
475 .any(|field| field.ty(tcx, substs).needs_drop(tcx, param_env));
476 if have_field_with_drop_glue {
477 have_otherwise_with_drop_glue = true;
// Otherwise arm: a plain goto when no untracked variant needs drop
// glue, a real drop block when one might.
484 } else if !have_otherwise_with_drop_glue {
485 normal_blocks.push(self.goto_block(succ, unwind));
486 if let Unwind::To(unwind) = unwind {
487 unwind_blocks.as_mut().unwrap().push(self.goto_block(unwind, Unwind::InCleanup));
490 normal_blocks.push(self.drop_block(succ, unwind));
491 if let Unwind::To(unwind) = unwind {
492 unwind_blocks.as_mut().unwrap().push(self.drop_block(unwind, Unwind::InCleanup));
497 self.adt_switch_block(adt, normal_blocks, &values, succ, unwind),
498 unwind.map(|unwind| {
499 self.adt_switch_block(
501 unwind_blocks.unwrap(),
// Builds the block that reads the enum's discriminant into a temporary
// and `SwitchInt`s to the per-variant drop blocks; the whole switch is
// guarded by a (shallow) drop-flag test.
512 adt: &'tcx ty::AdtDef,
513 blocks: Vec<BasicBlock>,
518 // If there are multiple variants, then if something
519 // is present within the enum the discriminant, tracked
520 // by the rest path, must be initialized.
522 // Additionally, we do not want to switch on the
523 // discriminant after it is freed, because that
524 // way lies only trouble.
525 let discr_ty = adt.repr.discr_type().to_ty(self.tcx());
526 let discr = Place::from(self.new_temp(discr_ty));
527 let discr_rv = Rvalue::Discriminant(self.place);
528 let switch_block = BasicBlockData {
529 statements: vec![self.assign(discr, discr_rv)],
530 terminator: Some(Terminator {
531 source_info: self.source_info,
532 kind: TerminatorKind::SwitchInt {
533 discr: Operand::Move(discr),
535 values: From::from(values.to_owned()),
539 is_cleanup: unwind.is_cleanup(),
541 let switch_block = self.elaborator.patch().new_block(switch_block);
542 self.drop_flag_test_block(switch_block, succ, unwind)
// Builds the block that invokes the type's `Drop::drop` on a fresh
// `&mut` borrow of the place, continuing to `succ` (contents drop) on
// return and to `unwind` on panic.
545 fn destructor_call_block(&mut self, (succ, unwind): (BasicBlock, Unwind)) -> BasicBlock {
546 debug!("destructor_call_block({:?}, {:?})", self, succ);
547 let tcx = self.tcx();
548 let drop_trait = tcx.lang_items().drop_trait().unwrap();
// `Drop` has a single method (`drop`), so the first item in definition
// order is the one to call.
549 let drop_fn = tcx.associated_items(drop_trait).in_definition_order().next().unwrap();
550 let ty = self.place_ty(self.place);
551 let substs = tcx.mk_substs_trait(ty, &[]);
554 tcx.mk_ref(tcx.lifetimes.re_erased, ty::TypeAndMut { ty, mutbl: hir::Mutability::Mut });
555 let ref_place = self.new_temp(ref_ty);
// `Drop::drop` returns unit; the call needs a destination place.
556 let unit_temp = Place::from(self.new_temp(tcx.mk_unit()));
558 let result = BasicBlockData {
559 statements: vec![self.assign(
560 Place::from(ref_place),
562 tcx.lifetimes.re_erased,
563 BorrowKind::Mut { allow_two_phase_borrow: false },
567 terminator: Some(Terminator {
568 kind: TerminatorKind::Call {
569 func: Operand::function_handle(
573 self.source_info.span,
575 args: vec![Operand::Move(Place::from(ref_place))],
576 destination: Some((unit_temp, succ)),
577 cleanup: unwind.into_option(),
580 source_info: self.source_info,
582 is_cleanup: unwind.is_cleanup(),
584 self.elaborator.patch().new_block(result)
587 /// Create a loop that drops an array:
591 /// can_go = cur == length_or_end
592 /// if can_go then succ else drop-block
596 /// cur = cur.offset(1)
598 /// ptr = &raw mut P[cur]
// `ptr_based` selects between a raw-pointer cursor (cur advances by
// `Offset`) and an index cursor (cur advances by `Add`); in both cases
// the loop exits when `cur == length_or_end`.
607 length_or_end: Place<'tcx>,
612 let copy = |place: Place<'tcx>| Operand::Copy(place);
613 let move_ = |place: Place<'tcx>| Operand::Move(place);
614 let tcx = self.tcx();
616 let ptr_ty = tcx.mk_ptr(ty::TypeAndMut { ty: ety, mutbl: hir::Mutability::Mut });
617 let ptr = Place::from(self.new_temp(ptr_ty));
618 let can_go = Place::from(self.new_temp(tcx.types.bool));
620 let one = self.constant_usize(1);
// How to compute the element pointer and the next cursor value.
621 let (ptr_next, cur_next) = if ptr_based {
622 (Rvalue::Use(copy(cur.into())), Rvalue::BinaryOp(BinOp::Offset, move_(cur.into()), one))
625 Rvalue::AddressOf(Mutability::Mut, tcx.mk_place_index(self.place.clone(), cur)),
626 Rvalue::BinaryOp(BinOp::Add, move_(cur.into()), one),
// Loop body: compute `ptr`, advance `cur`, then drop `*ptr` (the drop
// terminator is patched in below).
630 let drop_block = BasicBlockData {
631 statements: vec![self.assign(ptr, ptr_next), self.assign(Place::from(cur), cur_next)],
632 is_cleanup: unwind.is_cleanup(),
633 terminator: Some(Terminator {
634 source_info: self.source_info,
635 // this gets overwritten by drop elaboration.
636 kind: TerminatorKind::Unreachable,
639 let drop_block = self.elaborator.patch().new_block(drop_block);
// Loop header: test `cur == length_or_end` and exit to `succ`.
641 let loop_block = BasicBlockData {
642 statements: vec![self.assign(
644 Rvalue::BinaryOp(BinOp::Eq, copy(Place::from(cur)), copy(length_or_end)),
646 is_cleanup: unwind.is_cleanup(),
647 terminator: Some(Terminator {
648 source_info: self.source_info,
649 kind: TerminatorKind::if_(tcx, move_(can_go), succ, drop_block),
652 let loop_block = self.elaborator.patch().new_block(loop_block);
654 self.elaborator.patch().patch_terminator(
656 TerminatorKind::Drop {
657 location: tcx.mk_place_deref(ptr.clone()),
659 unwind: unwind.into_option(),
666 fn open_drop_for_array(&mut self, ety: Ty<'tcx>, opt_size: Option<u64>) -> BasicBlock {
667 debug!("open_drop_for_array({:?}, {:?})", ety, opt_size);
669 // if size_of::<ety>() == 0 {
675 let tcx = self.tcx();
// Fixed-size array with individually tracked elements: build a
// ladder of per-element drops instead of a loop.
677 if let Some(size) = opt_size {
678 let size: u32 = size.try_into().unwrap_or_else(|_| {
679 bug!("move out check isn't implemented for array sizes bigger than u32::MAX");
681 let fields: Vec<(Place<'tcx>, Option<D::Path>)> = (0..size)
686 ProjectionElem::ConstantIndex {
692 self.elaborator.array_subpath(self.path, i, size),
697 if fields.iter().any(|(_, path)| path.is_some()) {
698 let (succ, unwind) = self.drop_ladder_bottom();
699 return self.drop_ladder(fields, succ, unwind).0;
// Otherwise: loop over the elements. Switch on `size_of::<ety>() == 0`
// to pick an index-based loop for ZSTs (pointer offsets would not
// advance) and a pointer-based loop for everything else.
703 let move_ = |place: Place<'tcx>| Operand::Move(place);
704 let elem_size = Place::from(self.new_temp(tcx.types.usize));
705 let len = Place::from(self.new_temp(tcx.types.usize));
707 static USIZE_SWITCH_ZERO: &[u128] = &[0];
709 let base_block = BasicBlockData {
711 self.assign(elem_size, Rvalue::NullaryOp(NullOp::SizeOf, ety)),
712 self.assign(len, Rvalue::Len(self.place)),
714 is_cleanup: self.unwind.is_cleanup(),
715 terminator: Some(Terminator {
716 source_info: self.source_info,
717 kind: TerminatorKind::SwitchInt {
718 discr: move_(elem_size),
719 switch_ty: tcx.types.usize,
720 values: From::from(USIZE_SWITCH_ZERO),
722 self.drop_loop_pair(ety, false, len.clone()),
723 self.drop_loop_pair(ety, true, len.clone()),
728 self.elaborator.patch().new_block(base_block)
731 /// Creates a pair of drop-loops of `place`, which drops its contents, even
732 /// in the case of 1 panic. If `ptr_based`, creates a pointer loop,
733 /// otherwise create an index loop.
740 debug!("drop_loop_pair({:?}, {:?})", ety, ptr_based);
741 let tcx = self.tcx();
// Cursor type: `*mut ety` for the pointer loop, `usize` for the index
// loop.
742 let iter_ty = if ptr_based { tcx.mk_mut_ptr(ety) } else { tcx.types.usize };
744 let cur = self.new_temp(iter_ty);
745 let length_or_end = if ptr_based { Place::from(self.new_temp(iter_ty)) } else { length };
// Build the cleanup loop first so the normal loop can unwind into it.
747 let unwind = self.unwind.map(|unwind| {
748 self.drop_loop(unwind, cur, length_or_end, ety, Unwind::InCleanup, ptr_based)
751 let loop_block = self.drop_loop(self.succ, cur, length_or_end, ety, unwind, ptr_based);
// Preamble: initialize the cursor (and, for the pointer loop, the end
// pointer) before entering the loop.
753 let cur = Place::from(cur);
754 let drop_block_stmts = if ptr_based {
755 let tmp_ty = tcx.mk_mut_ptr(self.place_ty(self.place));
756 let tmp = Place::from(self.new_temp(tmp_ty));
758 // cur = tmp as *mut T;
759 // end = Offset(cur, len);
761 self.assign(tmp, Rvalue::AddressOf(Mutability::Mut, self.place)),
762 self.assign(cur, Rvalue::Cast(CastKind::Misc, Operand::Move(tmp), iter_ty)),
765 Rvalue::BinaryOp(BinOp::Offset, Operand::Copy(cur), Operand::Move(length)),
769 // cur = 0 (length already pushed)
770 let zero = self.constant_usize(0);
771 vec![self.assign(cur, Rvalue::Use(zero))]
773 let drop_block = self.elaborator.patch().new_block(BasicBlockData {
774 statements: drop_block_stmts,
775 is_cleanup: unwind.is_cleanup(),
776 terminator: Some(Terminator {
777 source_info: self.source_info,
778 kind: TerminatorKind::Goto { target: loop_block },
782 // FIXME(#34708): handle partially-dropped array/slice elements.
// Guard the whole loop behind a flag test, deeply resetting the flag on
// entry.
783 let reset_block = self.drop_flag_reset_block(DropFlagMode::Deep, drop_block, unwind);
784 self.drop_flag_test_block(reset_block, self.succ, unwind)
787 /// The slow-path - create an "open", elaborated drop for a type
788 /// which is moved-out-of only partially, and patch `bb` to a jump
789 /// to it. This must not be called on ADTs with a destructor,
790 /// as these can't be moved-out-of, except for `Box<T>`, which is
793 /// This creates a "drop ladder" that drops the needed fields of the
794 /// ADT, both in the success case or if one of the destructors fail.
795 fn open_drop(&mut self) -> BasicBlock {
796 let ty = self.place_ty(self.place);
// Closures drop like tuples of their upvars.
798 ty::Closure(_, substs) => {
799 let tys: Vec<_> = substs.as_closure().upvar_tys().collect();
800 self.open_drop_for_tuple(&tys)
802 // Note that `elaborate_drops` only drops the upvars of a generator,
803 // and this is ok because `open_drop` here can only be reached
804 // within that own generator's resume function.
805 // This should only happen for the self argument on the resume function.
806 // It effectively only contains upvars until the generator transformation runs.
807 // See librustc_mir/transform/generator.rs for more details.
808 ty::Generator(_, substs, _) => {
809 let tys: Vec<_> = substs.as_generator().upvar_tys().collect();
810 self.open_drop_for_tuple(&tys)
813 let tys: Vec<_> = ty.tuple_fields().collect();
814 self.open_drop_for_tuple(&tys)
// ADTs: `Box` gets its dedicated interior-then-free drop (the box
// check itself is elided from this excerpt).
816 ty::Adt(def, substs) => {
818 self.open_drop_for_box(def, substs)
820 self.open_drop_for_adt(def, substs)
// Arm label elided here; falls back to a flag-guarded complete drop.
824 let unwind = self.unwind; // FIXME(#43234)
825 let succ = self.succ;
826 self.complete_drop(Some(DropFlagMode::Deep), succ, unwind)
// Arrays with an evaluatable length may use per-element ladders;
// slices always use the loop form (`None` size).
828 ty::Array(ety, size) => {
829 let size = size.try_eval_usize(self.tcx(), self.elaborator.param_env());
830 self.open_drop_for_array(ety, size)
832 ty::Slice(ety) => self.open_drop_for_array(ety, None),
834 _ => bug!("open drop from non-ADT `{:?}`", ty),
838 /// Returns a basic block that drops a place using the context
839 /// and path in `c`. If `mode` is something, also clear `c`
842 /// if FLAG(self.path)
843 /// if let Some(mode) = mode: FLAG(self.path)[mode] = false
847 drop_mode: Option<DropFlagMode>,
851 debug!("complete_drop({:?},{:?})", self, drop_mode);
// Drop block, optionally preceded by a flag reset, and the whole thing
// guarded by a flag test.
853 let drop_block = self.drop_block(succ, unwind);
854 let drop_block = if let Some(mode) = drop_mode {
855 self.drop_flag_reset_block(mode, drop_block, unwind)
860 self.drop_flag_test_block(drop_block, succ, unwind)
// Creates a block that clears the drop flag for `self.path` (with the
// given `mode`) and then jumps to `succ`.
863 fn drop_flag_reset_block(
869 debug!("drop_flag_reset_block({:?},{:?})", self, mode);
871 let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
// The clear is recorded at the start of the new block.
872 let block_start = Location { block, statement_index: 0 };
873 self.elaborator.clear_drop_flag(block_start, self.path, mode);
// Creates a fresh drop block for the current (place, path) and then
// recursively elaborates it, returning the block.
877 fn elaborated_drop_block(&mut self) -> BasicBlock {
878 debug!("elaborated_drop_block({:?})", self);
879 let unwind = self.unwind; // FIXME(#43234)
880 let succ = self.succ;
881 let blk = self.drop_block(succ, unwind);
882 self.elaborate_drop(blk);
// Builds a `box_free` call block, guarded by this path's drop flag
// (skipping the free when the box was already moved out).
888 adt: &'tcx ty::AdtDef,
889 substs: SubstsRef<'tcx>,
893 let block = self.unelaborated_free_block(adt, substs, target, unwind);
894 self.drop_flag_test_block(block, target, unwind)
// Builds the raw `box_free(ptr, ...)` call block: the box's fields are
// moved out as arguments, and the path's flag is shallowly cleared at
// the start of the block.
897 fn unelaborated_free_block(
899 adt: &'tcx ty::AdtDef,
900 substs: SubstsRef<'tcx>,
904 let tcx = self.tcx();
905 let unit_temp = Place::from(self.new_temp(tcx.mk_unit()));
907 tcx.require_lang_item(lang_items::BoxFreeFnLangItem, Some(self.source_info.span));
// `Box` is a struct, so its (single) variant's fields become the
// arguments to `box_free`.
908 let args = adt.variants[VariantIdx::new(0)]
913 let field = Field::new(i);
914 let field_ty = f.ty(tcx, substs);
915 Operand::Move(tcx.mk_place_field(self.place.clone(), field, field_ty))
919 let call = TerminatorKind::Call {
920 func: Operand::function_handle(tcx, free_func, substs, self.source_info.span),
922 destination: Some((unit_temp, target)),
924 from_hir_call: false,
926 let free_block = self.new_block(unwind, call);
928 let block_start = Location { block: free_block, statement_index: 0 };
929 self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
// A new block whose terminator drops `self.place` and continues to
// `target` (unwinding to `unwind` if applicable).
933 fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
935 TerminatorKind::Drop { location: self.place, target, unwind: unwind.into_option() };
936 self.new_block(unwind, block)
// A new block that just jumps to `target`; `unwind` only determines the
// block's cleanup flag.
939 fn goto_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
940 let block = TerminatorKind::Goto { target };
941 self.new_block(unwind, block)
// Returns a block that branches on this path's (shallow) drop style:
// statically dead -> `on_unset`, statically live -> `on_set`, otherwise
// a fresh block testing the runtime drop flag.
944 fn drop_flag_test_block(
947 on_unset: BasicBlock,
950 let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
952 "drop_flag_test_block({:?},{:?},{:?},{:?}) - {:?}",
953 self, on_set, on_unset, unwind, style
957 DropStyle::Dead => on_unset,
958 DropStyle::Static => on_set,
959 DropStyle::Conditional | DropStyle::Open => {
// The flag must exist for conditionally-dropped paths.
960 let flag = self.elaborator.get_drop_flag(self.path).unwrap();
961 let term = TerminatorKind::if_(self.tcx(), flag, on_set, on_unset);
962 self.new_block(unwind, term)
// Allocates a new block in the patch with terminator `k`, inheriting the
// cleanup-ness of the current unwind action.
967 fn new_block(&mut self, unwind: Unwind, k: TerminatorKind<'tcx>) -> BasicBlock {
968 self.elaborator.patch().new_block(BasicBlockData {
970 terminator: Some(Terminator { source_info: self.source_info, kind: k }),
971 is_cleanup: unwind.is_cleanup(),
// Allocates a fresh temporary local of type `ty` at the drop's span.
975 fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
976 self.elaborator.patch().new_temp(ty, self.source_info.span)
// The `Location` of `bb`'s terminator, as seen by the patch.
979 fn terminator_loc(&mut self, bb: BasicBlock) -> Location {
980 let body = self.elaborator.body();
981 self.elaborator.patch().terminator_loc(body, bb)
// A constant `usize` operand with value `val`, at the drop's span.
984 fn constant_usize(&self, val: u16) -> Operand<'tcx> {
985 Operand::Constant(box Constant {
986 span: self.source_info.span,
988 literal: ty::Const::from_usize(self.tcx(), val.into()),
992 fn assign(&self, lhs: Place<'tcx>, rhs: Rvalue<'tcx>) -> Statement<'tcx> {
993 Statement { source_info: self.source_info, kind: StatementKind::Assign(box (lhs, rhs)) }