1 use crate::util::patch::MirPatch;
3 use rustc_hir::lang_items::LangItem;
4 use rustc_index::vec::Idx;
5 use rustc_middle::mir::*;
6 use rustc_middle::traits::Reveal;
7 use rustc_middle::ty::subst::SubstsRef;
8 use rustc_middle::ty::util::IntTypeExt;
9 use rustc_middle::ty::{self, Ty, TyCtxt};
10 use rustc_target::abi::VariantIdx;
13 /// The value of an inserted drop flag.
14 #[derive(Debug, PartialEq, Eq, Copy, Clone)]
15 pub enum DropFlagState {
16 /// The tracked value is initialized and needs to be dropped when leaving its scope.
// NOTE(review): the `Present` variant declaration itself is elided in this
// extract; it is named by the `match` arms of `value` below.
19 /// The tracked value is uninitialized or was moved out of and does not need to be dropped when
20 /// leaving its scope.
// NOTE(review): likewise the `Absent` variant declaration, the `impl` header,
// and the `match self` line are elided here.
/// Converts the state into the boolean stored in the drop-flag local:
/// `Present` => `true`, `Absent` => `false`.
25 pub fn value(self) -> bool {
27 DropFlagState::Present => true,
28 DropFlagState::Absent => false,
33 /// Describes how/if a value should be dropped.
36 /// The value is already dead at the drop location, no drop will be executed.
39 /// The value is known to always be initialized at the drop location, drop will always be
43 /// Whether the value needs to be dropped depends on its drop flag.
46 /// An "open" drop is one where only the fields of a value are dropped.
48 /// For example, this happens when moving out of a struct field: The rest of the struct will be
49 /// dropped in such an "open" drop. It is also used to generate drop glue for the individual
50 /// components of a value, for example for dropping array elements.
54 /// Which drop flags to affect/check with an operation.
56 pub enum DropFlagMode {
57 /// Only affect the top-level drop flag, not that of any contained fields.
// (Variant `Shallow` — declaration elided in this extract; see uses such as
// `DropFlagMode::Shallow` in `drop_ladder_bottom` below.)
59 /// Affect all nested drop flags in addition to the top-level one.
// (Variant `Deep` — declaration elided; see uses such as `DropFlagMode::Deep`
// in `elaborate_drop` below.)
63 /// Describes if unwinding is necessary and where to unwind to if a panic occurs.
64 #[derive(Copy, Clone, Debug)]
// NOTE(review): the `enum Unwind` header line is elided in this extract; the
// two variants (`To(BasicBlock)` and `InCleanup`) are documented below and
// matched on in the methods that follow.
66 /// Unwind to this block.
68 /// Already in an unwind path, any panic will cause an abort.
/// Whether blocks created under this unwind action must be marked as cleanup
/// blocks (`true` only when already inside the unwind path).
73 fn is_cleanup(self) -> bool {
75 Unwind::To(..) => false,
76 Unwind::InCleanup => true,
/// The unwind target as an `Option`, in the shape expected by terminators
/// such as `TerminatorKind::Drop { unwind, .. }`.
80 fn into_option(self) -> Option<BasicBlock> {
82 Unwind::To(bb) => Some(bb),
83 Unwind::InCleanup => None,
/// Applies `f` to the unwind target block, if any; `InCleanup` is returned
/// unchanged.
87 fn map<F>(self, f: F) -> Self
89 F: FnOnce(BasicBlock) -> BasicBlock,
92 Unwind::To(bb) => Unwind::To(f(bb)),
93 Unwind::InCleanup => Unwind::InCleanup,
98 pub trait DropElaborator<'a, 'tcx>: fmt::Debug {
99 /// The type representing paths that can be moved out of.
101 /// Users can move out of individual fields of a struct, such as `a.b.c`. This type is used to
102 /// represent such move paths. Sometimes tracking individual move paths is not necessary, in
103 /// which case this may be set to (for example) `()`.
104 type Path: Copy + fmt::Debug;
// Accessors for the state drop elaboration needs: the MIR patch collecting
// this pass's edits, the body being elaborated, the type context, and the
// parameter environment.
108 fn patch(&mut self) -> &mut MirPatch<'tcx>;
109 fn body(&self) -> &'a Body<'tcx>;
110 fn tcx(&self) -> TyCtxt<'tcx>;
111 fn param_env(&self) -> ty::ParamEnv<'tcx>;
115 /// Returns how `path` should be dropped, given `mode`.
116 fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;
118 /// Returns the drop flag of `path` as a MIR `Operand` (or `None` if `path` has no drop flag).
119 fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>>;
121 /// Modifies the MIR patch so that the drop flag of `path` (if any) is cleared at `location`.
123 /// If `mode` is deep, drop flags of all child paths should also be cleared by inserting
124 /// additional statements.
125 fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode);
129 /// Returns the subpath of a field of `path` (or `None` if there is no dedicated subpath).
131 /// If this returns `None`, `field` will not get a dedicated drop flag.
132 fn field_subpath(&self, path: Self::Path, field: Field) -> Option<Self::Path>;
134 /// Returns the subpath of a dereference of `path` (or `None` if there is no dedicated subpath).
136 /// If this returns `None`, `*path` will not get a dedicated drop flag.
138 /// This is only relevant for `Box<T>`, where the contained `T` can be moved out of the box.
139 fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path>;
141 /// Returns the subpath of downcasting `path` to one of its variants.
143 /// If this returns `None`, the downcast of `path` will not get a dedicated drop flag.
144 fn downcast_subpath(&self, path: Self::Path, variant: VariantIdx) -> Option<Self::Path>;
146 /// Returns the subpath of indexing a fixed-size array `path`.
148 /// If this returns `None`, elements of `path` will not get a dedicated drop flag.
150 /// This is only relevant for array patterns, which can move out of individual array elements.
151 fn array_subpath(&self, path: Self::Path, index: u64, size: u64) -> Option<Self::Path>;
// The per-drop state: the elaborator doing the work plus the drop's source
// location. NOTE(review): the remaining fields (`place`, `path`, `succ`,
// `unwind`) are elided in this extract — they are supplied by the free
// function `elaborate_drop` and referenced throughout the `impl`.
155 struct DropCtxt<'l, 'b, 'tcx, D>
157 D: DropElaborator<'b, 'tcx>,
159 elaborator: &'l mut D,
161 source_info: SourceInfo,
169 /// "Elaborates" a drop of `place`/`path` and patches `bb`'s terminator to execute it.
171 /// The passed `elaborator` is used to determine what should happen at the drop terminator. It
172 /// decides whether the drop can be statically determined or whether it needs a dynamic drop flag,
173 /// and whether the drop is "open", ie. should be expanded to drop all subfields of the dropped
// (continuation of the sentence above — the closing doc line is elided in
// this extract)
176 /// When this returns, the MIR patch in the `elaborator` contains the necessary changes.
177 pub fn elaborate_drop<'b, 'tcx, D>(
// NOTE(review): several parameter lines (`elaborator`, `place`, `path`,
// `succ`, `unwind`, `bb`) are elided in this extract; all of them are
// forwarded into the `DropCtxt` constructed below.
179 source_info: SourceInfo,
186 D: DropElaborator<'b, 'tcx>,
// Delegate to the method form, which performs the actual patching.
189 DropCtxt { elaborator, source_info, place, path, succ, unwind }.elaborate_drop(bb)
192 impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
194 D: DropElaborator<'b, 'tcx>,
/// The type of `place` as seen by the body being elaborated.
197 fn place_ty(&self, place: Place<'tcx>) -> Ty<'tcx> {
198 place.ty(self.elaborator.body(), self.tcx()).ty
/// Convenience accessor for the type context.
201 fn tcx(&self) -> TyCtxt<'tcx> {
202 self.elaborator.tcx()
205 /// This elaborates a single drop instruction, located at `bb`, and
208 /// The elaborated drop checks the drop flags to only drop what
211 /// In addition, the relevant drop flags also need to be cleared
212 /// to avoid double-drops. However, in the middle of a complex
213 /// drop, one must avoid clearing some of the flags before they
214 /// are read, as that would cause a memory leak.
216 /// In particular, when dropping an ADT, multiple fields may be
217 /// joined together under the `rest` subpath. They are all controlled
218 /// by the primary drop flag, but only the last rest-field dropped
219 /// should clear it (and it must also not clear anything else).
221 // FIXME: I think we should just control the flags externally,
222 // and then we do not need this machinery.
223 pub fn elaborate_drop(&mut self, bb: BasicBlock) {
224 debug!("elaborate_drop({:?}, {:?})", bb, self);
// Ask the elaborator how this path must be dropped (Dead / Static /
// Conditional / Open) and patch `bb`'s terminator accordingly.
225 let style = self.elaborator.drop_style(self.path, DropFlagMode::Deep);
226 debug!("elaborate_drop({:?}, {:?}): live - {:?}", bb, self, style);
// `DropStyle::Dead` arm (header elided in this extract): the value is
// dead here, so the drop degenerates to a goto to the successor.
231 .patch_terminator(bb, TerminatorKind::Goto { target: self.succ });
233 DropStyle::Static => {
// Always initialized: clear the (deep) drop flags at the terminator
// location and keep a real `Drop` terminator.
234 let loc = self.terminator_loc(bb);
235 self.elaborator.clear_drop_flag(loc, self.path, DropFlagMode::Deep);
236 self.elaborator.patch().patch_terminator(
238 TerminatorKind::Drop {
241 unwind: self.unwind.into_option(),
245 DropStyle::Conditional => {
246 let unwind = self.unwind; // FIXME(#43234)
247 let succ = self.succ;
// Build a flag-tested drop block and jump to it.
248 let drop_bb = self.complete_drop(Some(DropFlagMode::Deep), succ, unwind);
251 .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
// `DropStyle::Open` arm (header elided): expand into per-field drops.
254 let drop_bb = self.open_drop();
257 .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
262 /// Returns the place and move path for each field of `variant`,
263 /// (the move path is `None` if the field is a rest field).
264 fn move_paths_for_fields(
266 base_place: Place<'tcx>,
267 variant_path: D::Path,
268 variant: &'tcx ty::VariantDef,
269 substs: SubstsRef<'tcx>,
270 ) -> Vec<(Place<'tcx>, Option<D::Path>)> {
// (The iteration header over `variant.fields` is elided in this extract;
// `i`/`f` below are the field index and field definition.)
276 let field = Field::new(i);
277 let subpath = self.elaborator.field_subpath(variant_path, field);
278 let tcx = self.tcx();
// Field types may still contain unnormalized projections; normalizing
// requires `Reveal::All`, which drop elaboration always runs under.
280 assert_eq!(self.elaborator.param_env().reveal(), Reveal::All);
282 tcx.normalize_erasing_regions(self.elaborator.param_env(), f.ty(tcx, substs));
283 (tcx.mk_place_field(base_place, field, field_ty), subpath)
/// Drops `place` (a subfield of the value being dropped), continuing to the
/// given successor/unwind. NOTE(review): the signature lines of this method
/// (`fn drop_subpath`, `place`, `succ`, `unwind`) are partially elided in
/// this extract.
291 path: Option<D::Path>,
295 if let Some(path) = path {
// The field has its own move path: give it a fully elaborated drop
// governed by its own drop flag.
296 debug!("drop_subpath: for std field {:?}", place);
299 elaborator: self.elaborator,
300 source_info: self.source_info,
306 .elaborated_drop_block()
// Rest field: no dedicated flag — condition the drop on the parent's.
308 debug!("drop_subpath: for rest field {:?}", place);
311 elaborator: self.elaborator,
312 source_info: self.source_info,
316 // Using `self.path` here to condition the drop on
317 // our own drop flag.
320 .complete_drop(None, succ, unwind)
324 /// Creates one-half of the drop ladder for a list of fields, and return
325 /// the list of steps in it in reverse order, with the first step
326 /// dropping 0 fields and so on.
328 /// `unwind_ladder` is such a list of steps in reverse order,
329 /// which is called if the matching step of the drop glue panics.
// (fn drop_halfladder — the header line is elided in this extract.)
332 unwind_ladder: &[Unwind],
333 mut succ: BasicBlock,
334 fields: &[(Place<'tcx>, Option<D::Path>)],
335 ) -> Vec<BasicBlock> {
// Thread `succ` through the fields in reverse so each step drops one more
// field than the previous, pairing each step with its unwind counterpart.
338 .chain(fields.iter().rev().zip(unwind_ladder).map(|(&(place, path), &unwind_succ)| {
339 succ = self.drop_subpath(place, path, succ, unwind_succ);
/// Builds the shared "bottom" of a drop ladder: blocks that clear the
/// top-level drop flag on both the normal and the unwind path.
345 fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind) {
346 // Clear the "master" drop flag at the end. This is needed
347 // because the "master" drop protects the ADT's discriminant,
348 // which is invalidated after the ADT is dropped.
349 let (succ, unwind) = (self.succ, self.unwind); // FIXME(#43234)
// Shallow reset: only the top-level flag — the field flags are handled by
// the ladder itself. The `unwind.map` twin resets the flag on panic.
351 self.drop_flag_reset_block(DropFlagMode::Shallow, succ, unwind),
352 unwind.map(|unwind| {
353 self.drop_flag_reset_block(DropFlagMode::Shallow, unwind, Unwind::InCleanup)
358 /// Creates a full drop ladder, consisting of 2 connected half-drop-ladders
360 /// For example, with 3 fields, the drop ladder is
363 /// ELAB(drop location.0 [target=.d1, unwind=.c1])
365 /// ELAB(drop location.1 [target=.d2, unwind=.c2])
367 /// ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`])
369 /// ELAB(drop location.1 [target=.c2])
371 /// ELAB(drop location.2 [target=`self.unwind`])
373 /// NOTE: this does not clear the master drop flag, so you need
374 /// to point succ/unwind on a `drop_ladder_bottom`.
// (fn drop_ladder — the header line and the `succ`/`unwind` parameters are
// elided in this extract.)
377 fields: Vec<(Place<'tcx>, Option<D::Path>)>,
380 ) -> (BasicBlock, Unwind) {
381 debug!("drop_ladder({:?}, {:?})", self, fields);
// Fields whose type needs no drop contribute nothing to the ladder.
383 let mut fields = fields;
384 fields.retain(|&(place, _)| {
385 self.place_ty(place).needs_drop(self.tcx(), self.elaborator.param_env())
388 debug!("drop_ladder - fields needing drop: {:?}", fields);
// Build the unwind half-ladder first (all its blocks are cleanup blocks),
// then use its steps as the unwind targets of the normal half-ladder.
390 let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
391 let unwind_ladder: Vec<_> = if let Unwind::To(target) = unwind {
392 let halfladder = self.drop_halfladder(&unwind_ladder, target, &fields);
393 halfladder.into_iter().map(Unwind::To).collect()
398 let normal_ladder = self.drop_halfladder(&unwind_ladder, succ, &fields);
// The last step of each half-ladder is its entry point (drops all fields).
400 (*normal_ladder.last().unwrap(), *unwind_ladder.last().unwrap())
/// Open drop for a tuple-like value: one ladder step per element type.
403 fn open_drop_for_tuple(&mut self, tys: &[Ty<'tcx>]) -> BasicBlock {
404 debug!("open_drop_for_tuple({:?}, {:?})", self, tys)
// Each element becomes a (place, subpath) pair for the ladder. (The
// enumeration header over `tys` is elided in this extract.)
411 self.tcx().mk_place_field(self.place, Field::new(i), ty),
412 self.elaborator.field_subpath(self.path, Field::new(i)),
417 let (succ, unwind) = self.drop_ladder_bottom();
418 self.drop_ladder(fields, succ, unwind).0
/// Open drop for a `Box<T>`: drop the boxed contents, then free the box.
421 fn open_drop_for_box(&mut self, adt: &'tcx ty::AdtDef, substs: SubstsRef<'tcx>) -> BasicBlock {
422 debug!("open_drop_for_box({:?}, {:?}, {:?})", self, adt, substs);
// Drop `*place` first; the free of the box itself is the successor, on
// both the normal and the unwind path.
424 let interior = self.tcx().mk_place_deref(self.place);
425 let interior_path = self.elaborator.deref_subpath(self.path);
427 let succ = self.box_free_block(adt, substs, self.succ, self.unwind);
429 self.unwind.map(|unwind| self.box_free_block(adt, substs, unwind, Unwind::InCleanup));
431 self.drop_subpath(interior, interior_path, succ, unwind_succ)
/// Open drop for a (non-box) ADT: optional destructor call, then field drops.
434 fn open_drop_for_adt(&mut self, adt: &'tcx ty::AdtDef, substs: SubstsRef<'tcx>) -> BasicBlock {
435 debug!("open_drop_for_adt({:?}, {:?}, {:?})", self, adt, substs);
436 if adt.variants.is_empty() {
// Uninhabited enum: this drop can never actually be reached at runtime.
437 return self.elaborator.patch().new_block(BasicBlockData {
439 terminator: Some(Terminator {
440 source_info: self.source_info,
441 kind: TerminatorKind::Unreachable,
443 is_cleanup: self.unwind.is_cleanup(),
// Unions and `ManuallyDrop` never drop their contents.
448 adt.is_union() || Some(adt.did) == self.tcx().lang_items().manually_drop();
449 let contents_drop = if skip_contents {
450 (self.succ, self.unwind)
452 self.open_drop_for_adt_contents(adt, substs)
// If the ADT has a `Drop` impl, call it before dropping the contents.
455 if adt.has_dtor(self.tcx()) {
456 self.destructor_call_block(contents_drop)
/// Drops the fields of an ADT (no destructor call), returning the entry
/// block and its unwind action.
462 fn open_drop_for_adt_contents(
464 adt: &'tcx ty::AdtDef,
465 substs: SubstsRef<'tcx>,
466 ) -> (BasicBlock, Unwind) {
467 let (succ, unwind) = self.drop_ladder_bottom();
// Single-variant ADTs get a plain field ladder; multi-variant enums need
// a discriminant switch. (The branching `if` is elided in this extract.)
469 let fields = self.move_paths_for_fields(
472 &adt.variants[VariantIdx::new(0)],
475 self.drop_ladder(fields, succ, unwind)
477 self.open_drop_for_multivariant(adt, substs, succ, unwind)
/// Open drop for a multi-variant enum: a discriminant switch whose arms are
/// per-variant drop ladders (plus an unwind twin of the whole structure).
481 fn open_drop_for_multivariant(
483 adt: &'tcx ty::AdtDef,
484 substs: SubstsRef<'tcx>,
487 ) -> (BasicBlock, Unwind) {
// One switch arm per tracked variant; `values` holds the matching
// discriminant values.
488 let mut values = Vec::with_capacity(adt.variants.len());
489 let mut normal_blocks = Vec::with_capacity(adt.variants.len());
// A parallel set of arms for the unwind switch — not needed when we are
// already on the unwind path.
490 let mut unwind_blocks =
491 if unwind.is_cleanup() { None } else { Some(Vec::with_capacity(adt.variants.len())) };
493 let mut have_otherwise_with_drop_glue = false;
494 let mut have_otherwise = false;
495 let tcx = self.tcx();
497 for (variant_index, discr) in adt.discriminants(tcx) {
498 let variant = &adt.variants[variant_index];
499 let subpath = self.elaborator.downcast_subpath(self.path, variant_index);
501 if let Some(variant_path) = subpath {
// Tracked variant: downcast the place to the variant and build a
// drop ladder for its fields.
502 let base_place = tcx.mk_place_elem(
504 ProjectionElem::Downcast(Some(variant.ident.name), variant_index),
506 let fields = self.move_paths_for_fields(base_place, variant_path, &variant, substs);
507 values.push(discr.val);
508 if let Unwind::To(unwind) = unwind {
509 // We can't use the half-ladder from the original
510 // drop ladder, because this breaks the
511 // "funclet can't have 2 successor funclets"
512 // requirement from MSVC:
514 // switch unwind-switch
516 // v1.0 v2.0 v2.0-unwind v1.0-unwind
518 // v1.1-unwind v2.1-unwind |
520 // \-------------------------------/
522 // Create a duplicate half-ladder to avoid that. We
523 // could technically only do this on MSVC, but
524 // I want to minimize the divergence between MSVC
527 let unwind_blocks = unwind_blocks.as_mut().unwrap();
528 let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
529 let halfladder = self.drop_halfladder(&unwind_ladder, unwind, &fields);
530 unwind_blocks.push(halfladder.last().cloned().unwrap());
532 let (normal, _) = self.drop_ladder(fields, succ, unwind);
533 normal_blocks.push(normal);
// Untracked variant: it falls into the switch's otherwise arm.
535 have_otherwise = true;
537 let param_env = self.elaborator.param_env();
// Only force a real drop in the otherwise arm if some untracked
// variant actually has a field needing drop glue.
538 let have_field_with_drop_glue = variant
541 .any(|field| field.ty(tcx, substs).needs_drop(tcx, param_env));
542 if have_field_with_drop_glue {
543 have_otherwise_with_drop_glue = true;
// Otherwise-arm construction (the `if have_otherwise` header is elided in
// this extract): a plain goto when nothing in it needs dropping, else a
// `Drop` block — each with its unwind twin.
550 } else if !have_otherwise_with_drop_glue {
551 normal_blocks.push(self.goto_block(succ, unwind));
552 if let Unwind::To(unwind) = unwind {
553 unwind_blocks.as_mut().unwrap().push(self.goto_block(unwind, Unwind::InCleanup));
556 normal_blocks.push(self.drop_block(succ, unwind));
557 if let Unwind::To(unwind) = unwind {
558 unwind_blocks.as_mut().unwrap().push(self.drop_block(unwind, Unwind::InCleanup));
// Build the two discriminant switches: normal and (if applicable) unwind.
563 self.adt_switch_block(adt, normal_blocks, &values, succ, unwind),
564 unwind.map(|unwind| {
565 self.adt_switch_block(
567 unwind_blocks.unwrap(),
// (fn adt_switch_block — the header and several parameter lines are elided
// in this extract.) Builds the discriminant switch, guarded by the drop
// flag of `self.path`.
578 adt: &'tcx ty::AdtDef,
579 blocks: Vec<BasicBlock>,
584 // If there are multiple variants, then if something
585 // is present within the enum the discriminant, tracked
586 // by the rest path, must be initialized.
588 // Additionally, we do not want to switch on the
589 // discriminant after it is free-ed, because that
590 // way lies only trouble.
591 let discr_ty = adt.repr.discr_type().to_ty(self.tcx());
592 let discr = Place::from(self.new_temp(discr_ty));
593 let discr_rv = Rvalue::Discriminant(self.place);
594 let switch_block = BasicBlockData {
595 statements: vec![self.assign(discr, discr_rv)],
596 terminator: Some(Terminator {
597 source_info: self.source_info,
598 kind: TerminatorKind::SwitchInt {
599 discr: Operand::Move(discr),
601 values: From::from(values.to_owned()),
605 is_cleanup: unwind.is_cleanup(),
607 let switch_block = self.elaborator.patch().new_block(switch_block);
// Guard the switch behind the drop flag so we never read the discriminant
// of an uninitialized value.
608 self.drop_flag_test_block(switch_block, succ, unwind)
/// Builds the block that invokes the ADT's `Drop::drop` impl, continuing to
/// `succ` (the contents drop) afterwards.
611 fn destructor_call_block(&mut self, (succ, unwind): (BasicBlock, Unwind)) -> BasicBlock {
612 debug!("destructor_call_block({:?}, {:?})", self, succ);
613 let tcx = self.tcx();
// `Drop::drop` is the first (and only) method of the `Drop` lang item.
614 let drop_trait = tcx.require_lang_item(LangItem::Drop, None);
615 let drop_fn = tcx.associated_items(drop_trait).in_definition_order().next().unwrap();
616 let ty = self.place_ty(self.place);
617 let substs = tcx.mk_substs_trait(ty, &[]);
// `drop` takes `&mut self`, so mutably borrow the place into a temporary.
620 tcx.mk_ref(tcx.lifetimes.re_erased, ty::TypeAndMut { ty, mutbl: hir::Mutability::Mut });
621 let ref_place = self.new_temp(ref_ty);
622 let unit_temp = Place::from(self.new_temp(tcx.mk_unit()));
624 let result = BasicBlockData {
625 statements: vec![self.assign(
626 Place::from(ref_place),
628 tcx.lifetimes.re_erased,
629 BorrowKind::Mut { allow_two_phase_borrow: false },
633 terminator: Some(Terminator {
634 kind: TerminatorKind::Call {
635 func: Operand::function_handle(
639 self.source_info.span,
641 args: vec![Operand::Move(Place::from(ref_place))],
642 destination: Some((unit_temp, succ)),
643 cleanup: unwind.into_option(),
645 fn_span: self.source_info.span,
647 source_info: self.source_info,
649 is_cleanup: unwind.is_cleanup(),
651 self.elaborator.patch().new_block(result)
654 /// Create a loop that drops an array:
658 /// can_go = cur == length_or_end
659 /// if can_go then succ else drop-block
663 /// cur = cur.offset(1)
665 /// ptr = &raw mut P[cur]
// (fn drop_loop — most of the signature is elided in this extract;
// `ptr_based` selects pointer-increment vs index-increment iteration.)
674 length_or_end: Place<'tcx>,
679 let copy = |place: Place<'tcx>| Operand::Copy(place);
680 let move_ = |place: Place<'tcx>| Operand::Move(place);
681 let tcx = self.tcx();
683 let ptr_ty = tcx.mk_ptr(ty::TypeAndMut { ty: ety, mutbl: hir::Mutability::Mut });
684 let ptr = Place::from(self.new_temp(ptr_ty));
685 let can_go = Place::from(self.new_temp(tcx.types.bool));
687 let one = self.constant_usize(1);
// Pointer mode: ptr = cur; cur = cur.offset(1).
// Index mode: ptr = &raw mut P[cur]; cur = cur + 1.
688 let (ptr_next, cur_next) = if ptr_based {
689 (Rvalue::Use(copy(cur.into())), Rvalue::BinaryOp(BinOp::Offset, move_(cur.into()), one))
692 Rvalue::AddressOf(Mutability::Mut, tcx.mk_place_index(self.place, cur)),
693 Rvalue::BinaryOp(BinOp::Add, move_(cur.into()), one),
697 let drop_block = BasicBlockData {
698 statements: vec![self.assign(ptr, ptr_next), self.assign(Place::from(cur), cur_next)],
699 is_cleanup: unwind.is_cleanup(),
700 terminator: Some(Terminator {
701 source_info: self.source_info,
702 // this gets overwritten by drop elaboration.
703 kind: TerminatorKind::Unreachable,
706 let drop_block = self.elaborator.patch().new_block(drop_block);
708 let loop_block = BasicBlockData {
709 statements: vec![self.assign(
711 Rvalue::BinaryOp(BinOp::Eq, copy(Place::from(cur)), copy(length_or_end)),
713 is_cleanup: unwind.is_cleanup(),
714 terminator: Some(Terminator {
715 source_info: self.source_info,
716 kind: TerminatorKind::if_(tcx, move_(can_go), succ, drop_block),
719 let loop_block = self.elaborator.patch().new_block(loop_block);
// Replace the placeholder terminator of `drop_block` with the actual
// element drop, which loops back to `loop_block`.
721 self.elaborator.patch().patch_terminator(
723 TerminatorKind::Drop {
724 place: tcx.mk_place_deref(ptr),
726 unwind: unwind.into_option(),
/// Open drop for arrays/slices of element type `ety`; `opt_size` is the
/// statically known length (fixed-size arrays only).
733 fn open_drop_for_array(&mut self, ety: Ty<'tcx>, opt_size: Option<u64>) -> BasicBlock {
734 debug!("open_drop_for_array({:?}, {:?})", ety, opt_size);
// The generated code switches on `size_of::<ety>()` so that ZST element
// types use index-based iteration (pointer offsets would not advance):
736 // if size_of::<ety>() == 0 {
742 let tcx = self.tcx();
744 if let Some(size) = opt_size {
// Fixed-size array: if any element has its own move path (array
// patterns can move out individual elements), drop element-by-element
// through a ladder instead of a loop.
745 let fields: Vec<(Place<'tcx>, Option<D::Path>)> = (0..size)
750 ProjectionElem::ConstantIndex {
756 self.elaborator.array_subpath(self.path, i, size),
761 if fields.iter().any(|(_, path)| path.is_some()) {
762 let (succ, unwind) = self.drop_ladder_bottom();
763 return self.drop_ladder(fields, succ, unwind).0;
767 let move_ = |place: Place<'tcx>| Operand::Move(place);
768 let elem_size = Place::from(self.new_temp(tcx.types.usize));
769 let len = Place::from(self.new_temp(tcx.types.usize));
771 static USIZE_SWITCH_ZERO: &[u128] = &[0];
773 let base_block = BasicBlockData {
775 self.assign(elem_size, Rvalue::NullaryOp(NullOp::SizeOf, ety)),
776 self.assign(len, Rvalue::Len(self.place)),
778 is_cleanup: self.unwind.is_cleanup(),
779 terminator: Some(Terminator {
780 source_info: self.source_info,
781 kind: TerminatorKind::SwitchInt {
782 discr: move_(elem_size),
783 switch_ty: tcx.types.usize,
784 values: From::from(USIZE_SWITCH_ZERO),
// Targets: elem_size == 0 → index-based loop pair; otherwise →
// pointer-based loop pair.
786 self.drop_loop_pair(ety, false, len),
787 self.drop_loop_pair(ety, true, len),
792 self.elaborator.patch().new_block(base_block)
795 /// Creates a pair of drop-loops of `place`, which drops its contents, even
796 /// in the case of a panic. If `ptr_based`, creates a pointer loop,
797 /// otherwise create an index loop.
// (fn drop_loop_pair — signature lines elided in this extract; takes `ety`,
// `ptr_based` and the `length` place computed by the caller.)
804 debug!("drop_loop_pair({:?}, {:?})", ety, ptr_based);
805 let tcx = self.tcx();
// The cursor is a raw pointer in pointer mode, a usize index otherwise.
806 let iter_ty = if ptr_based { tcx.mk_mut_ptr(ety) } else { tcx.types.usize };
808 let cur = self.new_temp(iter_ty);
809 let length_or_end = if ptr_based { Place::from(self.new_temp(iter_ty)) } else { length };
// Build the unwind loop first so the normal loop can unwind into it.
811 let unwind = self.unwind.map(|unwind| {
812 self.drop_loop(unwind, cur, length_or_end, ety, Unwind::InCleanup, ptr_based)
815 let loop_block = self.drop_loop(self.succ, cur, length_or_end, ety, unwind, ptr_based);
817 let cur = Place::from(cur);
818 let drop_block_stmts = if ptr_based {
819 let tmp_ty = tcx.mk_mut_ptr(self.place_ty(self.place));
820 let tmp = Place::from(self.new_temp(tmp_ty));
// tmp = &raw mut P;
822 // cur = tmp as *mut T;
823 // end = Offset(cur, len);
825 self.assign(tmp, Rvalue::AddressOf(Mutability::Mut, self.place)),
826 self.assign(cur, Rvalue::Cast(CastKind::Misc, Operand::Move(tmp), iter_ty)),
829 Rvalue::BinaryOp(BinOp::Offset, Operand::Copy(cur), Operand::Move(length)),
833 // cur = 0 (length already pushed)
834 let zero = self.constant_usize(0);
835 vec![self.assign(cur, Rvalue::Use(zero))]
837 let drop_block = self.elaborator.patch().new_block(BasicBlockData {
838 statements: drop_block_stmts,
839 is_cleanup: unwind.is_cleanup(),
840 terminator: Some(Terminator {
841 source_info: self.source_info,
842 kind: TerminatorKind::Goto { target: loop_block },
846 // FIXME(#34708): handle partially-dropped array/slice elements.
847 let reset_block = self.drop_flag_reset_block(DropFlagMode::Deep, drop_block, unwind);
848 self.drop_flag_test_block(reset_block, self.succ, unwind)
851 /// The slow-path - create an "open", elaborated drop for a type
852 /// which is moved-out-of only partially, and patch `bb` to a jump
853 /// to it. This must not be called on ADTs with a destructor,
854 /// as these can't be moved-out-of, except for `Box<T>`, which is
// (continuation of this sentence is elided in this extract)
857 /// This creates a "drop ladder" that drops the needed fields of the
858 /// ADT, both in the success case or if one of the destructors fails.
859 fn open_drop(&mut self) -> BasicBlock {
860 let ty = self.place_ty(self.place);
// Dispatch on the dropped type. (The `match` header line is elided in this
// extract.)
862 ty::Closure(_, substs) => {
// A closure drops exactly its upvars, like a tuple.
863 let tys: Vec<_> = substs.as_closure().upvar_tys().collect();
864 self.open_drop_for_tuple(&tys)
866 // Note that `elaborate_drops` only drops the upvars of a generator,
867 // and this is ok because `open_drop` here can only be reached
868 // within that own generator's resume function.
869 // This should only happen for the self argument on the resume function.
870 // It effectively only contains upvars until the generator transformation runs.
871 // See librustc_body/transform/generator.rs for more details.
872 ty::Generator(_, substs, _) => {
873 let tys: Vec<_> = substs.as_generator().upvar_tys().collect();
874 self.open_drop_for_tuple(&tys)
// Tuple arm (pattern line elided in this extract):
877 let tys: Vec<_> = ty.tuple_fields().collect();
878 self.open_drop_for_tuple(&tys)
880 ty::Adt(def, substs) => {
// Boxes have their backing memory freed; other ADTs get field drops.
// (The `if def.is_box()` branching line is elided in this extract.)
882 self.open_drop_for_box(def, substs)
884 self.open_drop_for_adt(def, substs)
// Arm(s) with no openable structure (pattern headers elided here):
// fall back to a complete, flag-conditioned drop of the whole value.
888 let unwind = self.unwind; // FIXME(#43234)
889 let succ = self.succ;
890 self.complete_drop(Some(DropFlagMode::Deep), succ, unwind)
892 ty::Array(ety, size) => {
// The array length may not be evaluatable; `None` falls back to the
// loop-based slice-style drop.
893 let size = size.try_eval_usize(self.tcx(), self.elaborator.param_env());
894 self.open_drop_for_array(ety, size)
896 ty::Slice(ety) => self.open_drop_for_array(ety, None),
898 _ => bug!("open drop from non-ADT `{:?}`", ty),
// (fn complete_drop — header and remaining parameter lines elided in this
// extract.) Builds a `Drop` block, optionally preceded by a drop-flag
// reset, all guarded behind a drop-flag test.
904 drop_mode: Option<DropFlagMode>,
908 debug!("complete_drop({:?},{:?})", self, drop_mode);
910 let drop_block = self.drop_block(succ, unwind);
911 let drop_block = if let Some(mode) = drop_mode {
912 self.drop_flag_reset_block(mode, drop_block, unwind)
917 self.drop_flag_test_block(drop_block, succ, unwind)
920 /// Creates a block that resets the drop flag. If `mode` is deep, all children drop flags will
// (continuation of this sentence is elided in this extract)
922 fn drop_flag_reset_block(
928 debug!("drop_flag_reset_block({:?},{:?})", self, mode);
// The clear statements are inserted at the start of a fresh goto block.
930 let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
931 let block_start = Location { block, statement_index: 0 };
932 self.elaborator.clear_drop_flag(block_start, self.path, mode);
/// Creates a fresh drop block for `self.place` and immediately runs
/// elaboration on it (recursing through `elaborate_drop`).
936 fn elaborated_drop_block(&mut self) -> BasicBlock {
937 debug!("elaborated_drop_block({:?})", self);
938 let blk = self.drop_block(self.succ, self.unwind);
939 self.elaborate_drop(blk);
943 /// Creates a block that frees the backing memory of a `Box` if its drop is required (either
944 /// statically or by checking its drop flag).
946 /// The contained value will not be dropped.
// (fn box_free_block — header and remaining parameter lines elided in this
// extract.)
949 adt: &'tcx ty::AdtDef,
950 substs: SubstsRef<'tcx>,
954 let block = self.unelaborated_free_block(adt, substs, target, unwind);
955 self.drop_flag_test_block(block, target, unwind)
958 /// Creates a block that frees the backing memory of a `Box` (without dropping the contained
// (continuation of this sentence is elided in this extract)
960 fn unelaborated_free_block(
962 adt: &'tcx ty::AdtDef,
963 substs: SubstsRef<'tcx>,
967 let tcx = self.tcx();
968 let unit_temp = Place::from(self.new_temp(tcx.mk_unit()));
969 let free_func = tcx.require_lang_item(LangItem::BoxFree, Some(self.source_info.span));
// `box_free` receives the box's fields (moved out of `self.place`).
970 let args = adt.variants[VariantIdx::new(0)]
975 let field = Field::new(i);
976 let field_ty = f.ty(tcx, substs);
977 Operand::Move(tcx.mk_place_field(self.place, field, field_ty))
981 let call = TerminatorKind::Call {
982 func: Operand::function_handle(tcx, free_func, substs, self.source_info.span),
984 destination: Some((unit_temp, target)),
986 from_hir_call: false,
987 fn_span: self.source_info.span,
989 let free_block = self.new_block(unwind, call);
// Freeing the box invalidates it, so clear its (shallow) drop flag at the
// start of the free block.
991 let block_start = Location { block: free_block, statement_index: 0 };
992 self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
/// Creates a block whose terminator drops `self.place`, continuing to `target`.
996 fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
998 TerminatorKind::Drop { place: self.place, target, unwind: unwind.into_option() };
999 self.new_block(unwind, block)
/// Creates a block that just jumps to `target`.
1002 fn goto_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
1003 let block = TerminatorKind::Goto { target };
1004 self.new_block(unwind, block)
1007 /// Returns the block to jump to in order to test the drop flag and execute the drop.
1009 /// Depending on the required `DropStyle`, this might be a generated block with an `if`
1010 /// terminator (for dynamic/open drops), or it might be `on_set` or `on_unset` itself, in case
1011 /// the drop can be statically determined.
1012 fn drop_flag_test_block(
1015 on_unset: BasicBlock,
1018 let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
1020 "drop_flag_test_block({:?},{:?},{:?},{:?}) - {:?}",
1021 self, on_set, on_unset, unwind, style
1025 DropStyle::Dead => on_unset,
1026 DropStyle::Static => on_set,
1027 DropStyle::Conditional | DropStyle::Open => {
// Dynamic case: branch on the runtime drop flag.
1028 let flag = self.elaborator.get_drop_flag(self.path).unwrap();
1029 let term = TerminatorKind::if_(self.tcx(), flag, on_set, on_unset);
1030 self.new_block(unwind, term)
/// Creates an empty block with terminator `k`, marked as a cleanup block
/// when we are on the unwind path.
1035 fn new_block(&mut self, unwind: Unwind, k: TerminatorKind<'tcx>) -> BasicBlock {
1036 self.elaborator.patch().new_block(BasicBlockData {
1038 terminator: Some(Terminator { source_info: self.source_info, kind: k }),
1039 is_cleanup: unwind.is_cleanup(),
/// Allocates a fresh temporary local of type `ty` at the drop's span.
1043 fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
1044 self.elaborator.patch().new_temp(ty, self.source_info.span)
/// The MIR location of `bb`'s terminator, computed through the patch.
1047 fn terminator_loc(&mut self, bb: BasicBlock) -> Location {
1048 let body = self.elaborator.body();
1049 self.elaborator.patch().terminator_loc(body, bb)
/// A constant `usize` operand with value `val`, at the drop's span.
1052 fn constant_usize(&self, val: u16) -> Operand<'tcx> {
1053 Operand::Constant(box Constant {
1054 span: self.source_info.span,
1056 literal: ty::Const::from_usize(self.tcx(), val.into()),
/// An assignment statement `lhs = rhs` at the drop's source location.
1060 fn assign(&self, lhs: Place<'tcx>, rhs: Rvalue<'tcx>) -> Statement<'tcx> {
1061 Statement { source_info: self.source_info, kind: StatementKind::Assign(box (lhs, rhs)) }