1 use crate::util::patch::MirPatch;
2 use rustc::middle::lang_items;
4 use rustc::traits::Reveal;
5 use rustc::ty::layout::VariantIdx;
6 use rustc::ty::subst::SubstsRef;
7 use rustc::ty::util::IntTypeExt;
8 use rustc::ty::{self, Ty, TyCtxt};
10 use rustc_index::vec::Idx;
13 use std::convert::TryInto;
// Whether the value tracked by a drop-flag path is currently initialized.
15 #[derive(Debug, PartialEq, Eq, Copy, Clone)]
16 pub enum DropFlagState {
17 Present, // i.e., initialized
18 Absent, // i.e., deinitialized or "moved"
// NOTE(review): the listing is elided here (source line numbers jump);
// the enum's closing brace, the `impl DropFlagState` header, and the
// `match self {` scaffolding around `value` are not visible.
// Maps the flag state to the boolean stored in the runtime drop flag.
22 pub fn value(self) -> bool {
24 DropFlagState::Present => true,
25 DropFlagState::Absent => false,
// How much drop-flag state an operation clears for a path; the variants
// themselves are elided from this listing.
39 pub enum DropFlagMode {
44 #[derive(Copy, Clone, Debug)]
// NOTE(review): the `enum Unwind` declaration and the `impl Unwind`
// header are elided from this listing; only method fragments follow.
// True when we are already on the unwind (cleanup) path, so blocks
// created in this context must be marked `is_cleanup`.
51 fn is_cleanup(self) -> bool {
53 Unwind::To(..) => false,
54 Unwind::InCleanup => true,
// Converts to the `Option<BasicBlock>` shape used by terminator
// `unwind`/`cleanup` fields (`None` = already in cleanup).
58 fn into_option(self) -> Option<BasicBlock> {
60 Unwind::To(bb) => Some(bb),
61 Unwind::InCleanup => None,
// Applies `f` to the unwind target, if any; `InCleanup` is unchanged.
65 fn map<F>(self, f: F) -> Self
67 F: FnOnce(BasicBlock) -> BasicBlock,
70 Unwind::To(bb) => Unwind::To(f(bb)),
71 Unwind::InCleanup => Unwind::InCleanup,
// Interface the drop-elaboration machinery needs from its driver:
// access to the MIR being patched and per-path drop-flag queries.
76 pub trait DropElaborator<'a, 'tcx>: fmt::Debug {
// Opaque identifier for a tracked move path.
77 type Path: Copy + fmt::Debug;
79 fn patch(&mut self) -> &mut MirPatch<'tcx>;
80 fn body(&self) -> &'a Body<'tcx>;
81 fn tcx(&self) -> TyCtxt<'tcx>;
82 fn param_env(&self) -> ty::ParamEnv<'tcx>;
// How `path` should be dropped when considered under `mode`.
84 fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;
// Operand holding the runtime drop flag for `path`, if it has one.
85 fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>>;
86 fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode);
// Sub-path lookups; `None` means that component is not tracked separately.
88 fn field_subpath(&self, path: Self::Path, field: Field) -> Option<Self::Path>;
89 fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path>;
90 fn downcast_subpath(&self, path: Self::Path, variant: VariantIdx) -> Option<Self::Path>;
91 fn array_subpath(&self, path: Self::Path, index: u32, size: u32) -> Option<Self::Path>;
// Bundles everything needed to elaborate one drop: the driver
// (`elaborator`), source position, and the place being dropped.
95 struct DropCtxt<'l, 'b, 'tcx, D>
97 D: DropElaborator<'b, 'tcx>,
99 elaborator: &'l mut D,
101 source_info: SourceInfo,
103 place: &'l Place<'tcx>,
// NOTE(review): the `path`, `succ`, and `unwind` fields used by the
// methods below are elided from this listing.
// Public entry point: builds a `DropCtxt` from the pieces and elaborates
// the drop terminator at `bb`. NOTE(review): most parameter lines of the
// signature are elided from this listing.
109 pub fn elaborate_drop<'b, 'tcx, D>(
111 source_info: SourceInfo,
118 D: DropElaborator<'b, 'tcx>,
121 DropCtxt { elaborator, source_info, place, path, succ, unwind }.elaborate_drop(bb)
124 impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
126 D: DropElaborator<'b, 'tcx>,
// Type of `place` as seen through the elaborator's MIR body.
129 fn place_ty(&self, place: &Place<'tcx>) -> Ty<'tcx> {
130 place.ty(self.elaborator.body(), self.tcx()).ty
// Convenience accessor for the type context.
133 fn tcx(&self) -> TyCtxt<'tcx> {
134 self.elaborator.tcx()
137 /// This elaborates a single drop instruction, located at `bb`, and
140 /// The elaborated drop checks the drop flags to only drop what
143 /// In addition, the relevant drop flags also need to be cleared
144 /// to avoid double-drops. However, in the middle of a complex
145 /// drop, one must avoid clearing some of the flags before they
146 /// are read, as that would cause a memory leak.
148 /// In particular, when dropping an ADT, multiple fields may be
149 /// joined together under the `rest` subpath. They are all controlled
150 /// by the primary drop flag, but only the last rest-field dropped
151 /// should clear it (and it must also not clear anything else).
153 // FIXME: I think we should just control the flags externally,
154 // and then we do not need this machinery.
155 pub fn elaborate_drop(&mut self, bb: BasicBlock) {
156 debug!("elaborate_drop({:?})", self);
157 let style = self.elaborator.drop_style(self.path, DropFlagMode::Deep);
158 debug!("elaborate_drop({:?}): live - {:?}", self, style);
// NOTE(review): the `match style {` header and the start of the first
// arm are elided; this Goto-to-succ presumably belongs to the
// `DropStyle::Dead` arm (nothing to drop) — confirm against upstream.
163 .patch_terminator(bb, TerminatorKind::Goto { target: self.succ });
// Statically known initialized: emit an unconditional Drop and clear
// the (deep) drop flags at the terminator's location.
165 DropStyle::Static => {
166 let loc = self.terminator_loc(bb);
167 self.elaborator.clear_drop_flag(loc, self.path, DropFlagMode::Deep);
168 self.elaborator.patch().patch_terminator(
170 TerminatorKind::Drop {
171 location: *self.place,
173 unwind: self.unwind.into_option(),
// Maybe-initialized: guard the whole drop behind the runtime flag.
177 DropStyle::Conditional => {
178 let unwind = self.unwind; // FIXME(#43234)
179 let succ = self.succ;
180 let drop_bb = self.complete_drop(Some(DropFlagMode::Deep), succ, unwind);
183 .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
// Partially moved: expand into a field-by-field "open" drop (the
// `DropStyle::Open` arm header is elided from this listing).
186 let drop_bb = self.open_drop();
189 .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
194 /// Returns the place and move path for each field of `variant`,
195 /// (the move path is `None` if the field is a rest field).
196 fn move_paths_for_fields(
198 base_place: &Place<'tcx>,
199 variant_path: D::Path,
200 variant: &'tcx ty::VariantDef,
201 substs: SubstsRef<'tcx>,
202 ) -> Vec<(Place<'tcx>, Option<D::Path>)> {
// NOTE(review): the iteration over the variant's fields that encloses
// the closure body below is elided from this listing.
208 let field = Field::new(i);
209 let subpath = self.elaborator.field_subpath(variant_path, field);
210 let tcx = self.tcx();
// Normalizing the field type requires the fully-revealed param-env,
// asserted here before use.
212 assert_eq!(self.elaborator.param_env().reveal, Reveal::All);
214 tcx.normalize_erasing_regions(self.elaborator.param_env(), f.ty(tcx, substs));
215 (tcx.mk_place_field(base_place.clone(), field, field_ty), subpath)
// NOTE(review): the `fn drop_subpath(...)` signature is elided from
// this listing; only fragments of the parameter list and body follow.
223 path: Option<D::Path>,
// A tracked ("std") field gets its own DropCtxt keyed on its own move
// path and is fully elaborated.
227 if let Some(path) = path {
228 debug!("drop_subpath: for std field {:?}", place);
231 elaborator: self.elaborator,
232 source_info: self.source_info,
238 .elaborated_drop_block()
// An untracked ("rest") field is instead dropped conditionally on the
// parent's (`self.path`) drop flag.
240 debug!("drop_subpath: for rest field {:?}", place);
243 elaborator: self.elaborator,
244 source_info: self.source_info,
248 // Using `self.path` here to condition the drop on
249 // our own drop flag.
252 .complete_drop(None, succ, unwind)
256 /// Creates one-half of the drop ladder for a list of fields, and return
257 /// the list of steps in it in reverse order, with the first step
258 /// dropping 0 fields and so on.
260 /// `unwind_ladder` is such a list of steps in reverse order,
261 /// which is called if the matching step of the drop glue panics.
// NOTE(review): the `fn drop_halfladder(` signature line is elided.
264 unwind_ladder: &[Unwind],
265 mut succ: BasicBlock,
266 fields: &[(Place<'tcx>, Option<D::Path>)],
267 ) -> Vec<BasicBlock> {
// Walk the fields in reverse, threading `succ` so each step drops one
// more field than the previous one.
270 .chain(fields.iter().rev().zip(unwind_ladder).map(
271 |(&(ref place, path), &unwind_succ)| {
272 succ = self.drop_subpath(place, path, succ, unwind_succ);
// Builds the (succ, unwind) pair a drop ladder should terminate in:
// blocks that clear the shallow "master" drop flag before continuing.
279 fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind) {
280 // Clear the "master" drop flag at the end. This is needed
281 // because the "master" drop protects the ADT's discriminant,
282 // which is invalidated after the ADT is dropped.
283 let (succ, unwind) = (self.succ, self.unwind); // FIXME(#43234)
285 self.drop_flag_reset_block(DropFlagMode::Shallow, succ, unwind),
286 unwind.map(|unwind| {
287 self.drop_flag_reset_block(DropFlagMode::Shallow, unwind, Unwind::InCleanup)
292 /// Creates a full drop ladder, consisting of 2 connected half-drop-ladders
294 /// For example, with 3 fields, the drop ladder is
297 /// ELAB(drop location.0 [target=.d1, unwind=.c1])
299 /// ELAB(drop location.1 [target=.d2, unwind=.c2])
301 /// ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`])
303 /// ELAB(drop location.1 [target=.c2])
305 /// ELAB(drop location.2 [target=`self.unwind`])
307 /// NOTE: this does not clear the master drop flag, so you need
308 /// to point succ/unwind on a `drop_ladder_bottom`.
// NOTE(review): the `fn drop_ladder(` signature line is elided.
311 fields: Vec<(Place<'tcx>, Option<D::Path>)>,
314 ) -> (BasicBlock, Unwind) {
315 debug!("drop_ladder({:?}, {:?})", self, fields);
// Fields whose type needs no drop are skipped entirely.
317 let mut fields = fields;
318 fields.retain(|&(ref place, _)| {
319 self.place_ty(place).needs_drop(self.tcx(), self.elaborator.param_env())
322 debug!("drop_ladder - fields needing drop: {:?}", fields);
// Build the unwind half-ladder first (all cleanup blocks), then the
// normal half-ladder whose steps unwind into the matching step of it.
324 let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
325 let unwind_ladder: Vec<_> = if let Unwind::To(target) = unwind {
326 let halfladder = self.drop_halfladder(&unwind_ladder, target, &fields);
327 halfladder.into_iter().map(Unwind::To).collect()
332 let normal_ladder = self.drop_halfladder(&unwind_ladder, succ, &fields);
334 (*normal_ladder.last().unwrap(), *unwind_ladder.last().unwrap())
// Drops each tuple (or closure/generator upvar) field via a drop ladder.
337 fn open_drop_for_tuple(&mut self, tys: &[Ty<'tcx>]) -> BasicBlock {
338 debug!("open_drop_for_tuple({:?}, {:?})", self, tys);
// (field-place, subpath) pairs per element type; the surrounding
// iterator/collect scaffolding is elided from this listing.
345 self.tcx().mk_place_field(self.place.clone(), Field::new(i), ty),
346 self.elaborator.field_subpath(self.path, Field::new(i)),
351 let (succ, unwind) = self.drop_ladder_bottom();
352 self.drop_ladder(fields, succ, unwind).0
// Drops a `Box`: first drop the boxed contents (`*place`), then free
// the allocation via the `box_free` block on both normal and unwind
// paths.
355 fn open_drop_for_box(&mut self, adt: &'tcx ty::AdtDef, substs: SubstsRef<'tcx>) -> BasicBlock {
356 debug!("open_drop_for_box({:?}, {:?}, {:?})", self, adt, substs);
358 let interior = self.tcx().mk_place_deref(self.place.clone());
359 let interior_path = self.elaborator.deref_subpath(self.path);
361 let succ = self.succ; // FIXME(#43234)
362 let unwind = self.unwind;
363 let succ = self.box_free_block(adt, substs, succ, unwind);
// NOTE(review): the `let unwind_succ =` binding line is elided here.
365 self.unwind.map(|unwind| self.box_free_block(adt, substs, unwind, Unwind::InCleanup));
367 self.drop_subpath(&interior, interior_path, succ, unwind_succ)
370 fn open_drop_for_adt(&mut self, adt: &'tcx ty::AdtDef, substs: SubstsRef<'tcx>) -> BasicBlock {
371 debug!("open_drop_for_adt({:?}, {:?}, {:?})", self, adt, substs);
// An ADT with zero variants is uninhabited, so its drop is unreachable.
372 if adt.variants.len() == 0 {
373 return self.elaborator.patch().new_block(BasicBlockData {
375 terminator: Some(Terminator {
376 source_info: self.source_info,
377 kind: TerminatorKind::Unreachable,
379 is_cleanup: self.unwind.is_cleanup(),
// Unions and `ManuallyDrop` never drop their contents.
384 adt.is_union() || Some(adt.did) == self.tcx().lang_items().manually_drop();
385 let contents_drop = if skip_contents {
386 (self.succ, self.unwind)
388 self.open_drop_for_adt_contents(adt, substs)
// If the ADT has a `Drop` impl, call it and continue into the
// contents-drop; otherwise dropping the contents is the whole drop.
391 if adt.has_dtor(self.tcx()) {
392 self.destructor_call_block(contents_drop)
// Drops the fields of an ADT: single-variant ADTs use a plain drop
// ladder; multi-variant enums switch on the discriminant first.
398 fn open_drop_for_adt_contents(
400 adt: &'tcx ty::AdtDef,
401 substs: SubstsRef<'tcx>,
402 ) -> (BasicBlock, Unwind) {
403 let (succ, unwind) = self.drop_ladder_bottom();
// Single-variant case; the `if` guard around it is elided here.
405 let fields = self.move_paths_for_fields(
408 &adt.variants[VariantIdx::new(0)],
411 self.drop_ladder(fields, succ, unwind)
413 self.open_drop_for_multivariant(adt, substs, succ, unwind)
// Builds the discriminant-switched drop for a multi-variant enum,
// producing one (normal, unwind) drop ladder per tracked variant.
417 fn open_drop_for_multivariant(
419 adt: &'tcx ty::AdtDef,
420 substs: SubstsRef<'tcx>,
423 ) -> (BasicBlock, Unwind) {
424 let mut values = Vec::with_capacity(adt.variants.len());
425 let mut normal_blocks = Vec::with_capacity(adt.variants.len());
// No separate unwind switch is needed when already on the cleanup path.
426 let mut unwind_blocks =
427 if unwind.is_cleanup() { None } else { Some(Vec::with_capacity(adt.variants.len())) };
429 let mut have_otherwise = false;
430 let tcx = self.tcx();
432 for (variant_index, discr) in adt.discriminants(tcx) {
433 let subpath = self.elaborator.downcast_subpath(self.path, variant_index);
434 if let Some(variant_path) = subpath {
// Project into the variant with a Downcast to enumerate its fields.
435 let base_place = tcx.mk_place_elem(
437 ProjectionElem::Downcast(
438 Some(adt.variants[variant_index].ident.name),
442 let fields = self.move_paths_for_fields(
445 &adt.variants[variant_index],
448 values.push(discr.val);
449 if let Unwind::To(unwind) = unwind {
450 // We can't use the half-ladder from the original
451 // drop ladder, because this breaks the
452 // "funclet can't have 2 successor funclets"
453 // requirement from MSVC:
455 // switch unwind-switch
457 // v1.0 v2.0 v2.0-unwind v1.0-unwind
459 // v1.1-unwind v2.1-unwind |
461 // \-------------------------------/
463 // Create a duplicate half-ladder to avoid that. We
464 // could technically only do this on MSVC, but
465 // I want to minimize the divergence between MSVC
468 let unwind_blocks = unwind_blocks.as_mut().unwrap();
469 let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
470 let halfladder = self.drop_halfladder(&unwind_ladder, unwind, &fields);
471 unwind_blocks.push(halfladder.last().cloned().unwrap());
473 let (normal, _) = self.drop_ladder(fields, succ, unwind);
474 normal_blocks.push(normal);
// Variants with no tracked subpath fall into the switch's
// "otherwise" arm (its construction is partly elided below).
476 have_otherwise = true;
481 normal_blocks.push(self.drop_block(succ, unwind));
482 if let Unwind::To(unwind) = unwind {
483 unwind_blocks.as_mut().unwrap().push(self.drop_block(unwind, Unwind::InCleanup));
// Wire both switches (normal and unwind) over the collected blocks.
490 self.adt_switch_block(adt, normal_blocks, &values, succ, unwind),
491 unwind.map(|unwind| {
492 self.adt_switch_block(
494 unwind_blocks.unwrap(),
// NOTE(review): the `fn adt_switch_block(` signature line is elided;
// visible parameters and body follow. It builds a SwitchInt over the
// enum discriminant, guarded by the parent drop flag.
505 adt: &'tcx ty::AdtDef,
506 blocks: Vec<BasicBlock>,
511 // If there are multiple variants, then if something
512 // is present within the enum the discriminant, tracked
513 // by the rest path, must be initialized.
515 // Additionally, we do not want to switch on the
516 // discriminant after it is free-ed, because that
517 // way lies only trouble.
// Read the discriminant into a fresh temporary and switch on it.
518 let discr_ty = adt.repr.discr_type().to_ty(self.tcx());
519 let discr = Place::from(self.new_temp(discr_ty));
520 let discr_rv = Rvalue::Discriminant(*self.place);
521 let switch_block = BasicBlockData {
522 statements: vec![self.assign(&discr, discr_rv)],
523 terminator: Some(Terminator {
524 source_info: self.source_info,
525 kind: TerminatorKind::SwitchInt {
526 discr: Operand::Move(discr),
528 values: From::from(values.to_owned()),
532 is_cleanup: unwind.is_cleanup(),
534 let switch_block = self.elaborator.patch().new_block(switch_block);
// Only enter the switch if the drop flag says anything is initialized.
535 self.drop_flag_test_block(switch_block, succ, unwind)
// Emits the call to the ADT's `Drop::drop`: takes a `&mut` borrow of
// the place into a temporary and calls the drop method, continuing to
// `succ` and unwinding per `unwind`.
538 fn destructor_call_block(&mut self, (succ, unwind): (BasicBlock, Unwind)) -> BasicBlock {
539 debug!("destructor_call_block({:?}, {:?})", self, succ);
540 let tcx = self.tcx();
541 let drop_trait = tcx.lang_items().drop_trait().unwrap();
// The `Drop` trait's sole associated item is the `drop` method.
542 let drop_fn = tcx.associated_items(drop_trait)[0];
543 let ty = self.place_ty(self.place);
544 let substs = tcx.mk_substs_trait(ty, &[]);
547 tcx.mk_ref(tcx.lifetimes.re_erased, ty::TypeAndMut { ty, mutbl: hir::Mutability::Mut });
548 let ref_place = self.new_temp(ref_ty);
549 let unit_temp = Place::from(self.new_temp(tcx.mk_unit()));
551 let result = BasicBlockData {
552 statements: vec![self.assign(
553 &Place::from(ref_place),
555 tcx.lifetimes.re_erased,
556 BorrowKind::Mut { allow_two_phase_borrow: false },
560 terminator: Some(Terminator {
561 kind: TerminatorKind::Call {
562 func: Operand::function_handle(
566 self.source_info.span,
568 args: vec![Operand::Move(Place::from(ref_place))],
// The call returns unit into a scratch temp, then proceeds to `succ`.
569 destination: Some((unit_temp, succ)),
570 cleanup: unwind.into_option(),
573 source_info: self.source_info,
575 is_cleanup: unwind.is_cleanup(),
577 self.elaborator.patch().new_block(result)
580 /// Create a loop that drops an array:
584 /// can_go = cur == length_or_end
585 /// if can_go then succ else drop-block
589 /// cur = cur.offset(1)
591 /// ptr = &raw mut P[cur]
// NOTE(review): the `fn drop_loop(` signature is elided from this
// listing; judging from call sites it takes
// (succ, cur, length_or_end, ety, unwind, ptr_based) — confirm upstream.
600 length_or_end: &Place<'tcx>,
605 let copy = |place: Place<'tcx>| Operand::Copy(place);
606 let move_ = |place: Place<'tcx>| Operand::Move(place);
607 let tcx = self.tcx();
609 let ptr_ty = tcx.mk_ptr(ty::TypeAndMut { ty: ety, mutbl: hir::Mutability::Mut });
610 let ptr = &Place::from(self.new_temp(ptr_ty));
611 let can_go = Place::from(self.new_temp(tcx.types.bool));
613 let one = self.constant_usize(1);
// Pointer-based iteration advances `cur` with `Offset`; index-based
// iteration takes `&raw mut place[cur]` and increments with `Add`.
614 let (ptr_next, cur_next) = if ptr_based {
615 (Rvalue::Use(copy(cur.into())), Rvalue::BinaryOp(BinOp::Offset, move_(cur.into()), one))
618 Rvalue::AddressOf(Mutability::Mut, tcx.mk_place_index(self.place.clone(), cur)),
619 Rvalue::BinaryOp(BinOp::Add, move_(cur.into()), one),
// Body block: compute the element pointer, advance the cursor; its
// terminator is patched into a real Drop below.
623 let drop_block = BasicBlockData {
624 statements: vec![self.assign(ptr, ptr_next), self.assign(&Place::from(cur), cur_next)],
625 is_cleanup: unwind.is_cleanup(),
626 terminator: Some(Terminator {
627 source_info: self.source_info,
628 // this gets overwritten by drop elaboration.
629 kind: TerminatorKind::Unreachable,
632 let drop_block = self.elaborator.patch().new_block(drop_block);
// Loop header: exit to `succ` once `cur == length_or_end`.
634 let loop_block = BasicBlockData {
635 statements: vec![self.assign(
637 Rvalue::BinaryOp(BinOp::Eq, copy(Place::from(cur)), copy(*length_or_end)),
639 is_cleanup: unwind.is_cleanup(),
640 terminator: Some(Terminator {
641 source_info: self.source_info,
642 kind: TerminatorKind::if_(tcx, move_(can_go), succ, drop_block),
645 let loop_block = self.elaborator.patch().new_block(loop_block);
// Patch the body block's terminator into `Drop(*ptr)` looping back
// to the header.
647 self.elaborator.patch().patch_terminator(
649 TerminatorKind::Drop {
650 location: tcx.mk_place_deref(ptr.clone()),
652 unwind: unwind.into_option(),
659 fn open_drop_for_array(&mut self, ety: Ty<'tcx>, opt_size: Option<u64>) -> BasicBlock {
660 debug!("open_drop_for_array({:?}, {:?})", ety, opt_size);
// (Original explanatory comment partly elided around here.)
662 // if size_of::<ety>() == 0 {
668 let tcx = self.tcx();
// If the length is statically known and any element has its own move
// path, drop element-by-element with a ladder of ConstantIndex places.
670 if let Some(size) = opt_size {
// Move-out tracking only supports array lengths that fit in u32.
671 let size: u32 = size.try_into().unwrap_or_else(|_| {
672 bug!("move out check isn't implemented for array sizes bigger than u32::MAX");
674 let fields: Vec<(Place<'tcx>, Option<D::Path>)> = (0..size)
679 ProjectionElem::ConstantIndex {
685 self.elaborator.array_subpath(self.path, i, size),
690 if fields.iter().any(|(_, path)| path.is_some()) {
691 let (succ, unwind) = self.drop_ladder_bottom();
692 return self.drop_ladder(fields, succ, unwind).0;
// Otherwise drop with a loop: switch on `size_of::<ety>() == 0` to
// pick index-based (first target, ZST) vs pointer-based iteration.
696 let move_ = |place: &Place<'tcx>| Operand::Move(*place);
697 let elem_size = &Place::from(self.new_temp(tcx.types.usize));
698 let len = &Place::from(self.new_temp(tcx.types.usize));
700 static USIZE_SWITCH_ZERO: &[u128] = &[0];
702 let base_block = BasicBlockData {
704 self.assign(elem_size, Rvalue::NullaryOp(NullOp::SizeOf, ety)),
705 self.assign(len, Rvalue::Len(*self.place)),
707 is_cleanup: self.unwind.is_cleanup(),
708 terminator: Some(Terminator {
709 source_info: self.source_info,
710 kind: TerminatorKind::SwitchInt {
711 discr: move_(elem_size),
712 switch_ty: tcx.types.usize,
713 values: From::from(USIZE_SWITCH_ZERO),
715 self.drop_loop_pair(ety, false, len.clone()),
716 self.drop_loop_pair(ety, true, len.clone()),
721 self.elaborator.patch().new_block(base_block)
724 /// Creates a pair of drop-loops of `place`, which drops its contents, even
725 /// in the case of 1 panic. If `ptr_based`, creates a pointer loop,
726 /// otherwise create an index loop.
// NOTE(review): the `fn drop_loop_pair(` signature line is elided; the
// visible body uses (ety, ptr_based, length).
733 debug!("drop_loop_pair({:?}, {:?})", ety, ptr_based);
734 let tcx = self.tcx();
735 let iter_ty = if ptr_based { tcx.mk_mut_ptr(ety) } else { tcx.types.usize };
737 let cur = self.new_temp(iter_ty);
738 let length_or_end = if ptr_based { Place::from(self.new_temp(iter_ty)) } else { length };
// Build the cleanup loop first so the normal loop can unwind into it.
740 let unwind = self.unwind.map(|unwind| {
741 self.drop_loop(unwind, cur, &length_or_end, ety, Unwind::InCleanup, ptr_based)
744 let loop_block = self.drop_loop(self.succ, cur, &length_or_end, ety, unwind, ptr_based);
746 let cur = Place::from(cur);
// Preheader: initialize the cursor and, for pointer loops, compute
// the one-past-the-end pointer from the length.
747 let drop_block_stmts = if ptr_based {
748 let tmp_ty = tcx.mk_mut_ptr(self.place_ty(self.place));
749 let tmp = Place::from(self.new_temp(tmp_ty));
751 // cur = tmp as *mut T;
752 // end = Offset(cur, len);
754 self.assign(&tmp, Rvalue::AddressOf(Mutability::Mut, *self.place)),
755 self.assign(&cur, Rvalue::Cast(CastKind::Misc, Operand::Move(tmp), iter_ty)),
758 Rvalue::BinaryOp(BinOp::Offset, Operand::Copy(cur), Operand::Move(length)),
762 // cur = 0 (length already pushed)
763 let zero = self.constant_usize(0);
764 vec![self.assign(&cur, Rvalue::Use(zero))]
766 let drop_block = self.elaborator.patch().new_block(BasicBlockData {
767 statements: drop_block_stmts,
768 is_cleanup: unwind.is_cleanup(),
769 terminator: Some(Terminator {
770 source_info: self.source_info,
771 kind: TerminatorKind::Goto { target: loop_block },
// Test the drop flag before entering, and reset it (deeply) on entry.
775 // FIXME(#34708): handle partially-dropped array/slice elements.
776 let reset_block = self.drop_flag_reset_block(DropFlagMode::Deep, drop_block, unwind);
777 self.drop_flag_test_block(reset_block, self.succ, unwind)
780 /// The slow-path - create an "open", elaborated drop for a type
781 /// which is moved-out-of only partially, and patch `bb` to a jump
782 /// to it. This must not be called on ADTs with a destructor,
783 /// as these can't be moved-out-of, except for `Box<T>`, which is
786 /// This creates a "drop ladder" that drops the needed fields of the
787 /// ADT, both in the success case or if one of the destructors fail.
788 fn open_drop(&mut self) -> BasicBlock {
789 let ty = self.place_ty(self.place);
// Dispatch on the dropped type; the `match ty.kind {` header is
// elided from this listing.
791 ty::Closure(def_id, substs) => {
792 let tys: Vec<_> = substs.as_closure().upvar_tys(def_id, self.tcx()).collect();
793 self.open_drop_for_tuple(&tys)
795 // Note that `elaborate_drops` only drops the upvars of a generator,
796 // and this is ok because `open_drop` here can only be reached
797 // within that own generator's resume function.
798 // This should only happen for the self argument on the resume function.
799 // It effectively only contains upvars until the generator transformation runs.
800 // See librustc_mir/transform/generator.rs for more details.
801 ty::Generator(def_id, substs, _) => {
802 let tys: Vec<_> = substs.as_generator().upvar_tys(def_id, self.tcx()).collect();
803 self.open_drop_for_tuple(&tys)
// Tuple arm (its `ty::Tuple(..)` header is elided).
806 let tys: Vec<_> = ty.tuple_fields().collect();
807 self.open_drop_for_tuple(&tys)
809 ty::Adt(def, substs) => {
// Boxes also free their allocation; other ADTs drop field-wise.
811 self.open_drop_for_box(def, substs)
813 self.open_drop_for_adt(def, substs)
// Fallback arm (header elided): a plain complete drop of the place.
817 let unwind = self.unwind; // FIXME(#43234)
818 let succ = self.succ;
819 self.complete_drop(Some(DropFlagMode::Deep), succ, unwind)
821 ty::Array(ety, size) => {
822 let size = size.try_eval_usize(self.tcx(), self.elaborator.param_env());
823 self.open_drop_for_array(ety, size)
825 ty::Slice(ety) => self.open_drop_for_array(ety, None),
827 _ => bug!("open drop from non-ADT `{:?}`", ty),
831 /// Returns a basic block that drop a place using the context
832 /// and path in `c`. If `mode` is something, also clear `c`
835 /// if FLAG(self.path)
836 /// if let Some(mode) = mode: FLAG(self.path)[mode] = false
// NOTE(review): the `fn complete_drop(` signature line is elided.
840 drop_mode: Option<DropFlagMode>,
844 debug!("complete_drop({:?},{:?})", self, drop_mode);
846 let drop_block = self.drop_block(succ, unwind);
// Optionally clear the drop flag on the way into the drop.
847 let drop_block = if let Some(mode) = drop_mode {
848 self.drop_flag_reset_block(mode, drop_block, unwind)
// Only execute any of it if the (shallow) drop flag is set.
853 self.drop_flag_test_block(drop_block, succ, unwind)
// Creates a block that clears the drop flag for `self.path` (with the
// given mode) and then jumps to `succ`.
856 fn drop_flag_reset_block(
862 debug!("drop_flag_reset_block({:?},{:?})", self, mode);
864 let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
865 let block_start = Location { block: block, statement_index: 0 };
866 self.elaborator.clear_drop_flag(block_start, self.path, mode);
// Creates a fresh Drop block for `self.place` and recursively
// elaborates it, returning the new entry block.
870 fn elaborated_drop_block(&mut self) -> BasicBlock {
871 debug!("elaborated_drop_block({:?})", self);
872 let unwind = self.unwind; // FIXME(#43234)
873 let succ = self.succ;
874 let blk = self.drop_block(succ, unwind);
875 self.elaborate_drop(blk);
// NOTE(review): the `fn box_free_block(` signature line is elided.
// Builds the `box_free` call block, guarded behind the drop flag.
881 adt: &'tcx ty::AdtDef,
882 substs: SubstsRef<'tcx>,
886 let block = self.unelaborated_free_block(adt, substs, target, unwind);
887 self.drop_flag_test_block(block, target, unwind)
// Emits an unguarded call to the `box_free` lang item, moving the
// box's fields into the call, then clears the shallow drop flag for
// `self.path` at the head of the new block.
890 fn unelaborated_free_block(
892 adt: &'tcx ty::AdtDef,
893 substs: SubstsRef<'tcx>,
897 let tcx = self.tcx();
898 let unit_temp = Place::from(self.new_temp(tcx.mk_unit()));
900 tcx.require_lang_item(lang_items::BoxFreeFnLangItem, Some(self.source_info.span));
// `box_free`'s arguments are the fields of the box's single variant.
901 let args = adt.variants[VariantIdx::new(0)]
906 let field = Field::new(i);
907 let field_ty = f.ty(tcx, substs);
908 Operand::Move(tcx.mk_place_field(self.place.clone(), field, field_ty))
912 let call = TerminatorKind::Call {
913 func: Operand::function_handle(tcx, free_func, substs, self.source_info.span),
915 destination: Some((unit_temp, target)),
917 from_hir_call: false,
919 let free_block = self.new_block(unwind, call);
921 let block_start = Location { block: free_block, statement_index: 0 };
922 self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
// Creates a block whose terminator unconditionally drops `self.place`.
926 fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
928 TerminatorKind::Drop { location: *self.place, target, unwind: unwind.into_option() };
929 self.new_block(unwind, block)
// Branches to `on_set`/`on_unset` depending on the runtime drop flag,
// or resolves statically when the drop style already determines it.
932 fn drop_flag_test_block(
935 on_unset: BasicBlock,
938 let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
940 "drop_flag_test_block({:?},{:?},{:?},{:?}) - {:?}",
941 self, on_set, on_unset, unwind, style
// Dead => definitely uninitialized; Static => definitely initialized.
945 DropStyle::Dead => on_unset,
946 DropStyle::Static => on_set,
947 DropStyle::Conditional | DropStyle::Open => {
948 let flag = self.elaborator.get_drop_flag(self.path).unwrap();
949 let term = TerminatorKind::if_(self.tcx(), flag, on_set, on_unset);
950 self.new_block(unwind, term)
// Appends a fresh block with terminator `k`, marked as cleanup when
// we are on the unwind path.
955 fn new_block(&mut self, unwind: Unwind, k: TerminatorKind<'tcx>) -> BasicBlock {
956 self.elaborator.patch().new_block(BasicBlockData {
958 terminator: Some(Terminator { source_info: self.source_info, kind: k }),
959 is_cleanup: unwind.is_cleanup(),
// Allocates a fresh MIR temporary of type `ty` at this drop's span.
963 fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
964 self.elaborator.patch().new_temp(ty, self.source_info.span)
// Location of `bb`'s terminator in the (patched) body.
967 fn terminator_loc(&mut self, bb: BasicBlock) -> Location {
968 let body = self.elaborator.body();
969 self.elaborator.patch().terminator_loc(body, bb)
// A `usize` constant operand with value `val`.
972 fn constant_usize(&self, val: u16) -> Operand<'tcx> {
973 Operand::Constant(box Constant {
974 span: self.source_info.span,
976 literal: ty::Const::from_usize(self.tcx(), val.into()),
// Builds an `lhs = rhs` assignment statement at this drop's source.
// NOTE(review): the remainder of this method runs past the end of the
// visible listing.
980 fn assign(&self, lhs: &Place<'tcx>, rhs: Rvalue<'tcx>) -> Statement<'tcx> {
981 Statement { source_info: self.source_info, kind: StatementKind::Assign(box (*lhs, rhs)) }