1 // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
14 use rustc::middle::lang_items;
15 use rustc::traits::Reveal;
16 use rustc::ty::{self, Ty, TyCtxt};
17 use rustc::ty::layout::VariantIdx;
18 use rustc::ty::subst::Substs;
19 use rustc::ty::util::IntTypeExt;
20 use rustc_data_structures::indexed_vec::Idx;
21 use util::patch::MirPatch;
// Two-state drop flag: records whether a value still requires dropping.
// NOTE(review): this listing is a lossy excerpt — the embedded original
// line numbers jump (28 -> 32), so the `impl` header and the `match self {`
// opener for `value` are missing here; recover them before editing.
25 #[derive(Debug, PartialEq, Eq, Copy, Clone)]
26 pub enum DropFlagState {
27 Present, // i.e., initialized
28 Absent, // i.e., deinitialized or "moved"
// `value` maps the flag state to the boolean stored in the MIR drop-flag
// local: Present => true, Absent => false.
32 pub fn value(self) -> bool {
34 DropFlagState::Present => true,
35 DropFlagState::Absent => false
// How much of the drop-flag tree a drop clears (variants missing from this
// lossy excerpt — presumably Shallow/Deep, judging by later uses of
// `DropFlagMode::Shallow` and `DropFlagMode::Deep`; confirm in full source).
49 pub enum DropFlagMode {
// Unwind destination for generated blocks. The enum declaration itself is
// missing from this excerpt; the impl below shows two variants:
// `To(BasicBlock)` (jump there on panic) and `InCleanup` (already on the
// unwind path, so no further unwind edge is allowed).
54 #[derive(Copy, Clone, Debug)]
// True iff blocks generated in this context are themselves cleanup blocks.
61 fn is_cleanup(self) -> bool {
63 Unwind::To(..) => false,
64 Unwind::InCleanup => true
// Convert to the `Option<BasicBlock>` form used by TerminatorKind::Drop's
// `unwind` field: no target while already in cleanup.
68 fn into_option(self) -> Option<BasicBlock> {
70 Unwind::To(bb) => Some(bb),
71 Unwind::InCleanup => None,
// Apply `f` to the target block, preserving `InCleanup` unchanged.
75 fn map<F>(self, f: F) -> Self where F: FnOnce(BasicBlock) -> BasicBlock {
77 Unwind::To(bb) => Unwind::To(f(bb)),
78 Unwind::InCleanup => Unwind::InCleanup
// Environment interface for drop elaboration: supplies the MIR patcher,
// type context, drop-flag queries/updates, and move-path navigation.
// `Path` abstracts a move path into the value being dropped.
// (Lossy excerpt: lines 85, 90, 94-95 and the closing brace are missing.)
83 pub trait DropElaborator<'a, 'tcx: 'a> : fmt::Debug {
84 type Path : Copy + fmt::Debug;
// Accessors for the patch being built and the compilation context.
86 fn patch(&mut self) -> &mut MirPatch<'tcx>;
87 fn mir(&self) -> &'a Mir<'tcx>;
88 fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx>;
89 fn param_env(&self) -> ty::ParamEnv<'tcx>;
// Drop-flag handling: how to drop `path`, the flag operand (if any), and
// clearing the flag at `location`.
91 fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;
92 fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>>;
93 fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode);
// Move-path projections; `None` means the subpath is not tracked separately.
96 fn field_subpath(&self, path: Self::Path, field: Field) -> Option<Self::Path>;
97 fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path>;
98 fn downcast_subpath(&self, path: Self::Path, variant: VariantIdx) -> Option<Self::Path>;
99 fn array_subpath(&self, path: Self::Path, index: u32, size: u32) -> Option<Self::Path>;
// Per-drop elaboration state bundling the elaborator with the drop site.
// NOTE(review): lossy excerpt — field declarations for `path`, `succ` and
// `unwind` (used throughout the impl below) fall in the missing lines
// 111-114; confirm their exact types (`D::Path`, `BasicBlock`, `Unwind`)
// against the full source.
103 struct DropCtxt<'l, 'b: 'l, 'tcx: 'b, D>
104 where D : DropElaborator<'b, 'tcx> + 'l
106 elaborator: &'l mut D,
108 source_info: SourceInfo,
110 place: &'l Place<'tcx>,
// Public entry point: builds a DropCtxt from the given pieces and runs
// DropCtxt::elaborate_drop on it. (Lossy excerpt: the parameters between
// lines 116 and 124 — elaborator, place, path, succ, unwind, bb — and the
// ctxt construction/call are partially missing.)
116 pub fn elaborate_drop<'b, 'tcx, D>(
118 source_info: SourceInfo,
124 where D: DropElaborator<'b, 'tcx>
127 elaborator, source_info, place, path, succ, unwind
131 impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
132 where D: DropElaborator<'b, 'tcx>
// Type of `place` as seen by the MIR being patched.
134 fn place_ty(&self, place: &Place<'tcx>) -> Ty<'tcx> {
135 place.ty(self.elaborator.mir(), self.tcx()).to_ty(self.tcx())
// Shorthand for the elaborator's type context.
138 fn tcx(&self) -> TyCtxt<'b, 'tcx, 'tcx> {
139 self.elaborator.tcx()
142 /// This elaborates a single drop instruction, located at `bb`, and
145 /// The elaborated drop checks the drop flags to only drop what
148 /// In addition, the relevant drop flags also need to be cleared
149 /// to avoid double-drops. However, in the middle of a complex
150 /// drop, one must avoid clearing some of the flags before they
151 /// are read, as that would cause a memory leak.
153 /// In particular, when dropping an ADT, multiple fields may be
154 /// joined together under the `rest` subpath. They are all controlled
155 /// by the primary drop flag, but only the last rest-field dropped
156 /// should clear it (and it must also not clear anything else).
158 /// FIXME: I think we should just control the flags externally
159 /// and then we do not need this machinery.
160 pub fn elaborate_drop<'a>(&mut self, bb: BasicBlock) {
161 debug!("elaborate_drop({:?})", self);
162 let style = self.elaborator.drop_style(self.path, DropFlagMode::Deep);
163 debug!("elaborate_drop({:?}): live - {:?}", self, style);
// NOTE(review): lossy excerpt — the `match style {` opener and the
// `DropStyle::Dead` arm header (lines 164-165) are missing; the Goto
// patch below is presumably the Dead arm (value statically not
// initialized, so the drop becomes a plain jump to `succ`).
166 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
// Static: definitely initialized — clear the deep flag and emit a real Drop.
170 DropStyle::Static => {
171 let loc = self.terminator_loc(bb);
172 self.elaborator.clear_drop_flag(loc, self.path, DropFlagMode::Deep);
173 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Drop {
174 location: self.place.clone(),
176 unwind: self.unwind.into_option(),
// Conditional: maybe-initialized — guard the whole drop on its flag.
179 DropStyle::Conditional => {
180 let unwind = self.unwind; // FIXME(#43234)
181 let succ = self.succ;
182 let drop_bb = self.complete_drop(Some(DropFlagMode::Deep), succ, unwind);
183 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
// Presumably the DropStyle::Open arm (header missing): partially moved
// value — build the field-by-field "open drop".
188 let drop_bb = self.open_drop();
189 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
196 /// Return the place and move path for each field of `variant`,
197 /// (the move path is `None` if the field is a rest field).
198 fn move_paths_for_fields(&self,
199 base_place: &Place<'tcx>,
200 variant_path: D::Path,
201 variant: &'tcx ty::VariantDef,
202 substs: &'tcx Substs<'tcx>)
203 -> Vec<(Place<'tcx>, Option<D::Path>)>
205 variant.fields.iter().enumerate().map(|(i, f)| {
206 let field = Field::new(i);
207 let subpath = self.elaborator.field_subpath(variant_path, field);
// Drop elaboration runs post-typeck with all opaque types revealed, so
// plain normalization (no inference context) is sound here.
209 assert_eq!(self.elaborator.param_env().reveal, Reveal::All);
210 let field_ty = self.tcx().normalize_erasing_regions(
211 self.elaborator.param_env(),
212 f.ty(self.tcx(), substs),
214 (base_place.clone().field(field, field_ty), subpath)
// Drop one field of the current value. A tracked subpath gets its own
// fully elaborated drop (conditioned on its own flag); an untracked
// ("rest") field is dropped conditioned on the parent's flag.
// (Lossy excerpt: parameters `place`/`succ`/`unwind` and the DropCtxt
// struct-literal openers are among the missing lines.)
218 fn drop_subpath(&mut self,
220 path: Option<D::Path>,
225 if let Some(path) = path {
226 debug!("drop_subpath: for std field {:?}", place);
229 elaborator: self.elaborator,
230 source_info: self.source_info,
231 path, place, succ, unwind,
232 }.elaborated_drop_block()
234 debug!("drop_subpath: for rest field {:?}", place);
237 elaborator: self.elaborator,
238 source_info: self.source_info,
240 // Using `self.path` here to condition the drop on
241 // our own drop flag.
243 }.complete_drop(None, succ, unwind)
247 /// Create one-half of the drop ladder for a list of fields, and return
248 /// the list of steps in it in reverse order, with the first step
249 /// dropping 0 fields and so on.
251 /// `unwind_ladder` is such a list of steps in reverse order,
252 /// which is called if the matching step of the drop glue panics.
253 fn drop_halfladder(&mut self,
254 unwind_ladder: &[Unwind],
255 mut succ: BasicBlock,
256 fields: &[(Place<'tcx>, Option<D::Path>)])
// Chain: start from `succ`, then thread each field drop (last field
// first) so each rung jumps to the previously built one.
259 Some(succ).into_iter().chain(
260 fields.iter().rev().zip(unwind_ladder)
261 .map(|(&(ref place, path), &unwind_succ)| {
262 succ = self.drop_subpath(place, path, succ, unwind_succ);
// Build the (succ, unwind) pair a drop ladder should terminate in: both
// edges first pass through a block that shallowly clears the master flag.
268 fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind) {
269 // Clear the "master" drop flag at the end. This is needed
270 // because the "master" drop protects the ADT's discriminant,
271 // which is invalidated after the ADT is dropped.
272 let (succ, unwind) = (self.succ, self.unwind); // FIXME(#43234)
274 self.drop_flag_reset_block(DropFlagMode::Shallow, succ, unwind),
275 unwind.map(|unwind| {
276 self.drop_flag_reset_block(DropFlagMode::Shallow, unwind, Unwind::InCleanup)
281 /// Create a full drop ladder, consisting of 2 connected half-drop-ladders
283 /// For example, with 3 fields, the drop ladder is
286 /// ELAB(drop location.0 [target=.d1, unwind=.c1])
288 /// ELAB(drop location.1 [target=.d2, unwind=.c2])
290 /// ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`])
292 /// ELAB(drop location.1 [target=.c2])
294 /// ELAB(drop location.2 [target=`self.unwind`])
296 /// NOTE: this does not clear the master drop flag, so you need
297 /// to point succ/unwind on a `drop_ladder_bottom`.
298 fn drop_ladder<'a>(&mut self,
299 fields: Vec<(Place<'tcx>, Option<D::Path>)>,
302 -> (BasicBlock, Unwind)
304 debug!("drop_ladder({:?}, {:?})", self, fields);
// Only fields whose type actually needs drop get a rung.
306 let mut fields = fields;
307 fields.retain(|&(ref place, _)| {
308 self.place_ty(place).needs_drop(self.tcx(), self.elaborator.param_env())
311 debug!("drop_ladder - fields needing drop: {:?}", fields);
// Build the unwind half-ladder first (all rungs are cleanup blocks),
// then the normal half-ladder whose rungs unwind into it.
313 let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
314 let unwind_ladder: Vec<_> = if let Unwind::To(target) = unwind {
315 let halfladder = self.drop_halfladder(&unwind_ladder, target, &fields);
316 halfladder.into_iter().map(Unwind::To).collect()
322 self.drop_halfladder(&unwind_ladder, succ, &fields);
// Entry points of both ladders (the step dropping all fields).
324 (*normal_ladder.last().unwrap(), *unwind_ladder.last().unwrap())
// Open drop for a tuple (also reused for closure/generator upvars):
// one ladder rung per element, terminating in a ladder bottom.
327 fn open_drop_for_tuple<'a>(&mut self, tys: &[Ty<'tcx>])
330 debug!("open_drop_for_tuple({:?}, {:?})", self, tys);
332 let fields = tys.iter().enumerate().map(|(i, &ty)| {
333 (self.place.clone().field(Field::new(i), ty),
334 self.elaborator.field_subpath(self.path, Field::new(i)))
337 let (succ, unwind) = self.drop_ladder_bottom();
338 self.drop_ladder(fields, succ, unwind).0
// Open drop for `Box<T>`: drop the pointee (`*place`) first, then free
// the box itself via box_free_block on both the success and unwind paths.
341 fn open_drop_for_box<'a>(&mut self, adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
344 debug!("open_drop_for_box({:?}, {:?}, {:?})", self, adt, substs);
346 let interior = self.place.clone().deref();
347 let interior_path = self.elaborator.deref_subpath(self.path);
349 let succ = self.succ; // FIXME(#43234)
350 let unwind = self.unwind;
351 let succ = self.box_free_block(adt, substs, succ, unwind);
352 let unwind_succ = self.unwind.map(|unwind| {
353 self.box_free_block(adt, substs, unwind, Unwind::InCleanup)
356 self.drop_subpath(&interior, interior_path, succ, unwind_succ)
// Open drop for an ADT: uninhabited enums get an Unreachable block;
// unions and ManuallyDrop skip contents; then a Drop::drop call is
// prepended when the ADT has a destructor.
359 fn open_drop_for_adt<'a>(&mut self, adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
361 debug!("open_drop_for_adt({:?}, {:?}, {:?})", self, adt, substs);
362 if adt.variants.len() == 0 {
363 return self.elaborator.patch().new_block(BasicBlockData {
365 terminator: Some(Terminator {
366 source_info: self.source_info,
367 kind: TerminatorKind::Unreachable
369 is_cleanup: self.unwind.is_cleanup()
// (Lossy excerpt: the `let skip_contents =` opener is missing here.)
374 adt.is_union() || Some(adt.did) == self.tcx().lang_items().manually_drop();
375 let contents_drop = if skip_contents {
376 (self.succ, self.unwind)
378 self.open_drop_for_adt_contents(adt, substs)
381 if adt.has_dtor(self.tcx()) {
382 self.destructor_call_block(contents_drop)
// Drop the fields of an ADT: single-variant structs use a plain ladder
// over variant 0's fields; multi-variant enums dispatch on the
// discriminant (the branch structure between lines 391 and 401 is
// partially missing in this lossy excerpt).
388 fn open_drop_for_adt_contents(&mut self, adt: &'tcx ty::AdtDef,
389 substs: &'tcx Substs<'tcx>)
390 -> (BasicBlock, Unwind) {
391 let (succ, unwind) = self.drop_ladder_bottom();
393 let fields = self.move_paths_for_fields(
396 &adt.variants[VariantIdx::new(0)],
399 self.drop_ladder(fields, succ, unwind)
401 self.open_drop_for_multivariant(adt, substs, succ, unwind)
// Build per-variant drop ladders for an enum plus the discriminant
// switches (one on the normal path, one on the unwind path) that select
// among them. Variants without a tracked downcast subpath fall into a
// shared "otherwise" drop block.
405 fn open_drop_for_multivariant(&mut self, adt: &'tcx ty::AdtDef,
406 substs: &'tcx Substs<'tcx>,
409 -> (BasicBlock, Unwind) {
410 let mut values = Vec::with_capacity(adt.variants.len());
411 let mut normal_blocks = Vec::with_capacity(adt.variants.len());
// Only allocate unwind blocks when we are not already in cleanup.
412 let mut unwind_blocks = if unwind.is_cleanup() {
415 Some(Vec::with_capacity(adt.variants.len()))
418 let mut have_otherwise = false;
420 for (variant_index, discr) in adt.discriminants(self.tcx()) {
421 let subpath = self.elaborator.downcast_subpath(
422 self.path, variant_index);
423 if let Some(variant_path) = subpath {
424 let base_place = self.place.clone().elem(
425 ProjectionElem::Downcast(adt, variant_index)
427 let fields = self.move_paths_for_fields(
430 &adt.variants[variant_index],
432 values.push(discr.val);
433 if let Unwind::To(unwind) = unwind {
434 // We can't use the half-ladder from the original
435 // drop ladder, because this breaks the
436 // "funclet can't have 2 successor funclets"
437 // requirement from MSVC:
439 // switch unwind-switch
441 // v1.0 v2.0 v2.0-unwind v1.0-unwind
443 // v1.1-unwind v2.1-unwind |
445 // \-------------------------------/
447 // Create a duplicate half-ladder to avoid that. We
448 // could technically only do this on MSVC, but I
449 // want to minimize the divergence between MSVC
452 let unwind_blocks = unwind_blocks.as_mut().unwrap();
453 let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
455 self.drop_halfladder(&unwind_ladder, unwind, &fields);
456 unwind_blocks.push(halfladder.last().cloned().unwrap());
458 let (normal, _) = self.drop_ladder(fields, succ, unwind);
459 normal_blocks.push(normal);
461 have_otherwise = true;
// Otherwise case: drop the whole place without a downcast
// (guarded by `have_otherwise` in the missing lines 462-465).
466 normal_blocks.push(self.drop_block(succ, unwind));
467 if let Unwind::To(unwind) = unwind {
468 unwind_blocks.as_mut().unwrap().push(
469 self.drop_block(unwind, Unwind::InCleanup)
// Return the two switch blocks (normal path, unwind path).
476 (self.adt_switch_block(adt, normal_blocks, &values, succ, unwind),
477 unwind.map(|unwind| {
478 self.adt_switch_block(
479 adt, unwind_blocks.unwrap(), &values, unwind, Unwind::InCleanup
// Build a SwitchInt on the enum's discriminant dispatching to the
// per-variant `blocks`, wrapped in a shallow drop-flag test.
484 fn adt_switch_block(&mut self,
485 adt: &'tcx ty::AdtDef,
486 blocks: Vec<BasicBlock>,
491 // If there are multiple variants, then if something
492 // is present within the enum the discriminant, tracked
493 // by the rest path, must be initialized.
495 // Additionally, we do not want to switch on the
496 // discriminant after it is freed, because that
497 // way lies only trouble.
498 let discr_ty = adt.repr.discr_type().to_ty(self.tcx());
499 let discr = Place::Local(self.new_temp(discr_ty));
500 let discr_rv = Rvalue::Discriminant(self.place.clone());
501 let switch_block = BasicBlockData {
502 statements: vec![self.assign(&discr, discr_rv)],
503 terminator: Some(Terminator {
504 source_info: self.source_info,
505 kind: TerminatorKind::SwitchInt {
506 discr: Operand::Move(discr),
508 values: From::from(values.to_owned()),
512 is_cleanup: unwind.is_cleanup(),
514 let switch_block = self.elaborator.patch().new_block(switch_block);
515 self.drop_flag_test_block(switch_block, succ, unwind)
// Build the block that invokes `Drop::drop(&mut *place)`: take a mutable
// reference to the place, then Call the trait's single method, landing in
// `succ` (or `unwind` on panic).
518 fn destructor_call_block<'a>(&mut self, (succ, unwind): (BasicBlock, Unwind))
521 debug!("destructor_call_block({:?}, {:?})", self, succ);
522 let tcx = self.tcx();
// `Drop` has exactly one associated item (`fn drop`), so `next()` is it.
523 let drop_trait = tcx.lang_items().drop_trait().unwrap();
524 let drop_fn = tcx.associated_items(drop_trait).next().unwrap();
525 let ty = self.place_ty(self.place);
526 let substs = tcx.mk_substs_trait(ty, &[]);
528 let ref_ty = tcx.mk_ref(tcx.types.re_erased, ty::TypeAndMut {
530 mutbl: hir::Mutability::MutMutable
532 let ref_place = self.new_temp(ref_ty);
533 let unit_temp = Place::Local(self.new_temp(tcx.mk_unit()));
535 let result = BasicBlockData {
536 statements: vec![self.assign(
537 &Place::Local(ref_place),
538 Rvalue::Ref(tcx.types.re_erased,
539 BorrowKind::Mut { allow_two_phase_borrow: false },
542 terminator: Some(Terminator {
543 kind: TerminatorKind::Call {
544 func: Operand::function_handle(tcx, drop_fn.def_id, substs,
545 self.source_info.span),
546 args: vec![Operand::Move(Place::Local(ref_place))],
547 destination: Some((unit_temp, succ)),
548 cleanup: unwind.into_option(),
551 source_info: self.source_info,
553 is_cleanup: unwind.is_cleanup(),
555 self.elaborator.patch().new_block(result)
558 /// create a loop that drops an array:
563 /// can_go = cur == length_or_end
564 /// if can_go then succ else drop-block
568 /// cur = cur.offset(1)
570 /// ptr = &mut P[cur]
// (Lossy excerpt: parameters `succ`, `cur`, `unwind`, `ptr_based` fall in
// the missing lines of the signature; returns the loop-head BasicBlock.)
574 fn drop_loop(&mut self,
577 length_or_end: &Place<'tcx>,
583 let copy = |place: &Place<'tcx>| Operand::Copy(place.clone());
584 let move_ = |place: &Place<'tcx>| Operand::Move(place.clone());
585 let tcx = self.tcx();
587 let ref_ty = tcx.mk_ref(tcx.types.re_erased, ty::TypeAndMut {
589 mutbl: hir::Mutability::MutMutable
591 let ptr = &Place::Local(self.new_temp(ref_ty));
592 let can_go = &Place::Local(self.new_temp(tcx.types.bool));
594 let one = self.constant_usize(1);
// Two iteration strategies: pointer-based (`cur` is a raw pointer,
// advanced with Offset) vs index-based (`cur` is a usize index,
// advanced with Add; the element is `place[cur]`).
595 let (ptr_next, cur_next) = if ptr_based {
598 BorrowKind::Mut { allow_two_phase_borrow: false },
599 Place::Projection(Box::new(Projection {
600 base: Place::Local(cur),
601 elem: ProjectionElem::Deref,
604 Rvalue::BinaryOp(BinOp::Offset, copy(&Place::Local(cur)), one))
608 BorrowKind::Mut { allow_two_phase_borrow: false },
609 self.place.clone().index(cur)),
610 Rvalue::BinaryOp(BinOp::Add, copy(&Place::Local(cur)), one))
// Loop body: take a reference to the current element, advance `cur`,
// then drop through the element (terminator patched below).
613 let drop_block = BasicBlockData {
615 self.assign(ptr, ptr_next),
616 self.assign(&Place::Local(cur), cur_next)
618 is_cleanup: unwind.is_cleanup(),
619 terminator: Some(Terminator {
620 source_info: self.source_info,
621 // this gets overwritten by drop elaboration.
622 kind: TerminatorKind::Unreachable,
625 let drop_block = self.elaborator.patch().new_block(drop_block);
// Loop head: exit to `succ` once `cur` reaches `length_or_end`.
627 let loop_block = BasicBlockData {
629 self.assign(can_go, Rvalue::BinaryOp(BinOp::Eq,
630 copy(&Place::Local(cur)),
631 copy(length_or_end)))
633 is_cleanup: unwind.is_cleanup(),
634 terminator: Some(Terminator {
635 source_info: self.source_info,
636 kind: TerminatorKind::if_(tcx, move_(can_go), succ, drop_block)
639 let loop_block = self.elaborator.patch().new_block(loop_block);
// Now give the body its real terminator: drop `*ptr`, looping back.
641 self.elaborator.patch().patch_terminator(drop_block, TerminatorKind::Drop {
642 location: ptr.clone().deref(),
644 unwind: unwind.into_option()
// Open drop for arrays/slices. Small fixed-size arrays with per-element
// move paths get a ladder of ConstantIndex drops; otherwise branch on
// size_of::<ety>() == 0 into an index-based or pointer-based drop loop.
650 fn open_drop_for_array(&mut self, ety: Ty<'tcx>, opt_size: Option<u64>) -> BasicBlock {
651 debug!("open_drop_for_array({:?}, {:?})", ety, opt_size);
653 // if size_of::<ety>() == 0 {
659 if let Some(size) = opt_size {
// NOTE(review): the assert message below is ungrammatical ("doesn't
// implemented ... bigger then") — worth fixing upstream; left as-is
// here since it is a runtime string.
660 assert!(size <= (u32::MAX as u64),
661 "move out check doesn't implemented for array bigger then u32");
662 let size = size as u32;
663 let fields: Vec<(Place<'tcx>, Option<D::Path>)> = (0..size).map(|i| {
664 (self.place.clone().elem(ProjectionElem::ConstantIndex{
669 self.elaborator.array_subpath(self.path, i, size))
// Only use the per-element ladder if at least one element is
// individually tracked; otherwise fall through to the loops.
672 if fields.iter().any(|(_,path)| path.is_some()) {
673 let (succ, unwind) = self.drop_ladder_bottom();
674 return self.drop_ladder(fields, succ, unwind).0
678 let move_ = |place: &Place<'tcx>| Operand::Move(place.clone());
679 let tcx = self.tcx();
680 let size = &Place::Local(self.new_temp(tcx.types.usize));
681 let size_is_zero = &Place::Local(self.new_temp(tcx.types.bool));
682 let base_block = BasicBlockData {
684 self.assign(size, Rvalue::NullaryOp(NullOp::SizeOf, ety)),
685 self.assign(size_is_zero, Rvalue::BinaryOp(BinOp::Eq,
687 self.constant_usize(0)))
689 is_cleanup: self.unwind.is_cleanup(),
690 terminator: Some(Terminator {
691 source_info: self.source_info,
// ZSTs cannot be iterated by pointer offset, hence the index loop
// (ptr_based = false) for the zero-size case.
692 kind: TerminatorKind::if_(
695 self.drop_loop_pair(ety, false),
696 self.drop_loop_pair(ety, true)
700 self.elaborator.patch().new_block(base_block)
703 // create a pair of drop-loops of `place`, which drops its contents
704 // even in the case of 1 panic. If `ptr_based`, create a pointer loop,
705 // otherwise create an index loop.
706 fn drop_loop_pair(&mut self, ety: Ty<'tcx>, ptr_based: bool) -> BasicBlock {
707 debug!("drop_loop_pair({:?}, {:?})", ety, ptr_based);
708 let tcx = self.tcx();
// Cursor type: *mut ety for pointer loops, usize for index loops
// (the else branch falls in the missing lines 710-714).
709 let iter_ty = if ptr_based {
715 let cur = self.new_temp(iter_ty);
716 let length = Place::Local(self.new_temp(tcx.types.usize));
717 let length_or_end = if ptr_based {
718 Place::Local(self.new_temp(iter_ty))
// Cleanup loop first, so the main loop can unwind into it.
723 let unwind = self.unwind.map(|unwind| {
724 self.drop_loop(unwind,
732 let succ = self.succ; // FIXME(#43234)
733 let loop_block = self.drop_loop(
// Preheader: compute length, and for pointer loops materialize
// cur = &raw place cast to *mut T and end = cur.offset(len).
741 let cur = Place::Local(cur);
742 let zero = self.constant_usize(0);
743 let mut drop_block_stmts = vec![];
744 drop_block_stmts.push(self.assign(&length, Rvalue::Len(self.place.clone())));
746 let tmp_ty = tcx.mk_mut_ptr(self.place_ty(self.place));
747 let tmp = Place::Local(self.new_temp(tmp_ty));
749 // cur = tmp as *mut T;
750 // end = Offset(cur, len);
751 drop_block_stmts.push(self.assign(&tmp, Rvalue::Ref(
753 BorrowKind::Mut { allow_two_phase_borrow: false },
756 drop_block_stmts.push(self.assign(&cur, Rvalue::Cast(
757 CastKind::Misc, Operand::Move(tmp), iter_ty
759 drop_block_stmts.push(self.assign(&length_or_end,
760 Rvalue::BinaryOp(BinOp::Offset,
761 Operand::Copy(cur), Operand::Move(length)
764 // index = 0 (length already pushed)
765 drop_block_stmts.push(self.assign(&cur, Rvalue::Use(zero)));
767 let drop_block = self.elaborator.patch().new_block(BasicBlockData {
768 statements: drop_block_stmts,
769 is_cleanup: unwind.is_cleanup(),
770 terminator: Some(Terminator {
771 source_info: self.source_info,
772 kind: TerminatorKind::Goto { target: loop_block }
776 // FIXME(#34708): handle partially-dropped array/slice elements.
777 let reset_block = self.drop_flag_reset_block(DropFlagMode::Deep, drop_block, unwind);
778 self.drop_flag_test_block(reset_block, succ, unwind)
781 /// The slow-path - create an "open", elaborated drop for a type
782 /// which is moved-out-of only partially, and patch `bb` to a jump
783 /// to it. This must not be called on ADTs with a destructor,
784 /// as these can't be moved-out-of, except for `Box<T>`, which is
787 /// This creates a "drop ladder" that drops the needed fields of the
788 /// ADT, both in the success case or if one of the destructors fail.
789 fn open_drop<'a>(&mut self) -> BasicBlock {
790 let ty = self.place_ty(self.place);
// Dispatch on the dropped type (the `match ty.sty {` opener is among
// this excerpt's missing lines).
792 ty::Closure(def_id, substs) => {
793 let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx()).collect();
794 self.open_drop_for_tuple(&tys)
796 // Note that `elaborate_drops` only drops the upvars of a generator,
797 // and this is ok because `open_drop` here can only be reached
798 // within that own generator's resume function.
799 // This should only happen for the self argument on the resume function.
800 // It effectively only contains upvars until the generator transformation runs.
801 // See librustc_mir/transform/generator.rs for more details.
802 ty::Generator(def_id, substs, _) => {
803 let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx()).collect();
804 self.open_drop_for_tuple(&tys)
// Presumably the ty::Tuple arm (header on missing line 806).
807 self.open_drop_for_tuple(tys)
// ADTs: boxes are special-cased, everything else goes through
// open_drop_for_adt (the `if def.is_box()` test is on missing line 810).
809 ty::Adt(def, substs) => {
811 self.open_drop_for_box(def, substs)
813 self.open_drop_for_adt(def, substs)
// Presumably the ty::Dynamic arm: trait objects get a complete
// (flag-guarded, non-open) drop.
817 let unwind = self.unwind; // FIXME(#43234)
818 let succ = self.succ;
819 self.complete_drop(Some(DropFlagMode::Deep), succ, unwind)
821 ty::Array(ety, size) => {
822 let size = size.assert_usize(self.tcx());
823 self.open_drop_for_array(ety, size)
825 ty::Slice(ety) => self.open_drop_for_array(ety, None),
827 _ => bug!("open drop from non-ADT `{:?}`", ty)
831 /// Return a basic block that drops a place using the context
832 /// and path in `c`. If `mode` is something, also clear `c`
835 /// if FLAG(self.path)
836 /// if let Some(mode) = mode: FLAG(self.path)[mode] = false
838 fn complete_drop<'a>(&mut self,
839 drop_mode: Option<DropFlagMode>,
841 unwind: Unwind) -> BasicBlock
843 debug!("complete_drop({:?},{:?})", self, drop_mode);
// Build drop -> optional flag-reset -> flag-test, back to front.
845 let drop_block = self.drop_block(succ, unwind);
846 let drop_block = if let Some(mode) = drop_mode {
847 self.drop_flag_reset_block(mode, drop_block, unwind)
852 self.drop_flag_test_block(drop_block, succ, unwind)
// New block that clears this path's drop flag (per `mode`) at its start
// and then jumps to `succ`. (Parameters `mode`/`succ` fall in the missing
// lines of the signature.)
855 fn drop_flag_reset_block(&mut self,
858 unwind: Unwind) -> BasicBlock
860 debug!("drop_flag_reset_block({:?},{:?})", self, mode);
862 let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
863 let block_start = Location { block: block, statement_index: 0 };
864 self.elaborator.clear_drop_flag(block_start, self.path, mode);
// Create a fresh Drop block for the current place and immediately run
// full elaboration on it; returns the (now elaborated) block.
868 fn elaborated_drop_block<'a>(&mut self) -> BasicBlock {
869 debug!("elaborated_drop_block({:?})", self);
870 let unwind = self.unwind; // FIXME(#43234)
871 let succ = self.succ;
872 let blk = self.drop_block(succ, unwind);
873 self.elaborate_drop(blk);
// Flag-guarded box_free call: only frees the box if its drop flag is set.
877 fn box_free_block<'a>(
879 adt: &'tcx ty::AdtDef,
880 substs: &'tcx Substs<'tcx>,
884 let block = self.unelaborated_free_block(adt, substs, target, unwind);
885 self.drop_flag_test_block(block, target, unwind)
// Unconditional call to the `box_free` lang item, passing the box's
// fields by move; shallowly clears this path's drop flag at block entry.
888 fn unelaborated_free_block<'a>(
890 adt: &'tcx ty::AdtDef,
891 substs: &'tcx Substs<'tcx>,
895 let tcx = self.tcx();
896 let unit_temp = Place::Local(self.new_temp(tcx.mk_unit()));
897 let free_func = tcx.require_lang_item(lang_items::BoxFreeFnLangItem);
// Box is a one-variant struct; forward each of its fields as an argument.
898 let args = adt.variants[VariantIdx::new(0)].fields.iter().enumerate().map(|(i, f)| {
899 let field = Field::new(i);
900 let field_ty = f.ty(self.tcx(), substs);
901 Operand::Move(self.place.clone().field(field, field_ty))
904 let call = TerminatorKind::Call {
905 func: Operand::function_handle(tcx, free_func, substs, self.source_info.span),
907 destination: Some((unit_temp, target)),
909 from_hir_call: false,
911 let free_block = self.new_block(unwind, call);
913 let block_start = Location { block: free_block, statement_index: 0 };
914 self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
// New block whose terminator drops the current place, then goes to `target`.
918 fn drop_block<'a>(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
919 let block = TerminatorKind::Drop {
920 location: self.place.clone(),
922 unwind: unwind.into_option()
924 self.new_block(unwind, block)
// Route control flow based on the (shallow) drop style of this path:
// statically dead -> on_unset, statically live -> on_set, otherwise emit
// a runtime test of the drop flag.
927 fn drop_flag_test_block(&mut self,
929 on_unset: BasicBlock,
933 let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
934 debug!("drop_flag_test_block({:?},{:?},{:?},{:?}) - {:?}",
935 self, on_set, on_unset, unwind, style);
938 DropStyle::Dead => on_unset,
939 DropStyle::Static => on_set,
940 DropStyle::Conditional | DropStyle::Open => {
941 let flag = self.elaborator.get_drop_flag(self.path).unwrap();
942 let term = TerminatorKind::if_(self.tcx(), flag, on_set, on_unset);
943 self.new_block(unwind, term)
// Append a statement-less block with terminator `k`; its cleanup-ness
// is inherited from `unwind`.
948 fn new_block<'a>(&mut self,
950 k: TerminatorKind<'tcx>)
953 self.elaborator.patch().new_block(BasicBlockData {
955 terminator: Some(Terminator {
956 source_info: self.source_info, kind: k
958 is_cleanup: unwind.is_cleanup()
// Allocate a fresh temporary local of type `ty` via the patch.
962 fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
963 self.elaborator.patch().new_temp(ty, self.source_info.span)
// Location of `bb`'s terminator, accounting for statements the patch
// has already queued for that block.
966 fn terminator_loc(&mut self, bb: BasicBlock) -> Location {
967 let mir = self.elaborator.mir();
968 self.elaborator.patch().terminator_loc(mir, bb)
// Constant usize operand. The `u16` parameter bounds callers (here:
// 0 and 1) well inside usize range before the widening `.into()`.
971 fn constant_usize(&self, val: u16) -> Operand<'tcx> {
972 Operand::Constant(box Constant {
973 span: self.source_info.span,
974 ty: self.tcx().types.usize,
976 literal: ty::Const::from_usize(self.tcx(), val.into()),
980 fn assign(&self, lhs: &Place<'tcx>, rhs: Rvalue<'tcx>) -> Statement<'tcx> {
982 source_info: self.source_info,
983 kind: StatementKind::Assign(lhs.clone(), box rhs)