1 // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
14 use rustc::middle::const_val::{ConstInt, ConstVal};
15 use rustc::middle::lang_items;
16 use rustc::ty::{self, Ty, TyCtxt};
17 use rustc::ty::subst::{Kind, Substs};
18 use rustc::ty::util::IntTypeExt;
19 use rustc_data_structures::indexed_vec::Idx;
20 use util::patch::MirPatch;
// Whether a move path is initialized ("Present") or has been moved out of /
// deinitialized ("Absent") at a given program point.
24 #[derive(Debug, PartialEq, Eq, Copy, Clone)]
25 pub enum DropFlagState {
26 Present, // i.e. initialized
27 Absent, // i.e. deinitialized or "moved"
// Boolean value stored into the runtime drop flag for this state:
// `true` = still needs dropping, `false` = already dropped or moved.
31 pub fn value(self) -> bool {
33 DropFlagState::Present => true,
34 DropFlagState::Absent => false
// How much of the drop-flag tree to clear when a drop completes
// (variants elided from this view; `Shallow` and `Deep` are used below).
48 pub enum DropFlagMode {
// `Unwind` describes where control goes if a drop panics: either to a
// concrete cleanup block (`To`) or nowhere because we are already inside
// a cleanup ("funclet") and a second panic aborts (`InCleanup`).
53 #[derive(Copy, Clone, Debug)]
// True iff blocks generated under this unwind context must themselves be
// marked `is_cleanup` in the MIR.
60 fn is_cleanup(self) -> bool {
62 Unwind::To(..) => false,
63 Unwind::InCleanup => true
// Convert to the `Option<BasicBlock>` form used by `TerminatorKind::Drop`
// and `Call::cleanup`: no unwind edge exists inside a cleanup block.
67 fn into_option(self) -> Option<BasicBlock> {
69 Unwind::To(bb) => Some(bb),
70 Unwind::InCleanup => None,
// Apply `f` to the target block, if any; `InCleanup` is passed through.
74 fn map<F>(self, f: F) -> Self where F: FnOnce(BasicBlock) -> BasicBlock {
76 Unwind::To(bb) => Unwind::To(f(bb)),
77 Unwind::InCleanup => Unwind::InCleanup
// Client interface for drop elaboration. An implementor supplies the MIR
// being patched, drop-flag bookkeeping, and the "move path" lattice that
// tracks which sub-places may still be initialized.
82 pub trait DropElaborator<'a, 'tcx: 'a> : fmt::Debug {
// Opaque identifier of a move path (a place tracked for initializedness).
83 type Path : Copy + fmt::Debug;
// Accessors for the patch being built, the MIR, and the type context.
85 fn patch(&mut self) -> &mut MirPatch<'tcx>;
86 fn mir(&self) -> &'a Mir<'tcx>;
87 fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx>;
88 fn param_env(&self) -> ty::ParamEnv<'tcx>;
// Drop-flag queries/updates: what kind of drop `path` needs under `mode`,
// the runtime flag operand (if the drop is conditional), and clearing the
// flag at `location` once the drop has happened.
90 fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;
91 fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>>;
92 fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode);
// Navigate from a path to the path of one of its components; `None` means
// the component is not tracked separately (a "rest" field).
95 fn field_subpath(&self, path: Self::Path, field: Field) -> Option<Self::Path>;
96 fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path>;
97 fn downcast_subpath(&self, path: Self::Path, variant: usize) -> Option<Self::Path>;
98 fn array_subpath(&self, path: Self::Path, index: u32, size: u32) -> Option<Self::Path>;
// Bundle of state threaded through one drop elaboration: the elaborator,
// the source span to attribute new MIR to, and the place being dropped
// (plus, in elided fields, its move path, successor block, and unwind).
102 struct DropCtxt<'l, 'b: 'l, 'tcx: 'b, D>
103 where D : DropElaborator<'b, 'tcx> + 'l
105 elaborator: &'l mut D,
107 source_info: SourceInfo,
109 place: &'l Place<'tcx>,
// Public entry point: build a `DropCtxt` from the pieces and elaborate the
// drop terminator (the constructed context is used at line 126 below).
115 pub fn elaborate_drop<'b, 'tcx, D>(
117 source_info: SourceInfo,
123 where D: DropElaborator<'b, 'tcx>
126 elaborator, source_info, place, path, succ, unwind
130 impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
131 where D: DropElaborator<'b, 'tcx>
// Type of `place` in the MIR body (projections resolved via `Place::ty`).
133 fn place_ty(&self, place: &Place<'tcx>) -> Ty<'tcx> {
134 place.ty(self.elaborator.mir(), self.tcx()).to_ty(self.tcx())
137 fn tcx(&self) -> TyCtxt<'b, 'tcx, 'tcx> {
138 self.elaborator.tcx()
141 /// This elaborates a single drop instruction, located at `bb`, and
144 /// The elaborated drop checks the drop flags to only drop what
147 /// In addition, the relevant drop flags also need to be cleared
148 /// to avoid double-drops. However, in the middle of a complex
149 /// drop, one must avoid clearing some of the flags before they
150 /// are read, as that would cause a memory leak.
152 /// In particular, when dropping an ADT, multiple fields may be
153 /// joined together under the `rest` subpath. They are all controlled
154 /// by the primary drop flag, but only the last rest-field dropped
155 /// should clear it (and it must also not clear anything else).
157 /// FIXME: I think we should just control the flags externally
158 /// and then we do not need this machinery.
159 pub fn elaborate_drop<'a>(&mut self, bb: BasicBlock) {
160 debug!("elaborate_drop({:?})", self);
// Dispatch on the statically-known drop style for this path.
161 let style = self.elaborator.drop_style(self.path, DropFlagMode::Deep);
162 debug!("elaborate_drop({:?}): live - {:?}", self, style);
// (Dead case, elided in this view: the drop is a no-op, jump to succ.)
165 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
// Static: value is definitely initialized — emit an unconditional Drop,
// clearing the (deep) drop flags at the terminator's own location first.
169 DropStyle::Static => {
170 let loc = self.terminator_loc(bb);
171 self.elaborator.clear_drop_flag(loc, self.path, DropFlagMode::Deep);
172 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Drop {
173 location: self.place.clone(),
175 unwind: self.unwind.into_option(),
// Conditional: whole-or-nothing initialization — guard the drop on the
// runtime flag via `complete_drop`, then jump to the guarded block.
178 DropStyle::Conditional => {
179 let unwind = self.unwind; // FIXME(#6393)
180 let succ = self.succ;
181 let drop_bb = self.complete_drop(Some(DropFlagMode::Deep), succ, unwind);
182 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
// Open: partially moved-out — build the full field-by-field drop ladder.
187 let drop_bb = self.open_drop();
188 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
195 /// Return the place and move path for each field of `variant`,
196 /// (the move path is `None` if the field is a rest field).
197 fn move_paths_for_fields(&self,
198 base_place: &Place<'tcx>,
199 variant_path: D::Path,
200 variant: &'tcx ty::VariantDef,
201 substs: &'tcx Substs<'tcx>)
202 -> Vec<(Place<'tcx>, Option<D::Path>)>
204 variant.fields.iter().enumerate().map(|(i, f)| {
205 let field = Field::new(i);
206 let subpath = self.elaborator.field_subpath(variant_path, field);
// Normalize the field type in the caller's param-env so projections
// (associated types) are resolved before being stored in the place.
209 self.tcx().normalize_associated_type_in_env(
210 &f.ty(self.tcx(), substs),
211 self.elaborator.param_env()
213 (base_place.clone().field(field, field_ty), subpath)
// Drop one field/component of the current place. `path` is `Some` when the
// component has its own tracked move path (recursively elaborate it), and
// `None` for a rest field (conditionally drop, keyed on *our* drop flag).
217 fn drop_subpath(&mut self,
219 path: Option<D::Path>,
224 if let Some(path) = path {
225 debug!("drop_subpath: for std field {:?}", place);
// Recurse with a child context rooted at the field's own path.
228 elaborator: self.elaborator,
229 source_info: self.source_info,
230 path, place, succ, unwind,
231 }.elaborated_drop_block()
233 debug!("drop_subpath: for rest field {:?}", place);
236 elaborator: self.elaborator,
237 source_info: self.source_info,
239 // Using `self.path` here to condition the drop on
240 // our own drop flag.
// `None` drop mode: do not clear any flag here — only the last
// rest-field drop may clear the primary flag (see elaborate_drop docs).
242 }.complete_drop(None, succ, unwind)
246 /// Create one-half of the drop ladder for a list of fields, and return
247 /// the list of steps in it in reverse order, with the first step
248 /// dropping 0 fields and so on.
250 /// `unwind_ladder` is such a list of steps in reverse order,
251 /// which is called if the matching step of the drop glue panics.
252 fn drop_halfladder(&mut self,
253 unwind_ladder: &[Unwind],
254 mut succ: BasicBlock,
255 fields: &[(Place<'tcx>, Option<D::Path>)])
// Chain blocks back-to-front: each field's drop targets the block built
// for the fields after it; `succ` accumulates the most recent block.
258 Some(succ).into_iter().chain(
259 fields.iter().rev().zip(unwind_ladder)
260 .map(|(&(ref place, path), &unwind_succ)| {
261 succ = self.drop_subpath(place, path, succ, unwind_succ);
267 fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind) {
268 // Clear the "master" drop flag at the end. This is needed
269 // because the "master" drop protects the ADT's discriminant,
270 // which is invalidated after the ADT is dropped.
271 let (succ, unwind) = (self.succ, self.unwind); // FIXME(#6393)
// Returns (normal-exit block, unwind-exit block), both of which reset
// the shallow flag before continuing to the original succ/unwind.
273 self.drop_flag_reset_block(DropFlagMode::Shallow, succ, unwind),
274 unwind.map(|unwind| {
275 self.drop_flag_reset_block(DropFlagMode::Shallow, unwind, Unwind::InCleanup)
280 /// Create a full drop ladder, consisting of 2 connected half-drop-ladders
282 /// For example, with 3 fields, the drop ladder is
285 /// ELAB(drop location.0 [target=.d1, unwind=.c1])
287 /// ELAB(drop location.1 [target=.d2, unwind=.c2])
289 /// ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`])
291 /// ELAB(drop location.1 [target=.c2])
293 /// ELAB(drop location.2 [target=`self.unwind`])
295 /// NOTE: this does not clear the master drop flag, so you need
296 /// to point succ/unwind on a `drop_ladder_bottom`.
297 fn drop_ladder<'a>(&mut self,
298 fields: Vec<(Place<'tcx>, Option<D::Path>)>,
301 -> (BasicBlock, Unwind)
303 debug!("drop_ladder({:?}, {:?})", self, fields);
// Skip fields whose type statically needs no drop.
305 let mut fields = fields;
306 fields.retain(|&(ref place, _)| {
307 self.place_ty(place).needs_drop(self.tcx(), self.elaborator.param_env())
310 debug!("drop_ladder - fields needing drop: {:?}", fields);
// Build the unwind half-ladder first (everything in it is a cleanup
// block), then the normal half-ladder whose unwind edges point into it.
312 let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
313 let unwind_ladder: Vec<_> = if let Unwind::To(target) = unwind {
314 let halfladder = self.drop_halfladder(&unwind_ladder, target, &fields);
315 halfladder.into_iter().map(Unwind::To).collect()
321 self.drop_halfladder(&unwind_ladder, succ, &fields);
// The last entries are the ladder entry points (drop 0 fields first).
323 (*normal_ladder.last().unwrap(), *unwind_ladder.last().unwrap())
// Tuple (and tuple-like) drop: one ladder step per element.
326 fn open_drop_for_tuple<'a>(&mut self, tys: &[Ty<'tcx>])
329 debug!("open_drop_for_tuple({:?}, {:?})", self, tys);
331 let fields = tys.iter().enumerate().map(|(i, &ty)| {
332 (self.place.clone().field(Field::new(i), ty),
333 self.elaborator.field_subpath(self.path, Field::new(i)))
336 let (succ, unwind) = self.drop_ladder_bottom();
337 self.drop_ladder(fields, succ, unwind).0
// Box drop: drop the pointed-to contents first, then free the box itself
// (on both the normal and the unwind path) via `box_free_block`.
340 fn open_drop_for_box<'a>(&mut self, ty: Ty<'tcx>) -> BasicBlock
342 debug!("open_drop_for_box({:?}, {:?})", self, ty);
344 let interior = self.place.clone().deref();
345 let interior_path = self.elaborator.deref_subpath(self.path);
347 let succ = self.succ; // FIXME(#6393)
348 let unwind = self.unwind;
349 let succ = self.box_free_block(ty, succ, unwind);
350 let unwind_succ = self.unwind.map(|unwind| {
351 self.box_free_block(ty, unwind, Unwind::InCleanup)
354 self.drop_subpath(&interior, interior_path, succ, unwind_succ)
// ADT drop: uninhabited enums get an Unreachable block; unions drop no
// contents; otherwise drop the fields, then the destructor if any.
357 fn open_drop_for_adt<'a>(&mut self, adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
359 debug!("open_drop_for_adt({:?}, {:?}, {:?})", self, adt, substs);
360 if adt.variants.len() == 0 {
361 return self.elaborator.patch().new_block(BasicBlockData {
363 terminator: Some(Terminator {
364 source_info: self.source_info,
365 kind: TerminatorKind::Unreachable
367 is_cleanup: self.unwind.is_cleanup()
// Unions never drop their contents, so the contents-drop is a no-op
// (jump straight to succ/unwind).
371 let contents_drop = if adt.is_union() {
372 (self.succ, self.unwind)
374 self.open_drop_for_adt_contents(adt, substs)
// If the ADT has a Drop impl, call it *before* the contents drop.
377 if adt.has_dtor(self.tcx()) {
378 self.destructor_call_block(contents_drop)
// Drop the ADT's fields: single-variant ADTs use a plain ladder,
// multi-variant enums switch on the discriminant first.
384 fn open_drop_for_adt_contents(&mut self, adt: &'tcx ty::AdtDef,
385 substs: &'tcx Substs<'tcx>)
386 -> (BasicBlock, Unwind) {
387 let (succ, unwind) = self.drop_ladder_bottom();
389 let fields = self.move_paths_for_fields(
395 self.drop_ladder(fields, succ, unwind)
397 self.open_drop_for_multivariant(adt, substs, succ, unwind)
// Enum drop: build one drop ladder per variant whose downcast path is
// tracked, a shared "otherwise" drop for untracked variants, and switch
// on the discriminant to pick the right one (duplicated for unwind).
401 fn open_drop_for_multivariant(&mut self, adt: &'tcx ty::AdtDef,
402 substs: &'tcx Substs<'tcx>,
405 -> (BasicBlock, Unwind) {
406 let mut values = Vec::with_capacity(adt.variants.len());
407 let mut normal_blocks = Vec::with_capacity(adt.variants.len());
// `unwind_blocks` is only needed when we are not already in a cleanup.
408 let mut unwind_blocks = if unwind.is_cleanup() {
411 Some(Vec::with_capacity(adt.variants.len()))
414 let mut have_otherwise = false;
416 for (variant_index, discr) in adt.discriminants(self.tcx()).enumerate() {
417 let subpath = self.elaborator.downcast_subpath(
418 self.path, variant_index);
419 if let Some(variant_path) = subpath {
420 let base_place = self.place.clone().elem(
421 ProjectionElem::Downcast(adt, variant_index)
423 let fields = self.move_paths_for_fields(
426 &adt.variants[variant_index],
429 if let Unwind::To(unwind) = unwind {
430 // We can't use the half-ladder from the original
431 // drop ladder, because this breaks the
432 // "funclet can't have 2 successor funclets"
433 // requirement from MSVC:
435 // switch unwind-switch
437 // v1.0 v2.0 v2.0-unwind v1.0-unwind
439 // v1.1-unwind v2.1-unwind |
441 // \-------------------------------/
443 // Create a duplicate half-ladder to avoid that. We
444 // could technically only do this on MSVC, but I
445 // want to minimize the divergence between MSVC
448 let unwind_blocks = unwind_blocks.as_mut().unwrap();
449 let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
451 self.drop_halfladder(&unwind_ladder, unwind, &fields);
452 unwind_blocks.push(halfladder.last().cloned().unwrap());
454 let (normal, _) = self.drop_ladder(fields, succ, unwind);
455 normal_blocks.push(normal);
// Variant not tracked: fold it into the switch's otherwise arm.
457 have_otherwise = true;
// Otherwise arm: a plain (whole-value) drop block for untracked variants.
462 normal_blocks.push(self.drop_block(succ, unwind));
463 if let Unwind::To(unwind) = unwind {
464 unwind_blocks.as_mut().unwrap().push(
465 self.drop_block(unwind, Unwind::InCleanup)
// Emit the normal switch, and (if unwinding is possible) a second,
// cleanup-only switch over the duplicated unwind ladders.
472 (self.adt_switch_block(adt, normal_blocks, &values, succ, unwind),
473 unwind.map(|unwind| {
474 self.adt_switch_block(
475 adt, unwind_blocks.unwrap(), &values, unwind, Unwind::InCleanup
// Build `switch (discriminant of place) -> blocks[..]`, guarded by the
// shallow drop-flag test (the discriminant may only be read while the
// value is known-initialized).
480 fn adt_switch_block(&mut self,
481 adt: &'tcx ty::AdtDef,
482 blocks: Vec<BasicBlock>,
487 // If there are multiple variants, then if something
488 // is present within the enum the discriminant, tracked
489 // by the rest path, must be initialized.
491 // Additionally, we do not want to switch on the
492 // discriminant after it is free-ed, because that
493 // way lies only trouble.
// Read the discriminant into a fresh temp, then SwitchInt on it.
494 let discr_ty = adt.repr.discr_type().to_ty(self.tcx());
495 let discr = Place::Local(self.new_temp(discr_ty));
496 let discr_rv = Rvalue::Discriminant(self.place.clone());
497 let switch_block = BasicBlockData {
498 statements: vec![self.assign(&discr, discr_rv)],
499 terminator: Some(Terminator {
500 source_info: self.source_info,
501 kind: TerminatorKind::SwitchInt {
502 discr: Operand::Move(discr),
504 values: From::from(values.to_owned()),
508 is_cleanup: unwind.is_cleanup(),
510 let switch_block = self.elaborator.patch().new_block(switch_block);
511 self.drop_flag_test_block(switch_block, succ, unwind)
// Emit `<place as Drop>::drop(&mut place)` as a Call terminator; `succ`
// is the contents-drop entry point, so fields are dropped after the dtor.
514 fn destructor_call_block<'a>(&mut self, (succ, unwind): (BasicBlock, Unwind))
517 debug!("destructor_call_block({:?}, {:?})", self, succ);
518 let tcx = self.tcx();
// `Drop::drop` is the trait's sole method, hence `.next().unwrap()`.
519 let drop_trait = tcx.lang_items().drop_trait().unwrap();
520 let drop_fn = tcx.associated_items(drop_trait).next().unwrap();
521 let ty = self.place_ty(self.place);
522 let substs = tcx.mk_substs(iter::once(Kind::from(ty)));
// Build `&mut ty` (erased region) for the `&mut self` argument.
524 let ref_ty = tcx.mk_ref(tcx.types.re_erased, ty::TypeAndMut {
526 mutbl: hir::Mutability::MutMutable
528 let ref_place = self.new_temp(ref_ty);
529 let unit_temp = Place::Local(self.new_temp(tcx.mk_nil()));
531 let result = BasicBlockData {
532 statements: vec![self.assign(
533 &Place::Local(ref_place),
534 Rvalue::Ref(tcx.types.re_erased, BorrowKind::Mut, self.place.clone())
536 terminator: Some(Terminator {
537 kind: TerminatorKind::Call {
538 func: Operand::function_handle(tcx, drop_fn.def_id, substs,
539 self.source_info.span),
540 args: vec![Operand::Move(Place::Local(ref_place))],
541 destination: Some((unit_temp, succ)),
542 cleanup: unwind.into_option(),
544 source_info: self.source_info
546 is_cleanup: unwind.is_cleanup(),
548 self.elaborator.patch().new_block(result)
551 /// create a loop that drops an array:
556 /// can_go = cur == length_or_end
557 /// if can_go then succ else drop-block
561 /// cur = cur.offset(1)
563 /// ptr = &mut LV[cur]
// Build one drop loop over the array/slice elements. `ptr_based` selects
// pointer-bumping iteration (cur is a raw pointer, compared to an end
// pointer) vs. index-based iteration (cur is a usize, compared to length).
567 fn drop_loop(&mut self,
570 length_or_end: &Place<'tcx>,
576 let copy = |place: &Place<'tcx>| Operand::Copy(place.clone());
577 let move_ = |place: &Place<'tcx>| Operand::Move(place.clone());
578 let tcx = self.tcx();
580 let ref_ty = tcx.mk_ref(tcx.types.re_erased, ty::TypeAndMut {
582 mutbl: hir::Mutability::MutMutable
584 let ptr = &Place::Local(self.new_temp(ref_ty));
585 let can_go = &Place::Local(self.new_temp(tcx.types.bool));
587 let one = self.constant_usize(1);
// Per-iteration element reference and increment, by mode:
// ptr-based: ptr = cur; cur = cur.offset(1)
// index-based: ptr = &mut place[cur]; cur = cur + 1
588 let (ptr_next, cur_next) = if ptr_based {
589 (Rvalue::Use(copy(&Place::Local(cur))),
590 Rvalue::BinaryOp(BinOp::Offset, copy(&Place::Local(cur)), one))
595 self.place.clone().index(cur)),
596 Rvalue::BinaryOp(BinOp::Add, copy(&Place::Local(cur)), one))
// Drop block: advance the cursor; its terminator is patched below into
// a Drop of `*ptr` that loops back.
599 let drop_block = BasicBlockData {
601 self.assign(ptr, ptr_next),
602 self.assign(&Place::Local(cur), cur_next)
604 is_cleanup: unwind.is_cleanup(),
605 terminator: Some(Terminator {
606 source_info: self.source_info,
607 // this gets overwritten by drop elaboration.
608 kind: TerminatorKind::Unreachable,
611 let drop_block = self.elaborator.patch().new_block(drop_block);
// Loop header: exit to succ when cur reaches length_or_end.
613 let loop_block = BasicBlockData {
615 self.assign(can_go, Rvalue::BinaryOp(BinOp::Eq,
616 copy(&Place::Local(cur)),
617 copy(length_or_end)))
619 is_cleanup: unwind.is_cleanup(),
620 terminator: Some(Terminator {
621 source_info: self.source_info,
622 kind: TerminatorKind::if_(tcx, move_(can_go), succ, drop_block)
625 let loop_block = self.elaborator.patch().new_block(loop_block);
627 self.elaborator.patch().patch_terminator(drop_block, TerminatorKind::Drop {
628 location: ptr.clone().deref(),
630 unwind: unwind.into_option()
// Array drop. Small arrays with per-element move paths get an unrolled
// ladder; otherwise emit a size_of(ety)==0 test choosing between an
// index-based loop (ZST elements) and a pointer-based loop.
636 fn open_drop_for_array(&mut self, ety: Ty<'tcx>, opt_size: Option<u64>) -> BasicBlock {
637 debug!("open_drop_for_array({:?}, {:?})", ety, opt_size);
639 // if size_of::<ety>() == 0 {
645 if let Some(size) = opt_size {
// NOTE(review): grammar in this assert message ("doesn't implemented",
// "bigger then") — it is a runtime string, left unchanged here.
646 assert!(size <= (u32::MAX as u64),
647 "move out check doesn't implemented for array bigger then u32");
648 let size = size as u32;
649 let fields: Vec<(Place<'tcx>, Option<D::Path>)> = (0..size).map(|i| {
650 (self.place.clone().elem(ProjectionElem::ConstantIndex{
655 self.elaborator.array_subpath(self.path, i, size))
// Only unroll if at least one element is individually tracked.
658 if fields.iter().any(|(_,path)| path.is_some()) {
659 let (succ, unwind) = self.drop_ladder_bottom();
660 return self.drop_ladder(fields, succ, unwind).0
664 let move_ = |place: &Place<'tcx>| Operand::Move(place.clone());
665 let tcx = self.tcx();
666 let size = &Place::Local(self.new_temp(tcx.types.usize));
667 let size_is_zero = &Place::Local(self.new_temp(tcx.types.bool));
668 let base_block = BasicBlockData {
670 self.assign(size, Rvalue::NullaryOp(NullOp::SizeOf, ety)),
671 self.assign(size_is_zero, Rvalue::BinaryOp(BinOp::Eq,
673 self.constant_usize(0)))
675 is_cleanup: self.unwind.is_cleanup(),
676 terminator: Some(Terminator {
677 source_info: self.source_info,
678 kind: TerminatorKind::if_(
// ZST elements (size==0): index loop; otherwise: pointer loop.
681 self.drop_loop_pair(ety, false),
682 self.drop_loop_pair(ety, true)
686 self.elaborator.patch().new_block(base_block)
689 // create a pair of drop-loops of `place`, which drops its contents
690 // even in the case of 1 panic. If `ptr_based`, create a pointer loop,
691 // otherwise create an index loop.
692 fn drop_loop_pair(&mut self, ety: Ty<'tcx>, ptr_based: bool) -> BasicBlock {
693 debug!("drop_loop_pair({:?}, {:?})", self, ptr_based);
694 let tcx = self.tcx();
// Cursor type: raw pointer for ptr-based iteration, usize index otherwise
// (the `else` arm is elided in this view).
695 let iter_ty = if ptr_based {
701 let cur = self.new_temp(iter_ty);
702 let length = Place::Local(self.new_temp(tcx.types.usize));
// End sentinel: an end *pointer* when ptr-based, else the length itself.
703 let length_or_end = if ptr_based {
704 Place::Local(self.new_temp(iter_ty))
// Unwind loop first, so the normal loop's drops can unwind into it.
709 let unwind = self.unwind.map(|unwind| {
710 self.drop_loop(unwind,
718 let succ = self.succ; // FIXME(#6393)
719 let loop_block = self.drop_loop(
// Preheader: initialize length, cursor and end before entering the loop.
727 let cur = Place::Local(cur);
728 let zero = self.constant_usize(0);
729 let mut drop_block_stmts = vec![];
730 drop_block_stmts.push(self.assign(&length, Rvalue::Len(self.place.clone())));
732 let tmp_ty = tcx.mk_mut_ptr(self.place_ty(self.place));
733 let tmp = Place::Local(self.new_temp(tmp_ty));
735 // cur = tmp as *mut T;
736 // end = Offset(cur, len);
737 drop_block_stmts.push(self.assign(&tmp, Rvalue::Ref(
738 tcx.types.re_erased, BorrowKind::Mut, self.place.clone()
740 drop_block_stmts.push(self.assign(&cur, Rvalue::Cast(
741 CastKind::Misc, Operand::Move(tmp.clone()), iter_ty
743 drop_block_stmts.push(self.assign(&length_or_end,
744 Rvalue::BinaryOp(BinOp::Offset,
745 Operand::Copy(cur.clone()), Operand::Move(length.clone())
748 // index = 0 (length already pushed)
749 drop_block_stmts.push(self.assign(&cur, Rvalue::Use(zero)));
751 let drop_block = self.elaborator.patch().new_block(BasicBlockData {
752 statements: drop_block_stmts,
753 is_cleanup: unwind.is_cleanup(),
754 terminator: Some(Terminator {
755 source_info: self.source_info,
756 kind: TerminatorKind::Goto { target: loop_block }
760 // FIXME(#34708): handle partially-dropped array/slice elements.
// Reset the deep flags before looping (elements can't be individually
// tracked here), and guard the whole thing on the shallow flag.
761 let reset_block = self.drop_flag_reset_block(DropFlagMode::Deep, drop_block, unwind);
762 self.drop_flag_test_block(reset_block, succ, unwind)
765 /// The slow-path - create an "open", elaborated drop for a type
766 /// which is moved-out-of only partially, and patch `bb` to a jump
767 /// to it. This must not be called on ADTs with a destructor,
768 /// as these can't be moved-out-of, except for `Box<T>`, which is
771 /// This creates a "drop ladder" that drops the needed fields of the
772 /// ADT, both in the success case or if one of the destructors fail.
773 fn open_drop<'a>(&mut self) -> BasicBlock {
// Dispatch on the dropped place's type.
774 let ty = self.place_ty(self.place);
776 ty::TyClosure(def_id, substs) |
777 // Note that `elaborate_drops` only drops the upvars of a generator,
778 // and this is ok because `open_drop` here can only be reached
779 // within that own generator's resume function.
780 // This should only happen for the self argument on the resume function.
781 // It effectively only contains upvars until the generator transformation runs.
782 // See librustc_mir/transform/generator.rs for more details.
783 ty::TyGenerator(def_id, substs, _) => {
// Closures/generators: treat the captured upvars as a tuple.
784 let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx()).collect();
785 self.open_drop_for_tuple(&tys)
787 ty::TyTuple(tys, _) => {
788 self.open_drop_for_tuple(tys)
790 ty::TyAdt(def, _) if def.is_box() => {
791 self.open_drop_for_box(ty.boxed_ty())
793 ty::TyAdt(def, substs) => {
794 self.open_drop_for_adt(def, substs)
// Trait objects can't be partially moved: a plain conditional drop.
796 ty::TyDynamic(..) => {
797 let unwind = self.unwind; // FIXME(#6393)
798 let succ = self.succ;
799 self.complete_drop(Some(DropFlagMode::Deep), succ, unwind)
// Arrays pass their (const-evaluated) length; slices pass None.
801 ty::TyArray(ety, size) => self.open_drop_for_array(
802 ety, size.val.to_const_int().and_then(|v| v.to_u64())),
803 ty::TySlice(ety) => self.open_drop_for_array(ety, None),
805 _ => bug!("open drop from non-ADT `{:?}`", ty)
809 /// Return a basic block that drop a place using the context
810 /// and path in `c`. If `mode` is something, also clear `c`
813 /// if FLAG(self.path)
814 /// if let Some(mode) = mode: FLAG(self.path)[mode] = false
816 fn complete_drop<'a>(&mut self,
817 drop_mode: Option<DropFlagMode>,
819 unwind: Unwind) -> BasicBlock
821 debug!("complete_drop({:?},{:?})", self, drop_mode);
// drop -> (optional flag reset) -> flag test guarding the whole thing.
823 let drop_block = self.drop_block(succ, unwind);
824 let drop_block = if let Some(mode) = drop_mode {
825 self.drop_flag_reset_block(mode, drop_block, unwind)
830 self.drop_flag_test_block(drop_block, succ, unwind)
// A block that clears the drop flag for `self.path` (per `mode`) and
// then jumps to `succ`.
833 fn drop_flag_reset_block(&mut self,
836 unwind: Unwind) -> BasicBlock
838 debug!("drop_flag_reset_block({:?},{:?})", self, mode);
840 let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
841 let block_start = Location { block: block, statement_index: 0 };
842 self.elaborator.clear_drop_flag(block_start, self.path, mode);
// A fresh Drop block for this context, recursively elaborated.
846 fn elaborated_drop_block<'a>(&mut self) -> BasicBlock {
847 debug!("elaborated_drop_block({:?})", self);
848 let unwind = self.unwind; // FIXME(#6393)
849 let succ = self.succ;
850 let blk = self.drop_block(succ, unwind);
851 self.elaborate_drop(blk);
// Free a box's allocation, guarded on the drop flag (skip the free if
// the box was already moved out of).
855 fn box_free_block<'a>(
861 let block = self.unelaborated_free_block(ty, target, unwind);
862 self.drop_flag_test_block(block, target, unwind)
// A block that calls the `box_free` lang item on the box, clearing the
// shallow drop flag at the start of the block.
865 fn unelaborated_free_block<'a>(
871 let tcx = self.tcx();
872 let unit_temp = Place::Local(self.new_temp(tcx.mk_nil()));
873 let free_func = tcx.require_lang_item(lang_items::BoxFreeFnLangItem);
874 let substs = tcx.mk_substs(iter::once(Kind::from(ty)));
876 let call = TerminatorKind::Call {
877 func: Operand::function_handle(tcx, free_func, substs, self.source_info.span),
878 args: vec![Operand::Move(self.place.clone())],
879 destination: Some((unit_temp, target)),
882 let free_block = self.new_block(unwind, call);
884 let block_start = Location { block: free_block, statement_index: 0 };
885 self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
// A bare Drop terminator for `self.place` targeting `target`.
889 fn drop_block<'a>(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
890 let block = TerminatorKind::Drop {
891 location: self.place.clone(),
893 unwind: unwind.into_option()
895 self.new_block(unwind, block)
// Branch on the drop flag: statically-dead paths go straight to
// `on_unset`, statically-live paths to `on_set`, and conditional paths
// get a fresh `if flag { on_set } else { on_unset }` block.
898 fn drop_flag_test_block(&mut self,
900 on_unset: BasicBlock,
904 let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
905 debug!("drop_flag_test_block({:?},{:?},{:?},{:?}) - {:?}",
906 self, on_set, on_unset, unwind, style);
909 DropStyle::Dead => on_unset,
910 DropStyle::Static => on_set,
911 DropStyle::Conditional | DropStyle::Open => {
912 let flag = self.elaborator.get_drop_flag(self.path).unwrap();
913 let term = TerminatorKind::if_(self.tcx(), flag, on_set, on_unset);
914 self.new_block(unwind, term)
// Allocate a new block with terminator `k`, inheriting the cleanup-ness
// of the current unwind context.
919 fn new_block<'a>(&mut self,
921 k: TerminatorKind<'tcx>)
924 self.elaborator.patch().new_block(BasicBlockData {
926 terminator: Some(Terminator {
927 source_info: self.source_info, kind: k
929 is_cleanup: unwind.is_cleanup()
// Allocate a new MIR temporary of type `ty` at the drop's span.
933 fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
934 self.elaborator.patch().new_temp(ty, self.source_info.span)
// Location of `bb`'s terminator, accounting for patched-in statements.
937 fn terminator_loc(&mut self, bb: BasicBlock) -> Location {
938 let mir = self.elaborator.mir();
939 self.elaborator.patch().terminator_loc(mir, bb)
// A constant `usize` operand (note the `u16` parameter bounds `val`).
942 fn constant_usize(&self, val: u16) -> Operand<'tcx> {
943 Operand::Constant(box Constant {
944 span: self.source_info.span,
945 ty: self.tcx().types.usize,
946 literal: Literal::Value {
947 value: self.tcx().mk_const(ty::Const {
948 val: ConstVal::Integral(self.tcx().const_usize(val)),
949 ty: self.tcx().types.usize
955 fn assign(&self, lhs: &Place<'tcx>, rhs: Rvalue<'tcx>) -> Statement<'tcx> {
957 source_info: self.source_info,
958 kind: StatementKind::Assign(lhs.clone(), rhs)