1 // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 use super::gather_moves::{HasMoveData, MoveData, MovePathIndex, LookupResult};
12 use super::dataflow::{MaybeInitializedLvals, MaybeUninitializedLvals};
13 use super::dataflow::{DataflowResults};
14 use super::{drop_flag_effects_for_location, on_all_children_bits};
15 use super::on_lookup_result_bits;
16 use super::{DropFlagState, MoveDataParamEnv};
17 use super::patch::MirPatch;
18 use rustc::ty::{self, Ty, TyCtxt};
19 use rustc::ty::subst::{Kind, Subst, Substs};
20 use rustc::ty::util::IntTypeExt;
22 use rustc::mir::transform::{Pass, MirPass, MirSource};
23 use rustc::middle::const_val::ConstVal;
24 use rustc::middle::lang_items;
25 use rustc::util::nodemap::FxHashMap;
26 use rustc_data_structures::indexed_set::IdxSetBuf;
27 use rustc_data_structures::indexed_vec::Idx;
// MIR pass that rewrites `Drop`/`DropAndReplace` terminators into
// drop-flag-guarded drops, eliding statically-dead drops.
34 pub struct ElaborateDrops;
36 impl<'tcx> MirPass<'tcx> for ElaborateDrops {
37 fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
38 src: MirSource, mir: &mut Mir<'tcx>)
40 debug!("elaborate_drops({:?} @ {:?})", src, mir.span);
42 MirSource::Fn(..) => {},
45 let id = src.item_id();
46 let param_env = ty::ParameterEnvironment::for_item(tcx, id);
47 let move_data = MoveData::gather_moves(mir, tcx, ¶m_env);
48 let elaborate_patch = {
50 let env = MoveDataParamEnv {
55 super::do_dataflow(tcx, mir, id, &[],
56 MaybeInitializedLvals::new(tcx, mir, &env),
57 |bd, p| &bd.move_data().move_paths[p]);
59 super::do_dataflow(tcx, mir, id, &[],
60 MaybeUninitializedLvals::new(tcx, mir, &env),
61 |bd, p| &bd.move_data().move_paths[p]);
67 flow_inits: flow_inits,
68 flow_uninits: flow_uninits,
69 drop_flags: FxHashMap(),
70 patch: MirPatch::new(mir),
73 elaborate_patch.apply(mir);
// Marker impl registering `ElaborateDrops` with the pass infrastructure.
77 impl Pass for ElaborateDrops {}
// Paired bitsets tracking, per move path, whether the value may be
// initialized (`live`) and/or may be uninitialized (`dead`) at a point.
// Both can be set at once: that is the "maybe" case needing a drop flag.
79 struct InitializationData {
80 live: IdxSetBuf<MovePathIndex>,
81 dead: IdxSetBuf<MovePathIndex>
84 impl InitializationData {
// Advance the live/dead sets across the effects of the statement at `loc`,
// flipping bits for each drop-flag state change reported by the callback.
85 fn apply_location<'a,'tcx>(&mut self,
86 tcx: TyCtxt<'a, 'tcx, 'tcx>,
88 env: &MoveDataParamEnv<'tcx>,
91 drop_flag_effects_for_location(tcx, mir, env, loc, |path, df| {
92 debug!("at location {:?}: setting {:?} to {:?}",
// Path became initialized: clear its "maybe dead" bit.
95 DropFlagState::Present => {
97 self.dead.remove(&path);
// Path became uninitialized: mark dead, clear "maybe live".
99 DropFlagState::Absent => {
100 self.dead.add(&path);
101 self.live.remove(&path);
// Returns (maybe_live, maybe_dead) for `path` at the tracked point.
107 fn state(&self, path: MovePathIndex) -> (bool, bool) {
108 (self.live.contains(&path), self.dead.contains(&path))
// Debug impl so `InitializationData` can appear in `debug!` output;
// the formatter argument is unused in the visible portion of the body.
112 impl fmt::Debug for InitializationData {
113 fn fmt(&self, _f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
// Working state for the elaboration: dataflow results, the map from move
// paths to their boolean drop-flag locals, and the patch being built.
118 struct ElaborateDropsCtxt<'a, 'tcx: 'a> {
119 tcx: TyCtxt<'a, 'tcx, 'tcx>,
121 env: &'a MoveDataParamEnv<'tcx>,
122 flow_inits: DataflowResults<MaybeInitializedLvals<'a, 'tcx>>,
123 flow_uninits: DataflowResults<MaybeUninitializedLvals<'a, 'tcx>>,
124 drop_flags: FxHashMap<MovePathIndex, Local>,
125 patch: MirPatch<'tcx>,
// Per-drop-site context threaded through the elaboration helpers:
// the lvalue being dropped, its init-state snapshot, and the unwind target.
128 #[derive(Copy, Clone, Debug)]
129 struct DropCtxt<'a, 'tcx: 'a> {
130 source_info: SourceInfo,
133 init_data: &'a InitializationData,
135 lvalue: &'a Lvalue<'tcx>,
138 unwind: Option<BasicBlock>
141 impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
// Convenience accessor for the gathered move data.
142 fn move_data(&self) -> &'b MoveData<'tcx> { &self.env.move_data }
// Accessor for the parameter environment (body not visible in this view).
143 fn param_env(&self) -> &'b ty::ParameterEnvironment<'tcx> {
// Snapshot the live/dead sets at `loc`: start from the block-entry
// dataflow sets, then replay the statements before `loc.statement_index`.
147 fn initialization_data_at(&self, loc: Location) -> InitializationData {
148 let mut data = InitializationData {
149 live: self.flow_inits.sets().on_entry_set_for(loc.block.index())
151 dead: self.flow_uninits.sets().on_entry_set_for(loc.block.index())
154 for stmt in 0..loc.statement_index {
155 data.apply_location(self.tcx, self.mir, self.env,
156 Location { block: loc.block, statement_index: stmt });
// Allocate a fresh boolean temp as the drop flag for `index`,
// reusing an already-created flag thanks to the entry API.
161 fn create_drop_flag(&mut self, index: MovePathIndex) {
163 let patch = &mut self.patch;
164 debug!("create_drop_flag({:?})", self.mir.span);
165 self.drop_flags.entry(index).or_insert_with(|| {
166 patch.new_temp(tcx.types.bool)
// Look up the drop-flag local for `index`, as an lvalue, if one exists.
170 fn drop_flag(&mut self, index: MovePathIndex) -> Option<Lvalue<'tcx>> {
171 self.drop_flags.get(&index).map(|t| Lvalue::Local(*t))
174 /// Create a patch that elaborates all drops in the input MIR.
// Driver: first decide which paths need flags, then rewrite the drop
// terminators, then emit every drop-flag assignment. Consumes `self`;
// the resulting `MirPatch` is returned for the caller to apply.
176 fn elaborate(mut self) -> MirPatch<'tcx>
178 self.collect_drop_flags();
180 self.elaborate_drops();
182 self.drop_flags_on_init();
183 self.drop_flags_for_fn_rets();
184 self.drop_flags_for_args();
185 self.drop_flags_for_locs();
// True if the type at `path` needs drop glue in the current param env.
190 fn path_needs_drop(&self, path: MovePathIndex) -> bool
192 let lvalue = &self.move_data().move_paths[path].lvalue;
193 let ty = lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
194 debug!("path_needs_drop({:?}, {:?} : {:?})", path, lvalue, ty);
196 self.tcx.type_needs_drop_given_env(ty, self.param_env())
// Scan every Drop/DropAndReplace terminator; for each dropped path whose
// children are *maybe* live and *maybe* dead at the drop, create a flag.
199 fn collect_drop_flags(&mut self)
201 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
202 let terminator = data.terminator();
203 let location = match terminator.kind {
204 TerminatorKind::Drop { ref location, .. } |
205 TerminatorKind::DropAndReplace { ref location, .. } => location,
// Init state at the terminator (after all statements of the block).
209 let init_data = self.initialization_data_at(Location {
211 statement_index: data.statements.len()
214 let path = self.move_data().rev_lookup.find(location);
215 debug!("collect_drop_flags: {:?}, lv {:?} ({:?})",
218 let path = match path {
219 LookupResult::Exact(e) => e,
220 LookupResult::Parent(None) => continue,
// Untracked child of a tracked parent: the parent must be
// (maybe) initialized, otherwise this drop is a dataflow bug.
221 LookupResult::Parent(Some(parent)) => {
222 let (_maybe_live, maybe_dead) = init_data.state(parent);
224 span_bug!(terminator.source_info.span,
225 "drop of untracked, uninitialized value {:?}, lv {:?} ({:?})",
// A flag is only needed when static analysis cannot decide:
// the child may be both live and dead at this drop site.
232 on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
233 if self.path_needs_drop(child) {
234 let (maybe_live, maybe_dead) = init_data.state(child);
235 debug!("collect_drop_flags: collecting {:?} from {:?}@{:?} - {:?}",
236 child, location, path, (maybe_live, maybe_dead));
237 if maybe_live && maybe_dead {
238 self.create_drop_flag(child)
// Rewrite each Drop/DropAndReplace terminator in the original blocks
// into its elaborated form.
245 fn elaborate_drops(&mut self)
247 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
248 let loc = Location { block: bb, statement_index: data.statements.len() };
249 let terminator = data.terminator();
251 let resume_block = self.patch.resume_block();
252 match terminator.kind {
253 TerminatorKind::Drop { ref location, target, unwind } => {
254 let init_data = self.initialization_data_at(loc);
255 match self.move_data().rev_lookup.find(location) {
256 LookupResult::Exact(path) => {
257 self.elaborate_drop(&DropCtxt {
258 source_info: terminator.source_info,
259 is_cleanup: data.is_cleanup,
260 init_data: &init_data,
// Cleanup blocks cannot unwind again; otherwise default the
// unwind edge to the shared resume block.
264 unwind: if data.is_cleanup {
267 Some(Option::unwrap_or(unwind, resume_block))
// Drops of untracked values are a bug at this stage.
271 LookupResult::Parent(..) => {
272 span_bug!(terminator.source_info.span,
273 "drop of untracked value {:?}", bb);
// DropAndReplace is desugared separately; it never appears
// in cleanup blocks (asserted below).
277 TerminatorKind::DropAndReplace { ref location, ref value,
280 assert!(!data.is_cleanup);
282 self.elaborate_replace(
293 /// Elaborate a MIR `replace` terminator. This instruction
294 /// is not directly handled by translation, and therefore
295 /// must be desugared.
297 /// The desugaring drops the location if needed, and then writes
298 /// the value (including setting the drop flag) over it in *both* arms.
300 /// The `replace` terminator can also be called on lvalues that
301 /// are not tracked by elaboration (for example,
302 /// `replace x[i] <- tmp0`). The borrow checker requires that
303 /// these locations are initialized before the assignment,
304 /// so we just generate an unconditional drop.
305 fn elaborate_replace(
308 location: &Lvalue<'tcx>,
309 value: &Operand<'tcx>,
311 unwind: Option<BasicBlock>)
314 let data = &self.mir[bb];
315 let terminator = data.terminator();
// The assignment performed after the old value is dropped, used in
// both the normal and the unwind arm.
317 let assign = Statement {
318 kind: StatementKind::Assign(location.clone(), Rvalue::Use(value.clone())),
319 source_info: terminator.source_info
// Unwind arm: assign, then continue unwinding (defaulting to resume).
322 let unwind = unwind.unwrap_or(self.patch.resume_block());
323 let unwind = self.patch.new_block(BasicBlockData {
324 statements: vec![assign.clone()],
325 terminator: Some(Terminator {
326 kind: TerminatorKind::Goto { target: unwind },
// Normal arm: assign, then jump to the original target.
332 let target = self.patch.new_block(BasicBlockData {
333 statements: vec![assign],
334 terminator: Some(Terminator {
335 kind: TerminatorKind::Goto { target: target },
338 is_cleanup: data.is_cleanup,
341 match self.move_data().rev_lookup.find(location) {
342 LookupResult::Exact(path) => {
343 debug!("elaborate_drop_and_replace({:?}) - tracked {:?}", terminator, path);
344 let init_data = self.initialization_data_at(loc);
346 self.elaborate_drop(&DropCtxt {
347 source_info: terminator.source_info,
348 is_cleanup: data.is_cleanup,
349 init_data: &init_data,
// After the write, every child path is initialized again in
// both arms, so set all their drop flags to Present.
355 on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
356 self.set_drop_flag(Location { block: target, statement_index: 0 },
357 child, DropFlagState::Present);
358 self.set_drop_flag(Location { block: unwind, statement_index: 0 },
359 child, DropFlagState::Present);
362 LookupResult::Parent(parent) => {
363 // drop and replace behind a pointer/array/whatever. The location
364 // must be initialized.
365 debug!("elaborate_drop_and_replace({:?}) - untracked {:?}", terminator, parent);
366 self.patch.patch_terminator(bb, TerminatorKind::Drop {
367 location: location.clone(),
375 /// This elaborates a single drop instruction, located at `bb`, and
378 /// The elaborated drop checks the drop flags to only drop what
381 /// In addition, the relevant drop flags also need to be cleared
382 /// to avoid double-drops. However, in the middle of a complex
383 /// drop, one must avoid clearing some of the flags before they
384 /// are read, as that would cause a memory leak.
386 /// In particular, when dropping an ADT, multiple fields may be
387 /// joined together under the `rest` subpath. They are all controlled
388 /// by the primary drop flag, but only the last rest-field dropped
389 /// should clear it (and it must also not clear anything else).
391 /// FIXME: I think we should just control the flags externally
392 /// and then we do not need this machinery.
393 fn elaborate_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, bb: BasicBlock) {
394 debug!("elaborate_drop({:?})", c);
// Classify the drop site by aggregating the init state of all
// children that need drop glue.
396 let mut some_live = false;
397 let mut some_dead = false;
398 let mut children_count = 0;
399 on_all_children_bits(
400 self.tcx, self.mir, self.move_data(),
402 if self.path_needs_drop(child) {
403 let (live, dead) = c.init_data.state(child);
404 debug!("elaborate_drop: state({:?}) = {:?}",
405 child, (live, dead));
412 debug!("elaborate_drop({:?}): live - {:?}", c,
413 (some_live, some_dead));
414 match (some_live, some_dead) {
// Statically dead: replace the drop with a plain goto.
415 (false, false) | (false, true) => {
416 // dead drop - patch it out
417 self.patch.patch_terminator(bb, TerminatorKind::Goto {
// Statically live: keep an unconditional drop, clear the flags.
422 // static drop - just set the flag
423 self.patch.patch_terminator(bb, TerminatorKind::Drop {
424 location: c.lvalue.clone(),
428 self.drop_flags_for_drop(c, bb);
// Maybe-live: either a single conditional drop or, for partially
// moved values, an "open" field-by-field drop ladder.
432 let drop_bb = if children_count == 1 || self.must_complete_drop(c) {
433 self.conditional_drop(c)
437 self.patch.patch_terminator(bb, TerminatorKind::Goto {
444 /// Return the lvalue and move path for each field of `variant`,
445 /// (the move path is `None` if the field is a rest field).
446 fn move_paths_for_fields(&self,
447 base_lv: &Lvalue<'tcx>,
448 variant_path: MovePathIndex,
449 variant: &'tcx ty::VariantDef,
450 substs: &'tcx Substs<'tcx>)
451 -> Vec<(Lvalue<'tcx>, Option<MovePathIndex>)>
453 variant.fields.iter().enumerate().map(|(i, f)| {
// Find the child move path whose projection is field `i`, if tracked.
455 super::move_path_children_matching(self.move_data(), variant_path, |p| {
458 elem: ProjectionElem::Field(idx, _), ..
459 } => idx.index() == i,
// Normalize the field type (it may mention associated types).
465 self.tcx.normalize_associated_type_in_env(
466 &f.ty(self.tcx, substs),
469 (base_lv.clone().field(Field::new(i), field_ty), subpath)
473 /// Create one-half of the drop ladder for a list of fields, and return
474 /// the list of steps in it in reverse order.
476 /// `unwind_ladder` is such a list of steps in reverse order,
477 /// which is called instead of the next step if the drop unwinds
478 /// (the first field is never reached). If it is `None`, all
479 /// unwind targets are left blank.
480 fn drop_halfladder<'a>(&mut self,
481 c: &DropCtxt<'a, 'tcx>,
482 unwind_ladder: Option<Vec<BasicBlock>>,
484 fields: &[(Lvalue<'tcx>, Option<MovePathIndex>)],
488 let mut unwind_succ = if is_cleanup {
// Bottom of the ladder: a goto block that clears the master flag
// before continuing to the overall successor.
494 let mut succ = self.new_block(
495 c, c.is_cleanup, TerminatorKind::Goto { target: succ }
498 // Always clear the "master" drop flag at the bottom of the
499 // ladder. This is needed because the "master" drop flag
500 // protects the ADT's discriminant, which is invalidated
501 // after the ADT is dropped.
503 Location { block: succ, statement_index: 0 },
505 DropFlagState::Absent
// Build the ladder from the last field upwards, chaining each step's
// successor to the previously built block.
508 fields.iter().rev().enumerate().map(|(i, &(ref lv, path))| {
509 succ = if let Some(path) = path {
510 debug!("drop_ladder: for std field {} ({:?})", i, lv);
// Tracked field: fully elaborate its drop (flags consulted).
512 self.elaborated_drop_block(&DropCtxt {
513 source_info: c.source_info,
514 is_cleanup: is_cleanup,
515 init_data: c.init_data,
522 debug!("drop_ladder: for rest field {} ({:?})", i, lv);
// Untracked ("rest") field: unconditional drop guarded only by
// the master flag; do not clear flags here (see elaborate_drop docs).
524 self.complete_drop(&DropCtxt {
525 source_info: c.source_info,
526 is_cleanup: is_cleanup,
527 init_data: c.init_data,
535 unwind_succ = unwind_ladder.as_ref().map(|p| p[i]);
540 /// Create a full drop ladder, consisting of 2 connected half-drop-ladders
542 /// For example, with 3 fields, the drop ladder is
545 /// ELAB(drop location.0 [target=.d1, unwind=.c1])
547 /// ELAB(drop location.1 [target=.d2, unwind=.c2])
549 /// ELAB(drop location.2 [target=`c.succ`, unwind=`c.unwind`])
551 /// ELAB(drop location.1 [target=.c2])
553 /// ELAB(drop location.2 [target=`c.unwind])
554 fn drop_ladder<'a>(&mut self,
555 c: &DropCtxt<'a, 'tcx>,
556 fields: Vec<(Lvalue<'tcx>, Option<MovePathIndex>)>)
559 debug!("drop_ladder({:?}, {:?})", c, fields);
// Fields with types that need no drop glue are skipped entirely.
561 let mut fields = fields;
562 fields.retain(|&(ref lvalue, _)| {
563 let ty = lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
564 self.tcx.type_needs_drop_given_env(ty, self.param_env())
567 debug!("drop_ladder - fields needing drop: {:?}", fields);
// The unwind half-ladder is built first (cleanup context), then the
// normal half-ladder is wired to it; cleanup drops get no unwind arm.
569 let unwind_ladder = if c.is_cleanup {
572 Some(self.drop_halfladder(c, None, c.unwind.unwrap(), &fields, true))
575 self.drop_halfladder(c, unwind_ladder, c.succ, &fields, c.is_cleanup)
576 .last().cloned().unwrap_or(c.succ)
// Build a drop ladder over the tuple's elements: pair each element
// lvalue with its child move path (if tracked) and hand off to drop_ladder.
579 fn open_drop_for_tuple<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, tys: &[Ty<'tcx>])
582 debug!("open_drop_for_tuple({:?}, {:?})", c, tys);
584 let fields = tys.iter().enumerate().map(|(i, &ty)| {
585 (c.lvalue.clone().field(Field::new(i), ty),
586 super::move_path_children_matching(
587 self.move_data(), c.path, |proj| match proj {
589 elem: ProjectionElem::Field(f, _), ..
596 self.drop_ladder(c, fields)
// Drop a Box: elaborate the drop of the boxed contents (`*box`), then
// free the allocation on both the success and unwind paths.
599 fn open_drop_for_box<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, ty: Ty<'tcx>)
602 debug!("open_drop_for_box({:?}, {:?})", c, ty);
// The deref projection child is the move path of the boxed value.
604 let interior_path = super::move_path_children_matching(
605 self.move_data(), c.path, |proj| match proj {
606 &Projection { elem: ProjectionElem::Deref, .. } => true,
610 let interior = c.lvalue.clone().deref();
611 let inner_c = DropCtxt {
// Both exits of the interior drop lead through a box_free block.
613 unwind: c.unwind.map(|u| {
614 self.box_free_block(c, ty, u, true)
616 succ: self.box_free_block(c, ty, c.succ, c.is_cleanup),
621 self.elaborated_drop_block(&inner_c)
// Drop an ADT field-by-field. Single-variant ADTs get a straight drop
// ladder; enums get one ladder per (tracked) variant behind a
// discriminant switch.
624 fn open_drop_for_adt<'a>(&mut self, c: &DropCtxt<'a, 'tcx>,
625 adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
627 debug!("open_drop_for_adt({:?}, {:?}, {:?})", c, adt, substs);
629 match adt.variants.len() {
// Struct/single-variant case: ladder over the fields directly.
631 let fields = self.move_paths_for_fields(
637 self.drop_ladder(c, fields)
// Enum case: collect a (discriminant value, ladder block) pair per
// variant; untracked variants fall into a shared `otherwise` drop.
640 let mut values = Vec::with_capacity(adt.variants.len());
641 let mut blocks = Vec::with_capacity(adt.variants.len());
642 let mut otherwise = None;
643 for (variant_index, discr) in adt.discriminants(self.tcx).enumerate() {
644 let subpath = super::move_path_children_matching(
645 self.move_data(), c.path, |proj| match proj {
647 elem: ProjectionElem::Downcast(_, idx), ..
648 } => idx == variant_index,
651 if let Some(variant_path) = subpath {
652 let base_lv = c.lvalue.clone().elem(
653 ProjectionElem::Downcast(adt, variant_index)
655 let fields = self.move_paths_for_fields(
658 &adt.variants[variant_index],
661 blocks.push(self.drop_ladder(c, fields));
663 // variant not found - drop the entire enum
664 if let None = otherwise {
665 otherwise = Some(self.complete_drop(c, true));
669 if let Some(block) = otherwise {
674 // If there are multiple variants, then if something
675 // is present within the enum the discriminant, tracked
676 // by the rest path, must be initialized.
678 // Additionally, we do not want to switch on the
679 // discriminant after it is free-ed, because that
680 // way lies only trouble.
// Read the discriminant into a temp and switch on it to reach the
// per-variant ladders.
681 let discr_ty = adt.repr.discr_type().to_ty(self.tcx);
682 let discr = Lvalue::Local(self.patch.new_temp(discr_ty));
683 let switch_block = self.patch.new_block(BasicBlockData {
686 source_info: c.source_info,
687 kind: StatementKind::Assign(discr.clone(),
688 Rvalue::Discriminant(c.lvalue.clone()))
691 terminator: Some(Terminator {
692 source_info: c.source_info,
693 kind: TerminatorKind::SwitchInt {
694 discr: Operand::Consume(discr),
696 values: From::from(values),
700 is_cleanup: c.is_cleanup,
// Guard the whole switch behind the master drop flag.
702 self.drop_flag_test_block(c, switch_block)
707 /// The slow-path - create an "open", elaborated drop for a type
708 /// which is moved-out-of only partially, and patch `bb` to a jump
709 /// to it. This must not be called on ADTs with a destructor,
710 /// as these can't be moved-out-of, except for `Box<T>`, which is
713 /// This creates a "drop ladder" that drops the needed fields of the
714 /// ADT, both in the success case or if one of the destructors fail.
715 fn open_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
716 let ty = c.lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
// Dispatch on the dropped type: closures are treated as tuples of
// their upvars; boxes get the box-free path; other ADTs get ladders.
718 ty::TyClosure(def_id, substs) => {
719 let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx).collect();
720 self.open_drop_for_tuple(c, &tys)
722 ty::TyTuple(tys, _) => {
723 self.open_drop_for_tuple(c, tys)
725 ty::TyAdt(def, _) if def.is_box() => {
726 self.open_drop_for_box(c, ty.boxed_ty())
728 ty::TyAdt(def, substs) => {
729 self.open_drop_for_adt(c, def, substs)
731 _ => bug!("open drop from non-ADT `{:?}`", ty)
735 /// Return a basic block that drop an lvalue using the context
736 /// and path in `c`. If `update_drop_flag` is true, also
740 /// if(update_drop_flag) FLAG(c.path) = false
742 fn complete_drop<'a>(
744 c: &DropCtxt<'a, 'tcx>,
745 update_drop_flag: bool)
748 debug!("complete_drop({:?},{:?})", c, update_drop_flag);
750 let drop_block = self.drop_block(c);
// Optionally clear the path's flag at the top of the drop block.
751 if update_drop_flag {
753 Location { block: drop_block, statement_index: 0 },
755 DropFlagState::Absent
// The drop itself is still guarded by the flag test.
759 self.drop_flag_test_block(c, drop_block)
762 /// Create a simple conditional drop.
765 /// FLAGS(c.lv) = false
// Flag-guarded drop of the whole value; clears all child flags at the
// drop and guards entry on the path's own flag.
767 fn conditional_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>)
770 debug!("conditional_drop({:?})", c);
771 let drop_bb = self.drop_block(c);
772 self.drop_flags_for_drop(c, drop_bb);
774 self.drop_flag_test_block(c, drop_bb)
// Append a fresh block to the patch containing only terminator `k`,
// inheriting `c`'s source info and the given cleanup-ness.
777 fn new_block<'a>(&mut self,
778 c: &DropCtxt<'a, 'tcx>,
780 k: TerminatorKind<'tcx>)
783 self.patch.new_block(BasicBlockData {
785 terminator: Some(Terminator {
786 source_info: c.source_info, kind: k
788 is_cleanup: is_cleanup
// Create a drop block for `c` and then recursively elaborate the drop
// terminator inside it.
792 fn elaborated_drop_block<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
793 debug!("elaborated_drop_block({:?})", c);
794 let blk = self.drop_block(c);
795 self.elaborate_drop(c, blk);
// Convenience wrapper: test the flag with `c.succ` as the "unset" target.
799 fn drop_flag_test_block<'a>(&mut self,
800 c: &DropCtxt<'a, 'tcx>,
803 self.drop_flag_test_block_with_succ(c, c.is_cleanup, on_set, c.succ)
// Emit the flag test for `c.path`. Statically-known states short-circuit
// to the appropriate target; only the genuinely-unknown case emits an
// actual `if flag` branch block.
806 fn drop_flag_test_block_with_succ<'a>(&mut self,
807 c: &DropCtxt<'a, 'tcx>,
810 on_unset: BasicBlock)
813 let (maybe_live, maybe_dead) = c.init_data.state(c.path);
814 debug!("drop_flag_test_block({:?},{:?},{:?}) - {:?}",
815 c, is_cleanup, on_set, (maybe_live, maybe_dead));
817 match (maybe_live, maybe_dead) {
// Definitely dead / definitely live: no runtime test needed.
818 (false, _) => on_unset,
819 (true, false) => on_set,
// Unknown: branch on the drop flag (which must exist here).
821 let flag = self.drop_flag(c.path).unwrap();
822 let term = TerminatorKind::if_(self.tcx, Operand::Consume(flag), on_set, on_unset);
823 self.new_block(c, is_cleanup, term)
// Fresh block whose terminator drops `c.lvalue` unconditionally.
828 fn drop_block<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
829 self.new_block(c, c.is_cleanup, TerminatorKind::Drop {
830 location: c.lvalue.clone(),
// Flag-guarded box_free: frees the allocation only if the box's own
// drop flag says it may still be live, falling through to `target`.
836 fn box_free_block<'a>(
838 c: &DropCtxt<'a, 'tcx>,
843 let block = self.unelaborated_free_block(c, ty, target, is_cleanup);
844 self.drop_flag_test_block_with_succ(c, is_cleanup, block, target)
// Build a block that calls the `box_free` lang item on `c.lvalue`,
// clearing the drop flag (if any) first, then continuing to `target`.
847 fn unelaborated_free_block<'a>(
849 c: &DropCtxt<'a, 'tcx>,
854 let mut statements = vec![];
// Clear the flag before freeing so a later unwind won't double-free.
855 if let Some(&flag) = self.drop_flags.get(&c.path) {
856 statements.push(Statement {
857 source_info: c.source_info,
858 kind: StatementKind::Assign(
860 self.constant_bool(c.source_info.span, false)
// Monomorphize `box_free::<ty>` and call it; the unit return value is
// written into a throwaway temp.
866 let unit_temp = Lvalue::Local(self.patch.new_temp(tcx.mk_nil()));
867 let free_func = tcx.require_lang_item(lang_items::BoxFreeFnLangItem);
868 let substs = tcx.mk_substs(iter::once(Kind::from(ty)));
869 let fty = tcx.item_type(free_func).subst(tcx, substs);
871 self.patch.new_block(BasicBlockData {
872 statements: statements,
873 terminator: Some(Terminator {
874 source_info: c.source_info, kind: TerminatorKind::Call {
875 func: Operand::Constant(Constant {
876 span: c.source_info.span,
878 literal: Literal::Item {
883 args: vec![Operand::Consume(c.lvalue.clone())],
884 destination: Some((unit_temp, target)),
888 is_cleanup: is_cleanup
// True when the drop must not be split into an open field-by-field
// ladder — i.e. the type has its own destructor (and is not a Box).
892 fn must_complete_drop<'a>(&self, c: &DropCtxt<'a, 'tcx>) -> bool {
893 // if we have a destructor, we must *not* split the drop.
895 // dataflow can create unneeded children in some cases
896 // - be sure to ignore them.
898 let ty = c.lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
901 ty::TyAdt(def, _) => {
902 if def.has_dtor(self.tcx) && !def.is_box() {
// Warn (rather than ICE): moving out of a type with a dtor
// here indicates a dataflow inconsistency upstream.
903 self.tcx.sess.span_warn(
905 &format!("dataflow bug??? moving out of type with dtor {:?}",
// Build the rvalue `Use(const val: bool)` at `span`, used for drop-flag
// assignments.
916 fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
917 Rvalue::Use(Operand::Constant(Constant {
919 ty: self.tcx.types.bool,
920 literal: Literal::Value { value: ConstVal::Bool(val) }
// If `path` has a drop flag, schedule an assignment of `val` to it at
// `loc`; paths without flags are silently skipped (statically known).
924 fn set_drop_flag(&mut self, loc: Location, path: MovePathIndex, val: DropFlagState) {
925 if let Some(&flag) = self.drop_flags.get(&path) {
926 let span = self.patch.source_info_for_location(self.mir, loc).span;
927 let val = self.constant_bool(span, val.value());
928 self.patch.add_assign(loc, Lvalue::Local(flag), val);
// Initialize every drop flag to `false` at function entry so the flags
// start from a well-defined state.
932 fn drop_flags_on_init(&mut self) {
933 let loc = Location { block: START_BLOCK, statement_index: 0 };
934 let span = self.patch.source_info_for_location(self.mir, loc).span;
935 let false_ = self.constant_bool(span, false);
936 for flag in self.drop_flags.values() {
937 self.patch.add_assign(loc, Lvalue::Local(*flag), false_.clone());
// For calls that have a cleanup edge, mark the call's destination as
// initialized at the start of the normal-return successor block (the
// destination is only written on the non-unwind path).
941 fn drop_flags_for_fn_rets(&mut self) {
942 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
943 if let TerminatorKind::Call {
944 destination: Some((ref lv, tgt)), cleanup: Some(_), ..
945 } = data.terminator().kind {
946 assert!(!self.patch.is_patched(bb));
948 let loc = Location { block: tgt, statement_index: 0 };
949 let path = self.move_data().rev_lookup.find(lv);
950 on_lookup_result_bits(
951 self.tcx, self.mir, self.move_data(), path,
952 |child| self.set_drop_flag(loc, child, DropFlagState::Present)
// Set the drop flags corresponding to function arguments at entry
// (arguments arrive initialized).
958 fn drop_flags_for_args(&mut self) {
959 let loc = Location { block: START_BLOCK, statement_index: 0 };
960 super::drop_flag_effects_for_function_entry(
961 self.tcx, self.mir, self.env, |path, ds| {
962 self.set_drop_flag(loc, path, ds);
// Emit drop-flag updates for every statement/terminator of the original
// (pre-patch) blocks, with special-casing for drop terminators whose
// flag handling is done by the elaboration itself.
967 fn drop_flags_for_locs(&mut self) {
968 // We intentionally iterate only over the *old* basic blocks.
970 // Basic blocks created by drop elaboration update their
971 // drop flags by themselves, to avoid the drop flags being
972 // clobbered before they are read.
974 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
975 debug!("drop_flags_for_locs({:?})", data);
// `+1` so the loop also visits the terminator position.
976 for i in 0..(data.statements.len()+1) {
977 debug!("drop_flag_for_locs: stmt {}", i);
978 let mut allow_initializations = true;
979 if i == data.statements.len() {
980 match data.terminator().kind {
981 TerminatorKind::Drop { .. } => {
982 // drop elaboration should handle that by itself
985 TerminatorKind::DropAndReplace { .. } => {
986 // this contains the move of the source and
987 // the initialization of the destination. We
988 // only want the former - the latter is handled
989 // by the elaboration code and must be done
990 // *after* the destination is dropped.
991 assert!(self.patch.is_patched(bb));
992 allow_initializations = false;
995 assert!(!self.patch.is_patched(bb));
999 let loc = Location { block: bb, statement_index: i };
1000 super::drop_flag_effects_for_location(
1001 self.tcx, self.mir, self.env, loc, |path, ds| {
// Emit flag-clears always; flag-sets only when allowed above.
1002 if ds == DropFlagState::Absent || allow_initializations {
1003 self.set_drop_flag(loc, path, ds)
1009 // There may be a critical edge after this call,
1010 // so mark the return as initialized *before* the
1012 if let TerminatorKind::Call {
1013 destination: Some((ref lv, _)), cleanup: None, ..
1014 } = data.terminator().kind {
1015 assert!(!self.patch.is_patched(bb));
1017 let loc = Location { block: bb, statement_index: data.statements.len() };
1018 let path = self.move_data().rev_lookup.find(lv);
1019 on_lookup_result_bits(
1020 self.tcx, self.mir, self.move_data(), path,
1021 |child| self.set_drop_flag(loc, child, DropFlagState::Present)
1027 fn drop_flags_for_drop<'a>(&mut self,
1028 c: &DropCtxt<'a, 'tcx>,
1031 let loc = self.patch.terminator_loc(self.mir, bb);
1032 on_all_children_bits(
1033 self.tcx, self.mir, self.move_data(), c.path,
1034 |child| self.set_drop_flag(loc, child, DropFlagState::Absent)