1 // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
13 use rustc::middle::lang_items;
14 use rustc::ty::{self, Ty};
15 use rustc::ty::subst::{Kind, Subst, Substs};
16 use rustc::ty::util::IntTypeExt;
17 use rustc_data_structures::indexed_vec::Idx;
18 use util::patch::MirPatch;
// Whether a move-path's drop flag currently marks the value as live.
// NOTE(review): this listing is elided — the enum's closing brace, the
// `impl DropFlagState` header, and the `match` scaffolding around
// `value` are missing from the visible lines.
22 #[derive(Debug, PartialEq, Eq, Copy, Clone)]
23 pub enum DropFlagState {
24 Present, // i.e. initialized
25 Absent, // i.e. deinitialized or "moved"
// Boolean encoding stored in the MIR drop flag:
// `Present` => true, `Absent` => false.
29 pub fn value(self) -> bool {
31 DropFlagState::Present => true,
32 DropFlagState::Absent => false
46 pub enum DropFlagMode {
// Interface the drop-elaboration machinery needs from its driver: access
// to the MIR patcher and type context, drop-flag queries/updates, and
// move-path navigation. `Path` identifies a move path in the driver's
// own representation.
51 pub trait DropElaborator<'a, 'tcx: 'a> : fmt::Debug {
52 type Path : Copy + fmt::Debug;
// Accessors for the patch being built and the surrounding context.
54 fn patch(&mut self) -> &mut MirPatch<'tcx>;
55 fn mir(&self) -> &'a Mir<'tcx>;
56 fn tcx(&self) -> ty::TyCtxt<'a, 'tcx, 'tcx>;
57 fn param_env(&self) -> &'a ty::ParameterEnvironment<'tcx>;
// Drop-flag operations: query the drop style for a path, read the flag
// as an operand (None when no flag exists), and clear it at `location`.
59 fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;
60 fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>>;
61 fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode);
// Move-path navigation: step into a field, through a deref, or into an
// enum variant downcast. `None` means the subpath is untracked.
64 fn field_subpath(&self, path: Self::Path, field: Field) -> Option<Self::Path>;
65 fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path>;
66 fn downcast_subpath(&self, path: Self::Path, variant: usize) -> Option<Self::Path>;
// Bundle of state for elaborating one drop of `lvalue`. NOTE(review):
// the embedded line numbers jump (73→75→78→81), so several fields —
// presumably `is_cleanup`, `path`, and `succ`, which the methods below
// read as `self.is_cleanup` / `self.path` / `self.succ` — are elided
// from this listing, as is the struct's closing brace.
70 struct DropCtxt<'l, 'b: 'l, 'tcx: 'b, D>
71 where D : DropElaborator<'b, 'tcx> + 'l
73 elaborator: &'l mut D,
75 source_info: SourceInfo,
78 lvalue: &'l Lvalue<'tcx>,
// Unwind target for the drop, if any.
81 unwind: Option<BasicBlock>,
// Free-function entry point: packs its arguments into a `DropCtxt` and
// (per the construction at the old line 96) elaborates the drop.
// NOTE(review): several parameter lines (elaborator, is_cleanup, path,
// succ, bb) and the body's scaffolding are elided from this listing.
84 pub fn elaborate_drop<'b, 'tcx, D>(
86 source_info: SourceInfo,
88 lvalue: &Lvalue<'tcx>,
91 unwind: Option<BasicBlock>,
93 where D: DropElaborator<'b, 'tcx>
96 elaborator, source_info, is_cleanup, lvalue, path, succ, unwind
// Methods on the per-drop context. The impl's closing brace is beyond
// the visible lines.
100 impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
101 where D: DropElaborator<'b, 'tcx>
// Type of `lvalue` as seen through the MIR body being patched.
103 fn lvalue_ty(&self, lvalue: &Lvalue<'tcx>) -> Ty<'tcx> {
104 lvalue.ty(self.elaborator.mir(), self.tcx()).to_ty(self.tcx())
// Convenience accessor for the type context.
107 fn tcx(&self) -> ty::TyCtxt<'b, 'tcx, 'tcx> {
108 self.elaborator.tcx()
111 /// This elaborates a single drop instruction, located at `bb`, and
114 /// The elaborated drop checks the drop flags to only drop what
117 /// In addition, the relevant drop flags also need to be cleared
118 /// to avoid double-drops. However, in the middle of a complex
119 /// drop, one must avoid clearing some of the flags before they
120 /// are read, as that would cause a memory leak.
122 /// In particular, when dropping an ADT, multiple fields may be
123 /// joined together under the `rest` subpath. They are all controlled
124 /// by the primary drop flag, but only the last rest-field dropped
125 /// should clear it (and it must also not clear anything else).
127 /// FIXME: I think we should just control the flags externally
128 /// and then we do not need this machinery.
129 pub fn elaborate_drop<'a>(&mut self, bb: BasicBlock) {
130 debug!("elaborate_drop({:?})", self);
131 let style = self.elaborator.drop_style(self.path, DropFlagMode::Deep);
132 debug!("elaborate_drop({:?}): live - {:?}", self, style);
// NOTE(review): the `match style` scaffolding and the arm headers for
// the first (presumably `DropStyle::Dead`) and last (presumably
// `DropStyle::Open`) cases are elided from this listing; only the
// arm bodies survive below.
// Dead value: the drop is a no-op, jump straight to the successor.
135 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
// Statically known to be initialized: clear the flag deeply and emit
// an unconditional Drop terminator.
139 DropStyle::Static => {
140 let loc = self.terminator_loc(bb);
141 self.elaborator.clear_drop_flag(loc, self.path, DropFlagMode::Deep);
142 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Drop {
143 location: self.lvalue.clone(),
// Maybe-initialized: emit a flag-guarded drop and jump to it.
148 DropStyle::Conditional => {
149 let drop_bb = self.complete_drop(Some(DropFlagMode::Deep));
150 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
// Partially moved-out-of: build a field-by-field "open" drop ladder.
155 let drop_bb = self.open_drop();
156 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
163 /// Return the lvalue and move path for each field of `variant`,
164 /// (the move path is `None` if the field is a rest field).
165 fn move_paths_for_fields(&self,
166 base_lv: &Lvalue<'tcx>,
167 variant_path: D::Path,
168 variant: &'tcx ty::VariantDef,
169 substs: &'tcx Substs<'tcx>)
170 -> Vec<(Lvalue<'tcx>, Option<D::Path>)>
172 variant.fields.iter().enumerate().map(|(i, f)| {
173 let field = Field::new(i);
174 let subpath = self.elaborator.field_subpath(variant_path, field);
// Normalize associated types in the field's type so downstream
// needs-drop checks see a concrete type. NOTE(review): the
// `let field_ty =` binding line is elided from this listing.
177 self.tcx().normalize_associated_type_in_env(
178 &f.ty(self.tcx(), substs),
179 self.elaborator.param_env()
181 (base_lv.clone().field(field, field_ty), subpath)
// Build the drop for one subfield. With a tracked move path, recurse
// into full elaboration under that path's own drop flag; without one
// (a "rest" field), emit a simple drop conditioned on the parent flag.
// NOTE(review): parameter lines (`is_cleanup`, `succ`), the return
// type, and the `DropCtxt { .. }` constructor headers are elided.
185 fn drop_subpath(&mut self,
187 lvalue: &Lvalue<'tcx>,
188 path: Option<D::Path>,
190 unwind: Option<BasicBlock>)
193 if let Some(path) = path {
194 debug!("drop_subpath: for std field {:?}", lvalue);
// Tracked field: recurse with the field's own move path.
197 elaborator: self.elaborator,
198 source_info: self.source_info,
199 path, lvalue, succ, unwind, is_cleanup
200 }.elaborated_drop_block()
202 debug!("drop_subpath: for rest field {:?}", lvalue);
205 elaborator: self.elaborator,
206 source_info: self.source_info,
207 lvalue, succ, unwind, is_cleanup,
208 // Using `self.path` here to condition the drop on
209 // our own drop flag.
211 }.complete_drop(None)
215 /// Create one-half of the drop ladder for a list of fields, and return
216 /// the list of steps in it in reverse order.
218 /// `unwind_ladder` is such a list of steps in reverse order,
219 /// which is called instead of the next step if the drop unwinds
220 /// (the first field is never reached). If it is `None`, all
221 /// unwind targets are left blank.
222 fn drop_halfladder<'a>(&mut self,
223 unwind_ladder: Option<Vec<BasicBlock>>,
225 fields: &[(Lvalue<'tcx>, Option<D::Path>)],
// NOTE(review): the `succ`/`is_cleanup` parameter lines, return type,
// and the branches of the `unwind_succ` initializer are elided here.
229 let mut unwind_succ = if is_cleanup {
// Fresh landing block so the flag-clearing statement below has a
// well-defined location at statement_index 0.
235 let goto = TerminatorKind::Goto { target: succ };
236 let mut succ = self.new_block(is_cleanup, goto);
238 // Always clear the "master" drop flag at the bottom of the
239 // ladder. This is needed because the "master" drop flag
240 // protects the ADT's discriminant, which is invalidated
241 // after the ADT is dropped.
242 let succ_loc = Location { block: succ, statement_index: 0 };
243 self.elaborator.clear_drop_flag(succ_loc, self.path, DropFlagMode::Shallow);
// Chain the field drops back-to-front: each step targets the block
// built for the previous (later) field, pairing with unwind_ladder.
245 fields.iter().rev().enumerate().map(|(i, &(ref lv, path))| {
246 succ = self.drop_subpath(is_cleanup, lv, path, succ, unwind_succ);
247 unwind_succ = unwind_ladder.as_ref().map(|p| p[i]);
252 /// Create a full drop ladder, consisting of 2 connected half-drop-ladders
254 /// For example, with 3 fields, the drop ladder is
257 /// ELAB(drop location.0 [target=.d1, unwind=.c1])
259 /// ELAB(drop location.1 [target=.d2, unwind=.c2])
261 /// ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`])
263 /// ELAB(drop location.1 [target=.c2])
265 /// ELAB(drop location.2 [target=`self.unwind])
266 fn drop_ladder<'a>(&mut self,
267 fields: Vec<(Lvalue<'tcx>, Option<D::Path>)>)
270 debug!("drop_ladder({:?}, {:?})", self, fields);
// Skip fields whose types never need dropping — they contribute no
// ladder steps.
272 let mut fields = fields;
273 fields.retain(|&(ref lvalue, _)| {
274 self.tcx().type_needs_drop_given_env(
275 self.lvalue_ty(lvalue), self.elaborator.param_env())
278 debug!("drop_ladder - fields needing drop: {:?}", fields);
// Build the unwind (cleanup) half first so the normal half can use
// its blocks as unwind targets; in a cleanup context there is no
// separate unwind half. NOTE(review): the `if`/`else` scaffolding
// around these branches is elided from this listing.
280 let unwind_ladder = if self.is_cleanup {
283 let unwind = self.unwind.unwrap(); // FIXME(#6393)
284 Some(self.drop_halfladder(None, unwind, &fields, true))
// Normal half-ladder; if every field was retained away, fall back to
// the plain successor.
287 let succ = self.succ; // FIXME(#6393)
288 let is_cleanup = self.is_cleanup;
289 self.drop_halfladder(unwind_ladder, succ, &fields, is_cleanup)
290 .last().cloned().unwrap_or(succ)
// Open drop for a tuple (or closure upvars): one ladder step per
// element, pairing each field lvalue with its move subpath.
293 fn open_drop_for_tuple<'a>(&mut self, tys: &[Ty<'tcx>])
296 debug!("open_drop_for_tuple({:?}, {:?})", self, tys);
298 let fields = tys.iter().enumerate().map(|(i, &ty)| {
299 (self.lvalue.clone().field(Field::new(i), ty),
300 self.elaborator.field_subpath(self.path, Field::new(i)))
// NOTE(review): the `.collect()` line is elided from this listing.
303 self.drop_ladder(fields)
// Open drop for `Box<T>`: drop the pointed-to interior first, then
// free the allocation via the `box_free` lang item (both on the normal
// and the unwind path).
306 fn open_drop_for_box<'a>(&mut self, ty: Ty<'tcx>) -> BasicBlock
308 debug!("open_drop_for_box({:?}, {:?})", self, ty);
310 let interior = self.lvalue.clone().deref();
311 let interior_path = self.elaborator.deref_subpath(self.path);
313 let succ = self.succ; // FIXME(#6393)
314 let is_cleanup = self.is_cleanup;
// Interpose the free blocks between the interior drop and the real
// successors; the unwind-side free block is always a cleanup block.
315 let succ = self.box_free_block(ty, succ, is_cleanup);
316 let unwind_succ = self.unwind.map(|u| {
317 self.box_free_block(ty, u, true)
320 self.drop_subpath(is_cleanup, &interior, interior_path, succ, unwind_succ)
// Open drop for a struct or enum. Single-variant ADTs get a plain drop
// ladder; multi-variant enums get a SwitchInt on the discriminant with
// one ladder per tracked variant, falling back to a shallow complete
// drop for untracked variants. NOTE(review): this method is heavily
// elided (match scaffolding, several argument lists, and the
// `otherwise`/switch-target wiring are missing from the listing).
323 fn open_drop_for_adt<'a>(&mut self, adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
325 debug!("open_drop_for_adt({:?}, {:?}, {:?})", self, adt, substs);
327 match adt.variants.len() {
// Single variant: drop its fields directly.
329 let fields = self.move_paths_for_fields(
335 self.drop_ladder(fields)
// Multiple variants: build per-variant ladders keyed by discriminant.
338 let mut values = Vec::with_capacity(adt.variants.len());
339 let mut blocks = Vec::with_capacity(adt.variants.len());
340 let mut otherwise = None;
341 for (variant_index, discr) in adt.discriminants(self.tcx()).enumerate() {
342 let subpath = self.elaborator.downcast_subpath(
343 self.path, variant_index);
344 if let Some(variant_path) = subpath {
// Variant is tracked: downcast and ladder its fields.
345 let base_lv = self.lvalue.clone().elem(
346 ProjectionElem::Downcast(adt, variant_index)
348 let fields = self.move_paths_for_fields(
351 &adt.variants[variant_index],
354 blocks.push(self.drop_ladder(fields));
356 // variant not found - drop the entire enum
357 if let None = otherwise {
359 Some(self.complete_drop(Some(DropFlagMode::Shallow)));
363 if let Some(block) = otherwise {
368 // If there are multiple variants, then if something
369 // is present within the enum the discriminant, tracked
370 // by the rest path, must be initialized.
372 // Additionally, we do not want to switch on the
373 // discriminant after it is free-ed, because that
374 // way lies only trouble.
// Read the discriminant into a fresh temporary, then switch on it.
375 let discr_ty = adt.repr.discr_type().to_ty(self.tcx());
376 let discr = Lvalue::Local(self.new_temp(discr_ty));
377 let discr_rv = Rvalue::Discriminant(self.lvalue.clone());
378 let switch_block = self.elaborator.patch().new_block(BasicBlockData {
381 source_info: self.source_info,
382 kind: StatementKind::Assign(discr.clone(), discr_rv),
385 terminator: Some(Terminator {
386 source_info: self.source_info,
387 kind: TerminatorKind::SwitchInt {
388 discr: Operand::Consume(discr),
390 values: From::from(values),
394 is_cleanup: self.is_cleanup,
// Guard the whole switch behind the drop flag.
396 self.drop_flag_test_block(switch_block)
401 /// The slow-path - create an "open", elaborated drop for a type
402 /// which is moved-out-of only partially, and patch `bb` to a jump
403 /// to it. This must not be called on ADTs with a destructor,
404 /// as these can't be moved-out-of, except for `Box<T>`, which is
407 /// This creates a "drop ladder" that drops the needed fields of the
408 /// ADT, both in the success case or if one of the destructors fail.
409 fn open_drop<'a>(&mut self) -> BasicBlock {
410 let ty = self.lvalue_ty(self.lvalue);
// Dispatch on the type being dropped. NOTE(review): the `match ty.sty`
// header (or equivalent) is elided from this listing.
412 ty::TyClosure(def_id, substs) => {
// Closures drop like tuples of their upvars.
413 let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx()).collect();
414 self.open_drop_for_tuple(&tys)
416 ty::TyTuple(tys, _) => {
417 self.open_drop_for_tuple(tys)
419 ty::TyAdt(def, _) if def.is_box() => {
420 self.open_drop_for_box(ty.boxed_ty())
422 ty::TyAdt(def, substs) => {
423 self.open_drop_for_adt(def, substs)
425 _ => bug!("open drop from non-ADT `{:?}`", ty)
429 /// Return a basic block that drop an lvalue using the context
430 /// and path in `c`. If `mode` is something, also clear `c`
433 /// if FLAG(self.path)
434 /// if let Some(mode) = mode: FLAG(self.path)[mode] = false
436 fn complete_drop<'a>(&mut self, drop_mode: Option<DropFlagMode>) -> BasicBlock
438 debug!("complete_drop({:?},{:?})", self, drop_mode);
440 let drop_block = self.drop_block();
// Optionally clear the drop flag at the top of the drop block so the
// value is not dropped twice.
441 if let Some(mode) = drop_mode {
442 let block_start = Location { block: drop_block, statement_index: 0 };
443 self.elaborator.clear_drop_flag(block_start, self.path, mode);
// Guard the drop behind the (shallow) drop-flag test.
446 self.drop_flag_test_block(drop_block)
// Create a plain Drop block, then recursively elaborate the drop it
// contains; returns the (now elaborated) block. NOTE(review): the
// `blk` return expression line is elided from this listing.
449 fn elaborated_drop_block<'a>(&mut self) -> BasicBlock {
450 debug!("elaborated_drop_block({:?})", self);
451 let blk = self.drop_block();
452 self.elaborate_drop(blk);
// Flag-guarded `box_free` call: build the raw free block, then wrap it
// in a drop-flag test falling through to `target` when the flag is
// unset. NOTE(review): the parameter list lines are elided here.
456 fn box_free_block<'a>(
462 let block = self.unelaborated_free_block(ty, target, is_cleanup);
463 self.drop_flag_test_block_with_succ(is_cleanup, block, target)
// Build a block that calls the `box_free` lang item on `self.lvalue`,
// continuing to `target`, and shallowly clears the drop flag at the
// block's start. NOTE(review): parameter lines and parts of the Call
// terminator (ty/literal fields, cleanup target, return expression)
// are elided from this listing.
466 fn unelaborated_free_block<'a>(
472 let tcx = self.tcx();
// `box_free` returns unit; give the call a throwaway destination.
473 let unit_temp = Lvalue::Local(self.new_temp(tcx.mk_nil()));
474 let free_func = tcx.require_lang_item(lang_items::BoxFreeFnLangItem);
// Instantiate box_free::<T> for the boxed content type.
475 let substs = tcx.mk_substs(iter::once(Kind::from(ty)));
476 let fty = tcx.item_type(free_func).subst(tcx, substs);
478 let free_block = self.elaborator.patch().new_block(BasicBlockData {
480 terminator: Some(Terminator {
481 source_info: self.source_info, kind: TerminatorKind::Call {
482 func: Operand::Constant(Constant {
483 span: self.source_info.span,
485 literal: Literal::Item {
490 args: vec![Operand::Consume(self.lvalue.clone())],
491 destination: Some((unit_temp, target)),
495 is_cleanup: is_cleanup
// Clearing shallowly: only this box's own flag, not its contents'.
497 let block_start = Location { block: free_block, statement_index: 0 };
498 self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
// New block whose terminator is an unconditional Drop of `self.lvalue`.
// NOTE(review): the `target`/`unwind` fields of the Drop terminator
// are elided from this listing.
502 fn drop_block<'a>(&mut self) -> BasicBlock {
503 let block = TerminatorKind::Drop {
504 location: self.lvalue.clone(),
508 let is_cleanup = self.is_cleanup; // FIXME(#6393)
509 self.new_block(is_cleanup, block)
// Convenience wrapper: drop-flag test whose "unset" edge goes to the
// context's normal successor.
512 fn drop_flag_test_block<'a>(&mut self, on_set: BasicBlock) -> BasicBlock {
513 let is_cleanup = self.is_cleanup;
514 let succ = self.succ; // FIXME(#6393)
515 self.drop_flag_test_block_with_succ(is_cleanup, on_set, succ)
// Return a block that branches on the shallow drop style: statically
// dead → `on_unset`; statically live → `on_set`; otherwise emit a
// runtime test of the drop flag. NOTE(review): the `on_set` parameter
// line, return type, and `match style` scaffolding are elided here.
518 fn drop_flag_test_block_with_succ<'a>(&mut self,
521 on_unset: BasicBlock)
524 let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
525 debug!("drop_flag_test_block({:?},{:?},{:?}) - {:?}",
526 self, is_cleanup, on_set, style);
529 DropStyle::Dead => on_unset,
530 DropStyle::Static => on_set,
531 DropStyle::Conditional | DropStyle::Open => {
// Runtime-conditional: `if flag { on_set } else { on_unset }`.
532 let flag = self.elaborator.get_drop_flag(self.path).unwrap();
533 let term = TerminatorKind::if_(self.tcx(), flag, on_set, on_unset);
534 self.new_block(is_cleanup, term)
// Append an empty block with terminator `k` (statements elided in this
// listing) to the patch and return its id.
539 fn new_block<'a>(&mut self,
541 k: TerminatorKind<'tcx>)
544 self.elaborator.patch().new_block(BasicBlockData {
546 terminator: Some(Terminator {
547 source_info: self.source_info, kind: k
549 is_cleanup: is_cleanup
// Allocate a fresh temporary local of type `ty` via the patch.
553 fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
554 self.elaborator.patch().new_temp(ty)
// Location of `bb`'s terminator, accounting for statements the patch
// has queued onto the block.
557 fn terminator_loc(&mut self, bb: BasicBlock) -> Location {
558 let mir = self.elaborator.mir();
559 self.elaborator.patch().terminator_loc(mir, bb)