});
if let Some(scope) = scope {
// schedule a shallow free of that memory, lest we unwind:
- this.schedule_drop(expr_span, scope, &Place::Local(result), value.ty);
+ this.schedule_drop_storage_and_value(
+ expr_span, scope, &Place::Local(result), value.ty,
+ );
}
// malloc some memory of suitable type (thus far, uninitialized):
// anything because no values with a destructor can be created in
// a constant at this time, even if the type may need dropping.
if let Some(temp_lifetime) = temp_lifetime {
- this.schedule_drop(expr_span, temp_lifetime, &Place::Local(temp), expr_ty);
+ this.schedule_drop_storage_and_value(
+ expr_span, temp_lifetime, &Place::Local(temp), expr_ty,
+ );
}
block.and(temp)
use build::{BlockAnd, BlockAndExtension, Builder};
use build::{GuardFrame, GuardFrameLocal, LocalsForNode};
use build::ForGuard::{self, OutsideGuard, RefWithinGuard, ValWithinGuard};
+use build::scope::{CachedBlock, DropKind};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::bitvec::BitVector;
use rustc::ty::{self, Ty};
source_info,
kind: StatementKind::StorageLive(local_id)
});
- Place::Local(local_id)
+ let place = Place::Local(local_id);
+ let var_ty = self.local_decls[local_id].ty;
+ let hir_id = self.hir.tcx().hir.node_to_hir_id(var);
+ let region_scope = self.hir.region_scope_tree.var_scope(hir_id.local_id);
+ self.schedule_drop(
+ span, region_scope, &place, var_ty,
+ DropKind::Storage,
+ );
+ place
}
pub fn schedule_drop_for_binding(&mut self,
let var_ty = self.local_decls[local_id].ty;
let hir_id = self.hir.tcx().hir.node_to_hir_id(var);
let region_scope = self.hir.region_scope_tree.var_scope(hir_id.local_id);
- self.schedule_drop(span, region_scope, &Place::Local(local_id), var_ty);
+ self.schedule_drop(
+ span, region_scope, &Place::Local(local_id), var_ty,
+ DropKind::Value {
+ cached_block: CachedBlock::default(),
+ },
+ );
}
pub fn visit_bindings<F>(&mut self, pattern: &Pattern<'tcx>, f: &mut F)
use build;
+use build::scope::{CachedBlock, DropKind};
use hair::cx::Cx;
use hair::{LintLevel, BindingMode, PatternKind};
use rustc::hir;
}
// Make sure we drop (parts of) the argument even when not matched on.
- self.schedule_drop(pattern.as_ref().map_or(ast_body.span, |pat| pat.span),
- argument_scope, &place, ty);
-
+ self.schedule_drop(
+ pattern.as_ref().map_or(ast_body.span, |pat| pat.span),
+ argument_scope, &place, ty,
+ DropKind::Value { cached_block: CachedBlock::default() },
+ );
}
// Enter the argument pattern bindings source scope, if it exists.
/// place to drop
location: Place<'tcx>,
- /// Whether this is a full value Drop, or just a StorageDead.
- kind: DropKind
+ /// Whether this is a value Drop or a StorageDead.
+ kind: DropKind,
}
#[derive(Debug, Default, Clone, Copy)]
-struct CachedBlock {
+pub(crate) struct CachedBlock {
/// The cached block for the cleanups-on-diverge path. This block
/// contains code to run the current drop and all the preceding
/// drops (i.e. those having lower index in Drop’s Scope drop
}
#[derive(Debug)]
-enum DropKind {
+pub(crate) enum DropKind {
Value {
cached_block: CachedBlock,
},
abortblk
}
+    /// Schedules both a `DropKind::Storage` drop and a
+    /// `DropKind::Value` drop for `place`, so that on exit from
+    /// `region_scope` the storage is marked dead in addition to any
+    /// value drop.
+    ///
+    /// Because this schedules a `DropKind::Storage` drop, `place` must
+    /// satisfy its requirements: a `Place::Local` whose index is
+    /// higher than `self.arg_count` (see `schedule_drop`).
+    pub fn schedule_drop_storage_and_value(
+        &mut self,
+        span: Span,
+        region_scope: region::Scope,
+        place: &Place<'tcx>,
+        place_ty: Ty<'tcx>,
+    ) {
+        // Storage drop is scheduled unconditionally; the value drop is
+        // skipped inside `schedule_drop` when `place_ty` needs no drop.
+        self.schedule_drop(
+            span, region_scope, place, place_ty,
+            DropKind::Storage,
+        );
+        self.schedule_drop(
+            span, region_scope, place, place_ty,
+            DropKind::Value {
+                cached_block: CachedBlock::default(),
+            },
+        );
+    }
+
// Scheduling drops
// ================
/// Indicates that `place` should be dropped on exit from
/// `region_scope`.
- pub fn schedule_drop(&mut self,
- span: Span,
- region_scope: region::Scope,
- place: &Place<'tcx>,
- place_ty: Ty<'tcx>) {
+ ///
+ /// When called with `DropKind::Storage`, `place` should be a local
+ /// with an index higher than the current `self.arg_count`.
+ pub fn schedule_drop(
+ &mut self,
+ span: Span,
+ region_scope: region::Scope,
+ place: &Place<'tcx>,
+ place_ty: Ty<'tcx>,
+ drop_kind: DropKind,
+ ) {
let needs_drop = self.hir.needs_drop(place_ty);
- let drop_kind = if needs_drop {
- DropKind::Value { cached_block: CachedBlock::default() }
- } else {
- // Only temps and vars need their storage dead.
- match *place {
- Place::Local(index) if index.index() > self.arg_count => DropKind::Storage,
- _ => return
+ match drop_kind {
+ DropKind::Value { .. } => if !needs_drop { return },
+ DropKind::Storage => {
+ match *place {
+ Place::Local(index) => if index.index() <= self.arg_count {
+ span_bug!(
+ span, "`schedule_drop` called with index {} and arg_count {}",
+ index.index(),
+ self.arg_count,
+ )
+ },
+ _ => span_bug!(
+ span, "`schedule_drop` called with non-`Local` place {:?}", place
+ ),
+ }
}
- };
+ }
for scope in self.scopes.iter_mut().rev() {
let this_scope = scope.region_scope == region_scope;
});
block = next;
}
- DropKind::Storage => {}
- }
-
- // We do not need to emit StorageDead for generator drops
- if generator_drop {
- continue
- }
+ DropKind::Storage => {
+ // We do not need to emit StorageDead for generator drops
+ if generator_drop {
+ continue
+ }
- // Drop the storage for both value and storage drops.
- // Only temps and vars need their storage dead.
- match drop_data.location {
- Place::Local(index) if index.index() > arg_count => {
- cfg.push(block, Statement {
- source_info,
- kind: StatementKind::StorageDead(index)
- });
+ // Drop the storage for both value and storage drops.
+ // Only temps and vars need their storage dead.
+ match drop_data.location {
+ Place::Local(index) if index.index() > arg_count => {
+ cfg.push(block, Statement {
+ source_info,
+ kind: StatementKind::StorageDead(index)
+ });
+ }
+ _ => unreachable!(),
+ }
}
- _ => continue
}
}
block.unit()
// CHECK: [[S__4:%[0-9]+]] = bitcast { i32, i32 }* %_4 to i8*
// CHECK: call void @llvm.lifetime.start{{.*}}(i{{[0-9 ]+}}, i8* [[S__4]])
-// CHECK: [[E_b:%[0-9]+]] = bitcast { i32, i32 }** %b to i8*
-// CHECK: call void @llvm.lifetime.end{{.*}}(i{{[0-9 ]+}}, i8* [[E_b]])
-
// CHECK: [[E__4:%[0-9]+]] = bitcast { i32, i32 }* %_4 to i8*
// CHECK: call void @llvm.lifetime.end{{.*}}(i{{[0-9 ]+}}, i8* [[E__4]])
+
+// CHECK: [[E_b:%[0-9]+]] = bitcast { i32, i32 }** %b to i8*
+// CHECK: call void @llvm.lifetime.end{{.*}}(i{{[0-9 ]+}}, i8* [[E_b]])
}
let c = 1;
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// We must mark a variable whose initialization fails because its
+// initializer diverges (here, via `break`) as StorageDead.
+
+fn main() {
+    loop {
+        // The initializer of `beacon` diverges via `break`, so on that
+        // path the binding is never initialized — but its storage has
+        // already been marked live, so it must still be marked
+        // StorageDead when the loop is exited (checked in the MIR dump
+        // below: StorageDead(_2) on the break path).
+        let beacon = {
+            match true {
+                false => 4,
+                true => break,
+            }
+        };
+        drop(&beacon);
+    }
+}
+
+// END RUST SOURCE
+// START rustc.main.mir_map.0.mir
+// fn main() -> (){
+// let mut _0: ();
+// scope 1 {
+// }
+// scope 2 {
+// let _2: i32;
+// }
+// let mut _1: ();
+// let mut _3: bool;
+// let mut _4: u8;
+// let mut _5: !;
+// let mut _6: ();
+// let mut _7: &i32;
+// bb0: {
+// goto -> bb1;
+// }
+// bb1: {
+// falseUnwind -> [real: bb3, cleanup: bb4];
+// }
+// bb2: {
+// goto -> bb29;
+// }
+// bb3: {
+// StorageLive(_2);
+// StorageLive(_3);
+// _3 = const true;
+// _4 = discriminant(_3);
+// switchInt(_3) -> [false: bb11, otherwise: bb10];
+// }
+// bb4: {
+// resume;
+// }
+// bb5: {
+// _2 = const 4i32;
+// goto -> bb14;
+// }
+// bb6: {
+// _0 = ();
+// goto -> bb15;
+// }
+// bb7: {
+// falseEdges -> [real: bb12, imaginary: bb8];
+// }
+// bb8: {
+// falseEdges -> [real: bb13, imaginary: bb9];
+// }
+// bb9: {
+// unreachable;
+// }
+// bb10: {
+// goto -> bb8;
+// }
+// bb11: {
+// goto -> bb7;
+// }
+// bb12: {
+// goto -> bb5;
+// }
+// bb13: {
+// goto -> bb6;
+// }
+// bb14: {
+// StorageDead(_3);
+// StorageLive(_7);
+// _7 = &_2;
+// _6 = const std::mem::drop(move _7) -> [return: bb28, unwind: bb4];
+// }
+// bb15: {
+// goto -> bb16;
+// }
+// bb16: {
+// goto -> bb17;
+// }
+// bb17: {
+// goto -> bb18;
+// }
+// bb18: {
+// goto -> bb19;
+// }
+// bb19: {
+// goto -> bb20;
+// }
+// bb20: {
+// StorageDead(_3);
+// goto -> bb21;
+// }
+// bb21: {
+// goto -> bb22;
+// }
+// bb22: {
+// StorageDead(_2);
+// goto -> bb23;
+// }
+// bb23: {
+// goto -> bb24;
+// }
+// bb24: {
+// goto -> bb25;
+// }
+// bb25: {
+// goto -> bb2;
+// }
+// bb26: {
+// _5 = ();
+// unreachable;
+// }
+// bb27: {
+// StorageDead(_5);
+// goto -> bb14;
+// }
+// bb28: {
+// StorageDead(_7);
+// _1 = ();
+// StorageDead(_2);
+// goto -> bb1;
+// }
+// bb29: {
+// return;
+// }
+// }
+// END rustc.main.mir_map.0.mir
// StorageDead(_5);
// _3 = &_4;
// _2 = ();
-// StorageDead(_3);
// StorageDead(_4);
+// StorageDead(_3);
// StorageLive(_6);
// _6 = const 1i32;
// _0 = ();