E0491, // in type `..`, reference has a longer lifetime than the data it...
E0495, // cannot infer an appropriate lifetime due to conflicting requirements
E0525, // expected a closure that implements `..` but this closure only implements `..`
+ E0526, // skolemization subtype
}
"");
err
}
+ infer::SkolemizeSuccessor(span) => {
+ let mut err =
+ struct_span_err!(self.tcx.sess, span, E0526,
+ "to satisfy higher-ranked bounds, \
+ a static lifetime is required");
+ self.tcx.note_and_explain_region(&mut err,
+ "but the lifetime is only valid for ",
+ sub,
+ "");
+ err
+ }
}
}
"...so that references are valid when the destructor \
runs");
}
+ infer::SkolemizeSuccessor(span) => {
+ err.span_note(
+ span,
+ "...so that higher-ranked bounds are satisfied");
+ }
}
}
}
//! Helper routines for higher-ranked things. See the `doc` module at
//! the end of the file for details.
-use super::{CombinedSnapshot, InferCtxt, HigherRankedType, SkolemizationMap};
+use super::{CombinedSnapshot,
+ InferCtxt,
+ LateBoundRegion,
+ HigherRankedType,
+ SubregionOrigin,
+ SkolemizationMap};
use super::combine::CombineFields;
+use super::region_inference::{TaintDirections};
+use infer::error_reporting;
use ty::{self, TyCtxt, Binder, TypeFoldable};
use ty::error::TypeError;
use ty::relate::{Relate, RelateResult, TypeRelation};
// Start a snapshot so we can examine "all bindings that were
// created as part of this type comparison".
return self.infcx.commit_if_ok(|snapshot| {
+ let span = self.trace.origin.span();
+
// First, we instantiate each bound region in the subtype with a fresh
// region variable.
let (a_prime, _) =
self.infcx.replace_late_bound_regions_with_fresh_var(
- self.trace.origin.span(),
+ span,
HigherRankedType,
a);
// Presuming type comparison succeeds, we need to check
// that the skolemized regions do not "leak".
- self.infcx.leak_check(!self.a_is_expected, &skol_map, snapshot)?;
+ self.infcx.leak_check(!self.a_is_expected, span, &skol_map, snapshot)?;
+
+ // We are finished with the skolemized regions now so pop
+ // them off.
+ self.infcx.pop_skolemized(skol_map, snapshot);
debug!("higher_ranked_sub: OK result={:?}", result);
return r0;
}
- let tainted = infcx.tainted_regions(snapshot, r0);
+ let tainted = infcx.tainted_regions(snapshot, r0, TaintDirections::both());
// Variables created during LUB computation which are
// *related* to regions that pre-date the LUB computation
return r0;
}
- let tainted = infcx.tainted_regions(snapshot, r0);
+ let tainted = infcx.tainted_regions(snapshot, r0, TaintDirections::both());
let mut a_r = None;
let mut b_r = None;
}
impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
- fn tainted_regions(&self, snapshot: &CombinedSnapshot, r: ty::Region) -> Vec<ty::Region> {
- self.region_vars.tainted(&snapshot.region_vars_snapshot, r)
+ /// Returns every region related to `r` by constraints recorded since
+ /// `snapshot`, restricted to the requested `directions` (incoming
+ /// edges, outgoing edges, or both); `r` itself is always included.
+ /// Thin wrapper delegating to `RegionVarBindings::tainted`.
+ fn tainted_regions(&self,
+ snapshot: &CombinedSnapshot,
+ r: ty::Region,
+ directions: TaintDirections)
+ -> FnvHashSet<ty::Region> {
+ self.region_vars.tainted(&snapshot.region_vars_snapshot, r, directions)
}
fn region_vars_confined_to_snapshot(&self,
region_vars
}
+ /// Replace all regions bound by `binder` with skolemized regions and
+ /// return a map indicating which bound-region was replaced with what
+ /// skolemized region. This is the first step of checking subtyping
+ /// when higher-ranked things are involved.
+ ///
+ /// **Important:** you must call this function from within a snapshot.
+ /// Moreover, before committing the snapshot, you must eventually call
+ /// either `plug_leaks` or `pop_skolemized` to remove the skolemized
+ /// regions. If you rollback the snapshot (or are using a probe), then
+ /// the pop occurs as part of the rollback, so an explicit call is not
+ /// needed (but is also permitted).
+ ///
+ /// See `README.md` for more details.
pub fn skolemize_late_bound_regions<T>(&self,
binder: &ty::Binder<T>,
snapshot: &CombinedSnapshot)
-> (T, SkolemizationMap)
where T : TypeFoldable<'tcx>
{
- /*!
- * Replace all regions bound by `binder` with skolemized regions and
- * return a map indicating which bound-region was replaced with what
- * skolemized region. This is the first step of checking subtyping
- * when higher-ranked things are involved. See `README.md` for more
- * details.
- */
-
let (result, map) = self.tcx.replace_late_bound_regions(binder, |br| {
- self.region_vars.new_skolemized(br, &snapshot.region_vars_snapshot)
+ self.region_vars.push_skolemized(br, &snapshot.region_vars_snapshot)
});
debug!("skolemize_bound_regions(binder={:?}, result={:?}, map={:?})",
(result, map)
}
+ /// Searches the region constraints created since `snapshot` was started
+ /// and checks to determine whether any of the skolemized regions created
+ /// in `skol_map` would "escape" -- meaning that they are related to
+ /// other regions in some way. If so, the higher-ranked subtyping doesn't
+ /// hold. See `README.md` for more details.
pub fn leak_check(&self,
overly_polymorphic: bool,
+ span: Span,
skol_map: &SkolemizationMap,
snapshot: &CombinedSnapshot)
-> RelateResult<'tcx, ()>
{
- /*!
- * Searches the region constriants created since `snapshot` was started
- * and checks to determine whether any of the skolemized regions created
- * in `skol_map` would "escape" -- meaning that they are related to
- * other regions in some way. If so, the higher-ranked subtyping doesn't
- * hold. See `README.md` for more details.
- */
-
debug!("leak_check: skol_map={:?}",
skol_map);
let new_vars = self.region_vars_confined_to_snapshot(snapshot);
for (&skol_br, &skol) in skol_map {
- let tainted = self.tainted_regions(snapshot, skol);
- for &tainted_region in &tainted {
+ // The inputs to a skolemized variable can only
+ // be itself or other new variables.
+ let incoming_taints = self.tainted_regions(snapshot,
+ skol,
+ TaintDirections::incoming());
+ for &tainted_region in &incoming_taints {
// Each skolemized should only be relatable to itself
// or new variables:
match tainted_region {
}
}
}
+
+ for (_, &skol) in skol_map {
+ // The outputs from a skolemized variable must all be
+ // equatable with `'static`.
+ let outgoing_taints = self.tainted_regions(snapshot,
+ skol,
+ TaintDirections::outgoing());
+ for &tainted_region in &outgoing_taints {
+ match tainted_region {
+ ty::ReVar(vid) if new_vars.contains(&vid) => {
+ // There is a path from a skolemized variable
+ // to some region variable that doesn't escape
+ // this snapshot:
+ //
+ // [skol] -> [tainted_region]
+ //
+ // We can ignore this. The reasoning relies on
+ // the fact that the previous loop
+ // completed. There are two possible cases
+ // here.
+ //
+ // - `tainted_region` eventually reaches a
+ // skolemized variable, which *must* be `skol`
+ // (because otherwise we would have already
+ // returned `Err`). In that case,
+ // `tainted_region` could be inferred to `skol`.
+ //
+ // - `tainted_region` never reaches a
+ // skolemized variable. In that case, we can
+ // safely choose `'static` as an upper bound for the
+ // incoming edges. This is a conservative
+ // choice -- the LUB might be one of the
+ // incoming skolemized variables, which we
+ // might know by ambient bounds. We can
+ // consider a more clever choice of upper
+ // bound later (modulo some theoretical
+ // breakage).
+ //
+ // We used to force such `tainted_region` to be
+ // `'static`, but that leads to problems when
+ // combined with `plug_leaks`. If you have a case
+ // where `[skol] -> [tainted_region] -> [skol]`,
+ // then `plug_leaks` concludes it should replace
+ // `'static` with a late-bound region, which is
+ // clearly wrong. (Well, what actually happens is
+ // you get assertion failures because it WOULD
+ // have to replace 'static with a late-bound
+ // region.)
+ }
+ ty::ReSkolemized(..) => {
+ // the only skolemized region we find in the
+ // successors of X can be X; if there was another
+ // region Y, then X would have been in the preds
+ // of Y, and we would have aborted above
+ assert_eq!(skol, tainted_region);
+ }
+ _ => {
+ self.region_vars.make_subregion(
+ SubregionOrigin::SkolemizeSuccessor(span),
+ ty::ReStatic,
+ tainted_region);
+ }
+ }
+ }
+ }
+
Ok(())
}
value: &T) -> T
where T : TypeFoldable<'tcx>
{
- debug_assert!(self.leak_check(false, &skol_map, snapshot).is_ok());
-
debug!("plug_leaks(skol_map={:?}, value={:?})",
skol_map,
value);
// these taint sets are mutually disjoint.
let inv_skol_map: FnvHashMap<ty::Region, ty::BoundRegion> =
skol_map
- .into_iter()
- .flat_map(|(skol_br, skol)| {
- self.tainted_regions(snapshot, skol)
+ .iter()
+ .flat_map(|(&skol_br, &skol)| {
+ self.tainted_regions(snapshot, skol, TaintDirections::incoming())
.into_iter()
.map(move |tainted_region| (tainted_region, skol_br))
})
// binders, so this assert is satisfied.
assert!(current_depth > 1);
+ // since leak-check passed, this skolemized region
+ // should only have incoming edges from variables
+ // (which ought not to escape the snapshot, but we
+ // don't check that) or itself
+ assert!(
+ match r {
+ ty::ReVar(_) => true,
+ ty::ReSkolemized(_, ref br1) => br == br1,
+ _ => false,
+ },
+ "leak-check would have us replace {:?} with {:?}",
+ r, br);
+
ty::ReLateBound(ty::DebruijnIndex::new(current_depth - 1), br.clone())
}
}
debug!("plug_leaks: result={:?}",
result);
+ self.pop_skolemized(skol_map, snapshot);
+
+ debug!("plug_leaks: result={:?}", result);
+
result
}
+
+ /// Pops the skolemized regions found in `skol_map` from the region
+ /// inference context. Whenever you create skolemized regions via
+ /// `skolemize_late_bound_regions`, they must be popped before you
+ /// commit the enclosing snapshot (if you do not commit, e.g. within a
+ /// probe or as a result of an error, then this is not necessary, as
+ /// popping happens as part of the rollback).
+ ///
+ /// Note: popping also occurs implicitly as part of `leak_check`.
+ pub fn pop_skolemized(&self,
+ skol_map: SkolemizationMap,
+ snapshot: &CombinedSnapshot)
+ {
+ debug!("pop_skolemized({:?})", skol_map);
+ // Only the skolemized regions themselves (the map's values) are
+ // needed for popping; the bound-region keys are irrelevant here.
+ let skol_regions: FnvHashSet<_> = skol_map.values().cloned().collect();
+ self.region_vars.pop_skolemized(&skol_regions, &snapshot.region_vars_snapshot);
+ }
}
// Region constraint arriving from destructor safety
SafeDestructor(Span),
+
+ // When doing a higher-ranked comparison, this region was a
+ // successor from a skolemized region, which means that it must be
+ // `'static` to be sound.
+ SkolemizeSuccessor(Span),
}
/// Places that type/region parameters can appear.
}
impl<'tcx, T> InferOk<'tcx, T> {
- fn unit(self) -> InferOk<'tcx, ()> {
+ pub fn unit(self) -> InferOk<'tcx, ()> {
InferOk { value: (), obligations: self.obligations }
}
}
self.skolemize_late_bound_regions(predicate, snapshot);
let origin = TypeOrigin::EquatePredicate(span);
let eqty_ok = self.eq_types(false, origin, a, b)?;
- self.leak_check(false, &skol_map, snapshot).map(|_| eqty_ok.unit())
+ self.leak_check(false, span, &skol_map, snapshot)?;
+ self.pop_skolemized(skol_map, snapshot);
+ Ok(eqty_ok.unit())
})
}
self.skolemize_late_bound_regions(predicate, snapshot);
let origin = RelateRegionParamBound(span);
self.sub_regions(origin, r_b, r_a); // `b : a` ==> `a <= b`
- self.leak_check(false, &skol_map, snapshot)
+ self.leak_check(false, span, &skol_map, snapshot)?;
+ Ok(self.pop_skolemized(skol_map, snapshot))
})
}
AddrOf(a) => a,
AutoBorrow(a) => a,
SafeDestructor(a) => a,
+ SkolemizeSuccessor(a) => a,
}
}
}
use super::{RegionVariableOrigin, SubregionOrigin, MiscVariable};
use super::unify_key;
+use rustc_data_structures::fnv::{FnvHashMap, FnvHashSet};
use rustc_data_structures::graph::{self, Direction, NodeIndex, OUTGOING};
use rustc_data_structures::unify::{self, UnificationTable};
use middle::free_region::FreeRegionMap;
use ty::{BoundRegion, Region, RegionVid};
use ty::{ReEmpty, ReStatic, ReFree, ReEarlyBound};
use ty::{ReLateBound, ReScope, ReVar, ReSkolemized, BrFresh};
-use util::common::indenter;
-use util::nodemap::{FnvHashMap, FnvHashSet};
use std::cell::{Cell, RefCell};
use std::cmp::Ordering::{self, Less, Greater, Equal};
use std::fmt;
+use std::mem;
use std::u32;
use syntax::ast;
#[derive(Copy, Clone, PartialEq)]
pub enum UndoLogEntry {
+ /// Pushed when we start a snapshot.
OpenSnapshot,
+
+ /// Replaces an `OpenSnapshot` when a snapshot is committed, but
+ /// that snapshot is not the root. If the root snapshot is
+ /// unrolled, all nested snapshots must be committed.
CommitedSnapshot,
+
+ /// We added `RegionVid`
AddVar(RegionVid),
+
+ /// We added the given `constraint`
AddConstraint(Constraint),
+
+ /// We added the given `verify`
AddVerify(usize),
+
+ /// We added the given `given`
AddGiven(ty::FreeRegion, ty::RegionVid),
+
+ /// We added a GLB/LUB "combination variable"
AddCombination(CombineMapType, TwoRegions),
+
+ /// During skolemization, we sometimes purge entries from the undo
+ /// log in a kind of minisnapshot (unlike other snapshots, this
+ /// purging actually takes place *on success*). In that case, we
+ /// replace the corresponding entry with `Purged` so as to avoid the
+ /// need to do a bunch of swapping. (We can't use `swap_remove` as
+ /// the order of the vector is important.)
+ Purged,
}
#[derive(Copy, Clone, PartialEq)]
skolemization_count: u32,
}
+ /// When working with skolemized regions, we often wish to find all of
+ /// the regions that are either reachable from a skolemized region, or
+ /// which can reach a skolemized region, or both. We call such regions
+ /// *tainted* regions. This struct allows you to decide what set of
+ /// tainted regions you want.
+ #[derive(Debug)]
+ pub struct TaintDirections {
+ incoming: bool,
+ outgoing: bool,
+ }
+
+ impl TaintDirections {
+ /// Only regions with an edge *into* the target, i.e. `{r | r <= target}`.
+ pub fn incoming() -> Self {
+ TaintDirections { incoming: true, outgoing: false }
+ }
+
+ /// Only regions with an edge *out of* the target, i.e. `{r | target <= r}`.
+ pub fn outgoing() -> Self {
+ TaintDirections { incoming: false, outgoing: true }
+ }
+
+ /// Regions related in either direction.
+ pub fn both() -> Self {
+ TaintDirections { incoming: true, outgoing: true }
+ }
+ }
+
+ /// Worklist used to compute the set of regions "tainted" by an
+ /// initial region: starting from `initial_region`, constraint edges
+ /// recorded in the undo log are followed (in the requested
+ /// `directions`) until no new regions are added.
+ struct TaintSet {
+ directions: TaintDirections,
+ regions: FnvHashSet<ty::Region>
+ }
+
+ impl TaintSet {
+ /// Creates a taint set seeded with `initial_region`.
+ fn new(directions: TaintDirections,
+ initial_region: ty::Region)
+ -> Self {
+ let mut regions = FnvHashSet();
+ regions.insert(initial_region);
+ TaintSet { directions: directions, regions: regions }
+ }
+
+ /// Repeatedly rescans the undo log, adding any region connected by
+ /// an edge to a region already in the set, until the set stops
+ /// growing (a fixed point is reached).
+ fn fixed_point(&mut self,
+ undo_log: &[UndoLogEntry],
+ verifys: &[Verify]) {
+ let mut prev_len = 0;
+ while prev_len < self.len() {
+ debug!("tainted: prev_len = {:?} new_len = {:?}",
+ prev_len, self.len());
+
+ prev_len = self.len();
+
+ for undo_entry in undo_log {
+ match undo_entry {
+ &AddConstraint(ConstrainVarSubVar(a, b)) => {
+ self.add_edge(ReVar(a), ReVar(b));
+ }
+ &AddConstraint(ConstrainRegSubVar(a, b)) => {
+ self.add_edge(a, ReVar(b));
+ }
+ &AddConstraint(ConstrainVarSubReg(a, b)) => {
+ self.add_edge(ReVar(a), b);
+ }
+ &AddGiven(a, b) => {
+ self.add_edge(ReFree(a), ReVar(b));
+ }
+ &AddVerify(i) => {
+ match verifys[i] {
+ VerifyRegSubReg(_, a, b) => {
+ self.add_edge(a, b);
+ }
+ VerifyGenericBound(_, _, a, ref bound) => {
+ bound.for_each_region(&mut |b| {
+ self.add_edge(a, b);
+ });
+ }
+ }
+ }
+ // These entries record no region relationships.
+ &Purged |
+ &AddCombination(..) |
+ &AddVar(..) |
+ &OpenSnapshot |
+ &CommitedSnapshot => {}
+ }
+ }
+ }
+ }
+
+ /// Consumes the worklist, yielding the final tainted-region set.
+ fn into_set(self) -> FnvHashSet<ty::Region> {
+ self.regions
+ }
+
+ fn len(&self) -> usize {
+ self.regions.len()
+ }
+
+ /// Records the edge `source <= target`: when tracking incoming
+ /// taint, a tainted `target` taints `source`; when tracking
+ /// outgoing taint, a tainted `source` taints `target`.
+ fn add_edge(&mut self,
+ source: ty::Region,
+ target: ty::Region) {
+ if self.directions.incoming {
+ if self.regions.contains(&target) {
+ self.regions.insert(source);
+ }
+ }
+
+ if self.directions.outgoing {
+ if self.regions.contains(&source) {
+ self.regions.insert(target);
+ }
+ }
+ }
+ }
+
impl<'a, 'gcx, 'tcx> RegionVarBindings<'a, 'gcx, 'tcx> {
pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> RegionVarBindings<'a, 'gcx, 'tcx> {
RegionVarBindings {
debug!("RegionVarBindings: commit({})", snapshot.length);
assert!(self.undo_log.borrow().len() > snapshot.length);
assert!((*self.undo_log.borrow())[snapshot.length] == OpenSnapshot);
+ assert!(self.skolemization_count.get() == snapshot.skolemization_count,
+ "failed to pop skolemized regions: {} now vs {} at start",
+ self.skolemization_count.get(),
+ snapshot.skolemization_count);
let mut undo_log = self.undo_log.borrow_mut();
if snapshot.length == 0 {
} else {
(*undo_log)[snapshot.length] = CommitedSnapshot;
}
- self.skolemization_count.set(snapshot.skolemization_count);
self.unification_table.borrow_mut().commit(snapshot.region_snapshot);
}
assert!(undo_log.len() > snapshot.length);
assert!((*undo_log)[snapshot.length] == OpenSnapshot);
while undo_log.len() > snapshot.length + 1 {
- match undo_log.pop().unwrap() {
- OpenSnapshot => {
- bug!("Failure to observe stack discipline");
- }
- CommitedSnapshot => {}
- AddVar(vid) => {
- let mut var_origins = self.var_origins.borrow_mut();
- var_origins.pop().unwrap();
- assert_eq!(var_origins.len(), vid.index as usize);
- }
- AddConstraint(ref constraint) => {
- self.constraints.borrow_mut().remove(constraint);
- }
- AddVerify(index) => {
- self.verifys.borrow_mut().pop();
- assert_eq!(self.verifys.borrow().len(), index);
- }
- AddGiven(sub, sup) => {
- self.givens.borrow_mut().remove(&(sub, sup));
- }
- AddCombination(Glb, ref regions) => {
- self.glbs.borrow_mut().remove(regions);
- }
- AddCombination(Lub, ref regions) => {
- self.lubs.borrow_mut().remove(regions);
- }
- }
+ self.rollback_undo_entry(undo_log.pop().unwrap());
}
let c = undo_log.pop().unwrap();
assert!(c == OpenSnapshot);
.rollback_to(snapshot.region_snapshot);
}
+ /// Reverses the effect of a single undo-log entry. Invoked when
+ /// rolling back a snapshot, and also by `pop_skolemized`, which
+ /// purges individual entries without a full rollback.
+ pub fn rollback_undo_entry(&self, undo_entry: UndoLogEntry) {
+ match undo_entry {
+ OpenSnapshot => {
+ panic!("Failure to observe stack discipline");
+ }
+ Purged | CommitedSnapshot => {
+ // nothing to do here
+ }
+ AddVar(vid) => {
+ let mut var_origins = self.var_origins.borrow_mut();
+ var_origins.pop().unwrap();
+ assert_eq!(var_origins.len(), vid.index as usize);
+ }
+ AddConstraint(ref constraint) => {
+ self.constraints.borrow_mut().remove(constraint);
+ }
+ AddVerify(index) => {
+ self.verifys.borrow_mut().pop();
+ assert_eq!(self.verifys.borrow().len(), index);
+ }
+ AddGiven(sub, sup) => {
+ self.givens.borrow_mut().remove(&(sub, sup));
+ }
+ AddCombination(Glb, ref regions) => {
+ self.glbs.borrow_mut().remove(regions);
+ }
+ AddCombination(Lub, ref regions) => {
+ self.lubs.borrow_mut().remove(regions);
+ }
+ }
+ }
+
pub fn num_vars(&self) -> u32 {
let len = self.var_origins.borrow().len();
// enforce no overflow
return vid;
}
+ /// Returns the recorded origin of region variable `vid`.
+ /// Panics if `vid` does not correspond to a created variable.
+ pub fn var_origin(&self, vid: RegionVid) -> RegionVariableOrigin {
+ self.var_origins.borrow()[vid.index as usize].clone()
+ }
+
/// Creates a new skolemized region. Skolemized regions are fresh
/// regions used when performing higher-ranked computations. They
/// must be used in a very particular way and are never supposed
/// to "escape" out into error messages or the code at large.
///
/// The idea is to always create a snapshot. Skolemized regions
- /// can be created in the context of this snapshot, but once the
- /// snapshot is committed or rolled back, their numbers will be
- /// recycled, so you must be finished with them. See the extensive
- /// comments in `higher_ranked.rs` to see how it works (in
- /// particular, the subtyping comparison).
+ /// can be created in the context of this snapshot, but before the
+ /// snapshot is committed or rolled back, they must be popped
+ /// (using `pop_skolemized`), so that their numbers can be
+ /// recycled. Normally you don't have to think about this: you use
+ /// the APIs in `higher_ranked/mod.rs`, such as
+ /// `skolemize_late_bound_regions` and `plug_leaks`, which will
+ /// guide you on this path (ensure that the `SkolemizationMap` is
+ /// consumed and you are good). There are also somewhat extensive
+ /// comments in `higher_ranked/README.md`.
///
/// The `snapshot` argument to this function is not really used;
/// it's just there to make it explicit which snapshot bounds the
- /// skolemized region that results.
- pub fn new_skolemized(&self, br: ty::BoundRegion, snapshot: &RegionSnapshot) -> Region {
+ /// skolemized region that results. It should always be the top-most snapshot.
+ pub fn push_skolemized(&self, br: ty::BoundRegion, snapshot: &RegionSnapshot) -> Region {
assert!(self.in_snapshot());
assert!(self.undo_log.borrow()[snapshot.length] == OpenSnapshot);
ReSkolemized(ty::SkolemizedRegionVid { index: sc }, br)
}
+ /// Removes all the edges to/from the skolemized regions that are
+ /// in `skols`. This is used after a higher-ranked operation
+ /// completes to remove all trace of the skolemized regions
+ /// created in that time.
+ pub fn pop_skolemized(&self,
+ skols: &FnvHashSet<ty::Region>,
+ snapshot: &RegionSnapshot) {
+ debug!("pop_skolemized_regions(skols={:?})", skols);
+
+ assert!(self.in_snapshot());
+ assert!(self.undo_log.borrow()[snapshot.length] == OpenSnapshot);
+ assert!(self.skolemization_count.get() as usize >= skols.len(),
+ "popping more skolemized variables than actually exist, \
+ sc now = {}, skols.len = {}",
+ self.skolemization_count.get(),
+ skols.len());
+
+ // Skolemized regions are numbered sequentially, so the regions
+ // being popped must occupy exactly the index range
+ // `first_to_pop..last_to_pop` at the top of the counter.
+ let last_to_pop = self.skolemization_count.get();
+ let first_to_pop = last_to_pop - (skols.len() as u32);
+
+ assert!(first_to_pop >= snapshot.skolemization_count,
+ "popping more regions than snapshot contains, \
+ sc now = {}, sc then = {}, skols.len = {}",
+ self.skolemization_count.get(),
+ snapshot.skolemization_count,
+ skols.len());
+ debug_assert! {
+ skols.iter()
+ .all(|k| match *k {
+ ty::ReSkolemized(index, _) =>
+ index.index >= first_to_pop &&
+ index.index < last_to_pop,
+ _ =>
+ false
+ }),
+ "invalid skolemization keys or keys out of range ({}..{}): {:?}",
+ snapshot.skolemization_count,
+ self.skolemization_count.get(),
+ skols
+ }
+
+ let mut undo_log = self.undo_log.borrow_mut();
+
+ // Collect (newest first) the indices of every undo-log entry
+ // recording a constraint that mentions one of the popped regions.
+ let constraints_to_kill: Vec<usize> =
+ undo_log.iter()
+ .enumerate()
+ .rev()
+ .filter(|&(_, undo_entry)| kill_constraint(skols, undo_entry))
+ .map(|(index, _)| index)
+ .collect();
+
+ // Replace each such entry with `Purged` (preserving log order)
+ // and undo its effect on the constraint tables.
+ for index in constraints_to_kill {
+ let undo_entry = mem::replace(&mut undo_log[index], Purged);
+ self.rollback_undo_entry(undo_entry);
+ }
+
+ self.skolemization_count.set(snapshot.skolemization_count);
+ return;
+
+ // True if this undo-log entry records a constraint, given, or
+ // combination involving one of the regions being popped.
+ fn kill_constraint(skols: &FnvHashSet<ty::Region>,
+ undo_entry: &UndoLogEntry)
+ -> bool {
+ match undo_entry {
+ &AddConstraint(ConstrainVarSubVar(_, _)) =>
+ false,
+ &AddConstraint(ConstrainRegSubVar(a, _)) =>
+ skols.contains(&a),
+ &AddConstraint(ConstrainVarSubReg(_, b)) =>
+ skols.contains(&b),
+ &AddGiven(_, _) =>
+ false,
+ &AddVerify(_) =>
+ false,
+ &AddCombination(_, ref two_regions) =>
+ skols.contains(&two_regions.a) ||
+ skols.contains(&two_regions.b),
+ &AddVar(..) |
+ &OpenSnapshot |
+ &Purged |
+ &CommitedSnapshot =>
+ false,
+ }
+ }
+
+ }
+
+
pub fn new_bound(&self, debruijn: ty::DebruijnIndex) -> Region {
// Creates a fresh bound variable for use in GLB computations.
// See discussion of GLB computation in the large comment at
.collect()
}
- /// Computes all regions that have been related to `r0` in any way since the mark `mark` was
- /// made---`r0` itself will be the first entry. This is used when checking whether skolemized
- /// regions are being improperly related to other regions.
- pub fn tainted(&self, mark: &RegionSnapshot, r0: Region) -> Vec<Region> {
- debug!("tainted(mark={:?}, r0={:?})", mark, r0);
- let _indenter = indenter();
+ /// Computes all regions that have been related to `r0` since the
+ /// mark `mark` was made---`r0` itself will be the first
+ /// entry. The `directions` parameter controls what kind of
+ /// relations are considered. For example, one can say that only
+ /// "incoming" edges to `r0` are desired, in which case one will
+ /// get the set of regions `{r|r <= r0}`. This is used when
+ /// checking whether skolemized regions are being improperly
+ /// related to other regions.
+ pub fn tainted(&self,
+ mark: &RegionSnapshot,
+ r0: Region,
+ directions: TaintDirections)
+ -> FnvHashSet<ty::Region> {
+ debug!("tainted(mark={:?}, r0={:?}, directions={:?})",
+ mark, r0, directions);
// `result_set` acts as a worklist: we explore all outgoing
// edges and add any new regions we find to result_set. This
// is not a terribly efficient implementation.
- let mut result_set = vec![r0];
- let mut result_index = 0;
- while result_index < result_set.len() {
- // nb: can't use usize::range() here because result_set grows
- let r = result_set[result_index];
- debug!("result_index={}, r={:?}", result_index, r);
-
- for undo_entry in self.undo_log.borrow()[mark.length..].iter() {
- match undo_entry {
- &AddConstraint(ConstrainVarSubVar(a, b)) => {
- consider_adding_bidirectional_edges(&mut result_set, r, ReVar(a), ReVar(b));
- }
- &AddConstraint(ConstrainRegSubVar(a, b)) => {
- consider_adding_bidirectional_edges(&mut result_set, r, a, ReVar(b));
- }
- &AddConstraint(ConstrainVarSubReg(a, b)) => {
- consider_adding_bidirectional_edges(&mut result_set, r, ReVar(a), b);
- }
- &AddGiven(a, b) => {
- consider_adding_bidirectional_edges(&mut result_set,
- r,
- ReFree(a),
- ReVar(b));
- }
- &AddVerify(i) => {
- match (*self.verifys.borrow())[i] {
- VerifyRegSubReg(_, a, b) => {
- consider_adding_bidirectional_edges(&mut result_set, r, a, b);
- }
- VerifyGenericBound(_, _, a, ref bound) => {
- bound.for_each_region(&mut |b| {
- consider_adding_bidirectional_edges(&mut result_set, r, a, b)
- });
- }
- }
- }
- &AddCombination(..) |
- &AddVar(..) |
- &OpenSnapshot |
- &CommitedSnapshot => {}
- }
- }
-
- result_index += 1;
- }
-
- return result_set;
-
- fn consider_adding_bidirectional_edges(result_set: &mut Vec<Region>,
- r: Region,
- r1: Region,
- r2: Region) {
- consider_adding_directed_edge(result_set, r, r1, r2);
- consider_adding_directed_edge(result_set, r, r2, r1);
- }
-
- fn consider_adding_directed_edge(result_set: &mut Vec<Region>,
- r: Region,
- r1: Region,
- r2: Region) {
- if r == r1 {
- // Clearly, this is potentially inefficient.
- if !result_set.iter().any(|x| *x == r2) {
- result_set.push(r2);
- }
- }
- }
+ // Delegate to `TaintSet`, which seeds itself with `r0` and then
+ // iterates over the constraints recorded since `mark` until a
+ // fixed point is reached.
+ let mut taint_set = TaintSet::new(directions, r0);
+ taint_set.fixed_point(&self.undo_log.borrow()[mark.length..],
+ &self.verifys.borrow());
+ debug!("tainted: result={:?}", taint_set.regions);
+ return taint_set.into_set();
}
/// This function performs the actual region resolution. It must be
let skol_obligation = obligation.with(skol_predicate);
match project_and_unify_type(selcx, &skol_obligation) {
Ok(result) => {
- match infcx.leak_check(false, &skol_map, snapshot) {
+ let span = obligation.cause.span;
+ match infcx.leak_check(false, span, &skol_map, snapshot) {
Ok(()) => Ok(infcx.plug_leaks(skol_map, snapshot, &result)),
Err(e) => Err(MismatchedProjectionTypes { err: e }),
}
depth: usize)
-> Option<NormalizedTy<'tcx>>
{
- debug!("normalize_projection_type(\
+ let infcx = selcx.infcx();
+
+ let projection_ty = infcx.resolve_type_vars_if_possible(&projection_ty);
+
+ debug!("opt_normalize_projection_type(\
projection_ty={:?}, \
depth={})",
projection_ty,
// an impl, where-clause etc) and hence we must
// re-normalize it
- debug!("normalize_projection_type: projected_ty={:?} depth={} obligations={:?}",
+ debug!("opt_normalize_projection_type: \
+ projected_ty={:?} depth={} obligations={:?}",
projected_ty,
depth,
obligations);
let mut normalizer = AssociatedTypeNormalizer::new(selcx, cause, depth+1);
let normalized_ty = normalizer.fold(&projected_ty);
- debug!("normalize_projection_type: normalized_ty={:?} depth={}",
+ debug!("opt_normalize_projection_type: \
+ normalized_ty={:?} depth={}",
normalized_ty,
depth);
}
}
Ok(ProjectedTy::NoProgress(projected_ty)) => {
- debug!("normalize_projection_type: projected_ty={:?} no progress",
+ debug!("opt_normalize_projection_type: \
+ projected_ty={:?} no progress",
projected_ty);
Some(Normalized {
value: projected_ty,
})
}
Err(ProjectionTyError::TooManyCandidates) => {
- debug!("normalize_projection_type: too many candidates");
+ debug!("opt_normalize_projection_type: \
+ too many candidates");
None
}
Err(ProjectionTyError::TraitSelectionError(_)) => {
- debug!("normalize_projection_type: ERROR");
+ debug!("opt_normalize_projection_type: ERROR");
// if we got an error processing the `T as Trait` part,
// just return `ty::err` but add the obligation `T :
// Trait`, which when processed will cause the error to be
use std::cell::RefCell;
use std::fmt;
use std::marker::PhantomData;
+use std::mem;
use std::rc::Rc;
use syntax::abi::Abi;
use hir;
skol_trait_predicate.trait_ref.clone(),
&skol_map,
snapshot);
+
+ self.infcx.pop_skolemized(skol_map, snapshot);
+
assert!(result);
true
}
Err(_) => { return false; }
}
- self.infcx.leak_check(false, skol_map, snapshot).is_ok()
+ self.infcx.leak_check(false, obligation.cause.span, skol_map, snapshot).is_ok()
}
/// Given an obligation like `<SomeTrait for T>`, search the obligations that the caller
self.tcx(),
obligation.predicate.0.trait_ref.self_ty(),
|impl_def_id| {
- self.probe(|this, snapshot| {
- if let Ok(_) = this.match_impl(impl_def_id, obligation, snapshot) {
- candidates.vec.push(ImplCandidate(impl_def_id));
+ self.probe(|this, snapshot| { /* [1] */
+ match this.match_impl(impl_def_id, obligation, snapshot) {
+ Ok(skol_map) => {
+ candidates.vec.push(ImplCandidate(impl_def_id));
+
+ // NB: we can safely drop the skol map
+ // since we are in a probe [1]
+ mem::drop(skol_map);
+ }
+ Err(_) => { }
}
});
}
return;
}
- self.probe(|this, snapshot| {
- let (self_ty, _) =
- this.infcx().skolemize_late_bound_regions(&obligation.self_ty(), snapshot);
+ self.probe(|this, _snapshot| {
+ // the code below doesn't care about regions, and the
+ // self-ty here doesn't escape this probe, so just erase
+ // any LBR.
+ let self_ty = this.tcx().erase_late_bound_regions(&obligation.self_ty());
let poly_trait_ref = match self_ty.sty {
ty::TyTrait(ref data) => {
match this.tcx().lang_items.to_builtin_kind(obligation.predicate.def_id()) {
})?;
self.inferred_obligations.extend(obligations);
- if let Err(e) = self.infcx.leak_check(false, &skol_map, snapshot) {
+ if let Err(e) = self.infcx.leak_check(false,
+ obligation.cause.span,
+ &skol_map,
+ snapshot) {
debug!("match_impl: failed leak check due to `{}`", e);
return Err(());
}
// except according to those terms.
use middle::free_region::FreeRegionMap;
-use rustc::infer::{self, InferOk, TypeOrigin};
+use rustc::infer::{self, InferOk, InferResult, TypeOrigin};
use rustc::ty;
use rustc::traits::{self, ProjectionMode};
use rustc::ty::subst::{self, Subst, Substs, VecPerParamSpace};
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Targeted tests for the higher-ranked subtyping code.
+
+#![feature(rustc_attrs)]
+#![allow(dead_code)]
+
+// revisions: bound_a_vs_bound_a
+// revisions: bound_a_vs_bound_b
+// revisions: bound_inv_a_vs_bound_inv_b
+// revisions: bound_co_a_vs_bound_co_b
+// revisions: bound_a_vs_free_x
+// revisions: free_x_vs_free_x
+// revisions: free_x_vs_free_y
+// revisions: free_inv_x_vs_free_inv_y
+// revisions: bound_a_b_vs_bound_a
+// revisions: bound_co_a_b_vs_bound_co_a
+// revisions: bound_contra_a_contra_b_ret_co_a
+// revisions: bound_co_a_co_b_ret_contra_a
+// revisions: bound_inv_a_b_vs_bound_inv_a
+// revisions: bound_a_b_ret_a_vs_bound_a_ret_a
+
+fn gimme<T>(_: Option<T>) { }
+
+struct Inv<'a> { x: *mut &'a u32 }
+
+struct Co<'a> { x: fn(&'a u32) }
+
+struct Contra<'a> { x: &'a u32 }
+
+macro_rules! check {
+ ($rev:ident: ($t1:ty, $t2:ty)) => {
+ #[cfg($rev)]
+ fn subtype<'x,'y:'x,'z:'y>() {
+ gimme::<$t2>(None::<$t1>);
+ //[free_inv_x_vs_free_inv_y]~^ ERROR mismatched types
+ }
+
+ #[cfg($rev)]
+ fn supertype<'x,'y:'x,'z:'y>() {
+ gimme::<$t1>(None::<$t2>);
+ //[bound_a_vs_free_x]~^ ERROR mismatched types
+ //[free_x_vs_free_y]~^^ ERROR mismatched types
+ //[bound_inv_a_b_vs_bound_inv_a]~^^^ ERROR mismatched types
+ //[bound_a_b_ret_a_vs_bound_a_ret_a]~^^^^ ERROR mismatched types
+ //[free_inv_x_vs_free_inv_y]~^^^^^ ERROR mismatched types
+ }
+ }
+}
+
+// If both have bound regions, they are equivalent, regardless of
+// variance.
+check! { bound_a_vs_bound_a: (for<'a> fn(&'a u32),
+ for<'a> fn(&'a u32)) }
+check! { bound_a_vs_bound_b: (for<'a> fn(&'a u32),
+ for<'b> fn(&'b u32)) }
+check! { bound_inv_a_vs_bound_inv_b: (for<'a> fn(Inv<'a>),
+ for<'b> fn(Inv<'b>)) }
+check! { bound_co_a_vs_bound_co_b: (for<'a> fn(Co<'a>),
+ for<'b> fn(Co<'b>)) }
+
+// Bound is a subtype of free.
+check! { bound_a_vs_free_x: (for<'a> fn(&'a u32),
+ fn(&'x u32)) }
+
+// Two free regions are relatable if subtyping holds.
+check! { free_x_vs_free_x: (fn(&'x u32),
+ fn(&'x u32)) }
+check! { free_x_vs_free_y: (fn(&'x u32),
+ fn(&'y u32)) }
+check! { free_inv_x_vs_free_inv_y: (fn(Inv<'x>),
+ fn(Inv<'y>)) }
+
+// Somewhat surprisingly, a fn taking two distinct bound lifetimes and
+// a fn taking one bound lifetime can be interchangeable, but only if
+// we are co- or contra-variant with respect to both lifetimes.
+//
+// The reason is:
+// - if we are covariant, then 'a and 'b can be set to the call-site
+// intersection;
+// - if we are contravariant, then 'a can be inferred to 'static.
+check! { bound_a_b_vs_bound_a: (for<'a,'b> fn(&'a u32, &'b u32),
+ for<'a> fn(&'a u32, &'a u32)) }
+check! { bound_co_a_b_vs_bound_co_a: (for<'a,'b> fn(Co<'a>, Co<'b>),
+ for<'a> fn(Co<'a>, Co<'a>)) }
+check! { bound_contra_a_contra_b_ret_co_a: (for<'a,'b> fn(Contra<'a>, Contra<'b>) -> Co<'a>,
+ for<'a> fn(Contra<'a>, Contra<'a>) -> Co<'a>) }
+check! { bound_co_a_co_b_ret_contra_a: (for<'a,'b> fn(Co<'a>, Co<'b>) -> Contra<'a>,
+ for<'a> fn(Co<'a>, Co<'a>) -> Contra<'a>) }
+
+// If we make those lifetimes invariant, then the two types are not interchangeable.
+check! { bound_inv_a_b_vs_bound_inv_a: (for<'a,'b> fn(Inv<'a>, Inv<'b>),
+ for<'a> fn(Inv<'a>, Inv<'a>)) }
+check! { bound_a_b_ret_a_vs_bound_a_ret_a: (for<'a,'b> fn(&'a u32, &'b u32) -> &'a u32,
+ for<'a> fn(&'a u32, &'a u32) -> &'a u32) }
+
+#[rustc_error]
+fn main() {
+//[bound_a_vs_bound_a]~^ ERROR compilation successful
+//[bound_a_vs_bound_b]~^^ ERROR compilation successful
+//[bound_inv_a_vs_bound_inv_b]~^^^ ERROR compilation successful
+//[bound_co_a_vs_bound_co_b]~^^^^ ERROR compilation successful
+//[free_x_vs_free_x]~^^^^^ ERROR compilation successful
+//[bound_a_b_vs_bound_a]~^^^^^^ ERROR compilation successful
+//[bound_co_a_b_vs_bound_co_a]~^^^^^^^ ERROR compilation successful
+//[bound_contra_a_contra_b_ret_co_a]~^^^^^^^^ ERROR compilation successful
+//[bound_co_a_co_b_ret_contra_a]~^^^^^^^^^ ERROR compilation successful
+}
fn make_object3<'a,'b,A:SomeTrait+'a>(v: A) -> Box<SomeTrait+'b> {
box v as Box<SomeTrait+'b>
- //~^ ERROR the parameter type `A` may not live long enough
- //~^^ ERROR the parameter type `A` may not live long enough
+ //~^ ERROR E0478
}
fn main() { }
fn foo(&self) { }
}
-impl Contravariant for for<'a,'b> fn(&'a u8, &'b u8) {
+impl Contravariant for for<'a,'b> fn(&'a u8, &'b u8) -> &'a u8 {
}
-impl Contravariant for for<'a> fn(&'a u8, &'a u8) {
+impl Contravariant for for<'a> fn(&'a u8, &'a u8) -> &'a u8 {
}
///////////////////////////////////////////////////////////////////////////
fn foo(&self) { }
}
-impl Covariant for for<'a,'b> fn(&'a u8, &'b u8) {
+impl Covariant for for<'a,'b> fn(&'a u8, &'b u8) -> &'a u8 {
}
-impl Covariant for for<'a> fn(&'a u8, &'a u8) {
+impl Covariant for for<'a> fn(&'a u8, &'a u8) -> &'a u8 {
}
///////////////////////////////////////////////////////////////////////////
fn foo(&self) { }
}
-impl Invariant for for<'a,'b> fn(&'a u8, &'b u8) {
+impl Invariant for for<'a,'b> fn(&'a u8, &'b u8) -> &'a u8 {
}
-impl Invariant for for<'a> fn(&'a u8, &'a u8) {
+impl Invariant for for<'a> fn(&'a u8, &'a u8) -> &'a u8 {
}
fn main() { }