1 use rustc_index::bit_set::BitMatrix;
2 use crate::fx::FxHashMap;
3 use crate::stable_hasher::{HashStable, StableHasher};
5 use rustc_serialize::{Encodable, Encoder, Decodable, Decoder};
/// A relation `R` closed under transitivity: adding `a R b` and `b R c`
/// makes `a R c` hold as well. Base edges are stored explicitly; the
/// transitive closure is computed lazily and cached (see `closure`).
13 #[derive(Clone, Debug)]
14 pub struct TransitiveRelation<T: Eq + Hash> {
15 // List of elements. This is used to map from a T to a usize.
18 // Maps each element to an index.
19 map: FxHashMap<T, Index>,
21 // List of base edges in the graph. Required to compute transitive
25 // This is a cached transitive closure derived from the edges.
26 // Currently, we build it lazily and just throw out any existing
27 // copy whenever a new edge is added. (The Lock is to permit
28 // the lazy computation.) This is kind of silly, except for the
29 // fact its size is tied to `self.elements.len()`, so I wanted to
30 // wait before building it up to avoid reallocating as new edges
31 // are added with new elements. Perhaps better would be to ask the
32 // user for a batch of edges to minimize this effect, but I
33 // already wrote the code this way. :P -nmatsakis
34 closure: Lock<Option<BitMatrix<usize, usize>>>,
37 // HACK(eddyb) manual impl avoids `Default` bound on `T`.
38 impl<T: Eq + Hash> Default for TransitiveRelation<T> {
// An empty relation: no elements, no edges, and no cached closure.
39 fn default() -> Self {
41 elements: Default::default(),
42 map: Default::default(),
43 edges: Default::default(),
44 closure: Default::default(),
// Derives for the `Index` newtype (a wrapper around an element's position
// in `elements` — see `Index(elements.len() - 1)` in `add_index`).
// NOTE(review): `PartialOrd` without `Ord` — presumably deliberate; confirm.
49 #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, RustcEncodable, RustcDecodable, Debug)]
// Derives for the `Edge` type, a base edge of the relation holding
// `source`/`target` indices (see uses of `edge.source.0` / `edge.target.0`).
52 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)]
58 impl<T: Clone + Debug + Eq + Hash> TransitiveRelation<T> {
    /// Whether the relation is empty — TODO confirm which field is
    /// checked; the body is not shown in this view.
59 pub fn is_empty(&self) -> bool {
    /// Iterates over all elements, in the order they were first added.
63 pub fn elements(&self) -> impl Iterator<Item=&T> + ExactSizeIterator {
    /// Looks up the dense `Index` previously assigned to `a`, if any.
67 fn index(&self, a: &T) -> Option<Index> {
68 self.map.get(a).cloned()
    /// Returns the index for `a`, presumably inserting it into
    /// `elements`/`map` first if not already present (interning logic
    /// is partly outside this view — confirm against full source).
71 fn add_index(&mut self, a: T) -> Index {
72 let &mut TransitiveRelation {
83 // if we changed the dimensions, clear the cache
84 *closure.get_mut() = None;
    // The freshly pushed element is the last one, hence `len() - 1`.
86 Index(elements.len() - 1)
90 /// Applies the (partial) function to each edge and returns a new
91 /// relation. If `f` returns `None` for any end-point, returns
/// `None` overall (the `?` on each mapped end-point propagates it).
93 pub fn maybe_map<F, U>(&self, mut f: F) -> Option<TransitiveRelation<U>>
94 where F: FnMut(&T) -> Option<U>,
// NOTE(review): `Clone` appears twice in this bound; the duplicate is
// harmless but should be removed.
95 U: Clone + Debug + Eq + Hash + Clone,
97 let mut result = TransitiveRelation::default();
// Re-add every base edge with both end-points mapped through `f`;
// `add` re-interns the mapped values in the new relation.
98 for edge in &self.edges {
99 result.add(f(&self.elements[edge.source.0])?, f(&self.elements[edge.target.0])?);
104 /// Indicate that `a < b` (where `<` is this relation)
105 pub fn add(&mut self, a: T, b: T) {
// Intern both end-points, creating indices as needed.
106 let a = self.add_index(a);
107 let b = self.add_index(b);
// Only push a *new* edge (`edge` is built from `a`/`b` on a line not
// shown here); duplicates would waste space and force a needless
// closure recomputation.
112 if !self.edges.contains(&edge) {
113 self.edges.push(edge);
115 // added an edge, clear the cache
116 *self.closure.get_mut() = None;
120 /// Checks whether `a < b` (transitively).
121 pub fn contains(&self, a: &T, b: &T) -> bool {
122 match (self.index(a), self.index(b)) {
123 (Some(a), Some(b)) => self.with_closure(|closure| closure.contains(a.0, b.0)),
// An element never added to the relation cannot be related to anything.
124 (None, _) | (_, None) => false,
128 /// Thinking of `x R y` as an edge `x -> y` in a graph, this
129 /// returns all things reachable from `a`.
131 /// Really this probably ought to be `impl Iterator<Item = &T>`, but
132 /// I'm too lazy to make that work, and -- given the caching
133 /// strategy -- it'd be a touch tricky anyhow.
134 pub fn reachable_from(&self, a: &T) -> Vec<&T> {
135 match self.index(a) {
// Row `a` of the closure matrix holds one bit per reachable element.
136 Some(a) => self.with_closure(|closure| {
137 closure.iter(a.0).map(|i| &self.elements[i]).collect()
143 /// Picks what I am referring to as the "postdominating"
144 /// upper-bound for `a` and `b`. This is usually the least upper
145 /// bound, but in cases where there is no single least upper
146 /// bound, it is the "mutual immediate postdominator", if you
147 /// imagine a graph where `a < b` means `a -> b`.
149 /// This function is needed because region inference currently
150 /// requires that we produce a single "UB", and there is no best
151 /// choice for the LUB. Rather than pick arbitrarily, I pick a
152 /// less good, but predictable choice. This should help ensure
153 /// that region inference yields predictable results (though it
154 /// itself is not fully sufficient).
156 /// Examples are probably clearer than any prose I could write
157 /// (there are corresponding tests below, btw). In each case,
158 /// the query is `postdom_upper_bound(a, b)`:
161 /// // Returns Some(x), which is also LUB.
167 /// // Returns `Some(x)`, which is not LUB (there is none)
168 /// // diagonal edges run left-to-right.
174 /// // Returns `None`.
178 pub fn postdom_upper_bound(&self, a: &T, b: &T) -> Option<&T> {
// Gather every minimal upper bound, then collapse the set to a
// single mutual postdominator (if one exists).
179 let mubs = self.minimal_upper_bounds(a, b);
180 self.mutual_immediate_postdominator(mubs)
183 /// Viewing the relation as a graph, computes the "mutual
184 /// immediate postdominator" of a set of points (if one
185 /// exists). See `postdom_upper_bound` for details.
186 pub fn mutual_immediate_postdominator<'a>(&'a self, mut mubs: Vec<&'a T>) -> Option<&'a T> {
// A single remaining candidate is the answer.
190 1 => return Some(mubs[0]),
// Otherwise repeatedly replace the last two candidates with their
// minimal upper bounds until the set shrinks to one (the enclosing
// loop/match arms are outside this view — confirm against full source).
192 let m = mubs.pop().unwrap();
193 let n = mubs.pop().unwrap();
194 mubs.extend(self.minimal_upper_bounds(n, m));
200 /// Returns the set of bounds `X` such that:
202 /// - `a < X` and `b < X`
203 /// - there is no `Y != X` such that `a < Y` and `Y < X`
204 /// - except for the case where `X < a` (i.e., a strongly connected
205 /// component in the graph). In that case, the smallest
206 /// representative of the SCC is returned (as determined by the
207 /// internal indices).
209 /// Note that this set can, in principle, have any size.
210 pub fn minimal_upper_bounds(&self, a: &T, b: &T) -> Vec<&T> {
211 let (mut a, mut b) = match (self.index(a), self.index(b)) {
212 (Some(a), Some(b)) => (a, b),
// Unknown elements have no upper bounds at all.
213 (None, _) | (_, None) => {
218 // in some cases, there are some arbitrary choices to be made;
219 // it doesn't really matter what we pick, as long as we pick
220 // the same thing consistently when queried, so ensure that
221 // (a, b) are in a consistent relative order
223 mem::swap(&mut a, &mut b);
226 let lub_indices = self.with_closure(|closure| {
227 // Easy case is when either a < b or b < a:
228 if closure.contains(a.0, b.0) {
231 if closure.contains(b.0, a.0) {
235 // Otherwise, the tricky part is that there may be some c
236 // where a < c and b < c. In fact, there may be many such
237 // values. So here is what we do:
239 // 1. Find the vector `[X | a < X && b < X]` of all values
240 // `X` where `a < X` and `b < X`. In terms of the
241 // graph, this means all values reachable from both `a`
242 // and `b`. Note that this vector is also a set, but we
243 // use the term vector because the order matters
244 // to the steps below.
245 // - This vector contains upper bounds, but they are
246 // not minimal upper bounds. So you may have e.g.
247 // `[x, y, tcx, z]` where `x < tcx` and `y < tcx` and
248 // `z < x` and `z < y`:
250 // z --+---> x ----+----> tcx
255 // In this case, we really want to return just `[z]`.
256 // The following steps below achieve this by gradually
257 // reducing the list.
258 // 2. Pare down the vector using `pare_down`. This will
259 // remove elements from the vector that can be reached
260 // by an earlier element.
261 // - In the example above, this would convert `[x, y,
262 // tcx, z]` to `[x, y, z]`. Note that `x` and `y` are
263 // still in the vector; this is because while `z < x`
264 // (and `z < y`) holds, `z` comes after them in the
266 // 3. Reverse the vector and repeat the pare down process.
267 // - In the example above, we would reverse to
268 // `[z, y, x]` and then pare down to `[z]`.
269 // 4. Reverse once more just so that we yield a vector in
270 // increasing order of index. Not necessary, but why not.
272 // I believe this algorithm yields a minimal set. The
273 // argument is that, after step 2, we know that no element
274 // can reach its successors (in the vector, not the graph).
275 // After step 3, we know that no element can reach any of
276 // its predecessors (because of step 2) nor successors
277 // (because we just called `pare_down`)
279 // This same algorithm is used in `parents` below.
281 let mut candidates = closure.intersect_rows(a.0, b.0); // (1)
282 pare_down(&mut candidates, closure); // (2)
283 candidates.reverse(); // (3a)
284 pare_down(&mut candidates, closure); // (3b)
// Map the surviving indices back to element references.
288 lub_indices.into_iter()
290 .map(|i| &self.elements[i])
294 /// Given an element A, returns the maximal set {B} of elements B
299 /// - for each i, j: B[i] R B[j] does not hold
301 /// The intuition is that this moves "one step up" through a lattice
302 /// (where the relation is encoding the `<=` relation for the lattice).
303 /// So e.g., if the relation is `->` and we have
311 /// then `parents(a)` returns `[b, c]`. The `postdom_parent` function
312 /// would further reduce this to just `f`.
313 pub fn parents(&self, a: &T) -> Vec<&T> {
314 let a = match self.index(a) {
// An element never added to the relation has no parents.
316 None => return vec![]
319 // Steal the algorithm for `minimal_upper_bounds` above, but
320 // with a slight tweak. In the case where `a R a`, we remove
321 // that from the set of candidates.
322 let ancestors = self.with_closure(|closure| {
// Intersecting row `a` with itself yields everything reachable from `a`.
323 let mut ancestors = closure.intersect_rows(a.0, a.0);
325 // Remove anything that can reach `a`. If this is a
326 // reflexive relation, this will include `a` itself.
327 ancestors.retain(|&e| !closure.contains(e, a.0));
// Same pare-down/reverse/pare-down dance as `minimal_upper_bounds`.
329 pare_down(&mut ancestors, closure); // (2)
330 ancestors.reverse(); // (3a)
331 pare_down(&mut ancestors, closure); // (3b)
335 ancestors.into_iter()
337 .map(|i| &self.elements[i])
341 /// A "best" parent in some sense. See `parents` and
342 /// `postdom_upper_bound` for more details.
343 pub fn postdom_parent(&self, a: &T) -> Option<&T> {
// The mutual immediate postdominator of all of `a`'s parents.
344 self.mutual_immediate_postdominator(self.parents(a))
    /// Runs `op` with the transitive-closure matrix, computing and
    /// caching it first if no cached copy exists.
347 fn with_closure<OP, R>(&self, op: OP) -> R
348 where OP: FnOnce(&BitMatrix<usize, usize>) -> R
350 let mut closure_cell = self.closure.borrow_mut();
// Temporarily move the cached matrix out of the cell so `self` is
// not borrowed through the Lock while `op` runs.
351 let mut closure = closure_cell.take();
352 if closure.is_none() {
353 closure = Some(self.compute_closure());
// `unwrap` is safe: `closure` was just filled in if it was `None`.
355 let result = op(closure.as_ref().unwrap());
// Put the (possibly freshly computed) matrix back into the cache.
356 *closure_cell = closure;
    /// Computes the transitive closure of the base edges as an
    /// elements × elements bit matrix. The `changed` flag suggests an
    /// iterate-until-fixed-point outer loop, whose header is not shown
    /// in this view — confirm against the full source.
360 fn compute_closure(&self) -> BitMatrix<usize, usize> {
361 let mut matrix = BitMatrix::new(self.elements.len(),
362 self.elements.len());
363 let mut changed = true;
366 for edge in &self.edges {
367 // add an edge from S -> T
368 changed |= matrix.insert(edge.source.0, edge.target.0);
370 // add all outgoing edges from T into S
371 changed |= matrix.union_rows(edge.target.0, edge.source.0);
378 /// Pare down is used as a step in the LUB computation. It edits the
379 /// candidates array in place by removing any element j for which
380 /// there exists an earlier element i<j such that i -> j. That is,
381 /// after you run `pare_down`, you know that for all elements that
382 /// remain in candidates, they cannot reach any of the elements that
385 /// Examples follow. Assume that a -> b -> c and x -> y -> z.
387 /// - Input: `[a, b, x]`. Output: `[a, x]`.
388 /// - Input: `[b, a, x]`. Output: `[a, x]`. (NOTE(review): the
389 ///   original comment claimed `[b, a, x]` stays unchanged; since the
390 ///   relation is a transitive closure, `b -> c` removes nothing but
391 ///   later passes still apply — confirm expected output upstream.)
392 /// - Input: `[a, x, b, y]`. Output: `[a, x]`.
390 fn pare_down(candidates: &mut Vec<usize>, closure: &BitMatrix<usize, usize>) {
// Outer cursor `i`: the survivor whose reachability prunes later entries.
392 while i < candidates.len() {
393 let candidate_i = candidates[i];
398 while j < candidates.len() {
399 let candidate_j = candidates[j];
400 if closure.contains(candidate_i, candidate_j) {
401 // If `i` can reach `j`, then we can remove `j`. So just
402 // mark it as dead and move on; subsequent indices will be
403 // shifted into its place.
// Compact in place: shift survivors left over the `dead` removed slots.
406 candidates[j - dead] = candidate_j;
// Drop the trailing slots vacated by the compaction above.
410 candidates.truncate(j - dead);
    // Only `elements` and `edges` are serialized; `map` and `closure`
    // are derived state that the decoder reconstructs.
414 impl<T> Encodable for TransitiveRelation<T>
// NOTE(review): `Clone` appears twice in this bound; remove the duplicate.
415 where T: Clone + Encodable + Debug + Eq + Hash + Clone
417 fn encode<E: Encoder>(&self, s: &mut E) -> Result<(), E::Error> {
418 s.emit_struct("TransitiveRelation", 2, |s| {
419 s.emit_struct_field("elements", 0, |s| self.elements.encode(s))?;
420 s.emit_struct_field("edges", 1, |s| self.edges.encode(s))?;
426 impl<T> Decodable for TransitiveRelation<T>
// NOTE(review): `Clone` appears twice in this bound; remove the duplicate.
427 where T: Clone + Decodable + Debug + Eq + Hash + Clone
429 fn decode<D: Decoder>(d: &mut D) -> Result<Self, D::Error> {
430 d.read_struct("TransitiveRelation", 2, |d| {
431 let elements: Vec<T> = d.read_struct_field("elements", 0, |d| Decodable::decode(d))?;
432 let edges = d.read_struct_field("edges", 1, |d| Decodable::decode(d))?;
// Rebuild the element -> Index map from the decoded element list;
// the index of each element is its position in `elements`.
433 let map = elements.iter()
435 .map(|(index, elem)| (elem.clone(), Index(index)))
// The closure cache always starts empty and is recomputed on demand.
437 Ok(TransitiveRelation { elements, edges, map, closure: Lock::new(None) })
442 impl<CTX, T> HashStable<CTX> for TransitiveRelation<T>
443 where T: HashStable<CTX> + Eq + Debug + Clone + Hash
445 fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
446 // We are assuming here that the relation graph has been built in a
447 // deterministic way and we can just hash it the way it is.
// Destructure so the compiler flags any newly added field that is
// not explicitly hashed or explicitly ignored below.
448 let TransitiveRelation {
451 // "map" is just a copy of elements vec
453 // "closure" is just a copy of the data above
457 elements.hash_stable(hcx, hasher);
458 edges.hash_stable(hcx, hasher);
462 impl<CTX> HashStable<CTX> for Edge {
463 fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
// Hash both end-points of the edge (destructured on a line not shown).
469 source.hash_stable(hcx, hasher);
470 target.hash_stable(hcx, hasher);
474 impl<CTX> HashStable<CTX> for Index {
475 fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
// An Index hashes as its underlying usize.
476 let Index(idx) = *self;
477 idx.hash_stable(hcx, hasher);