1 // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
12 // This test exercises cases where cyclic structure is legal,
13 // including when the cycles go through data-structures such
14 // as `Vec` or `TypedArena`.
16 // The intent is to cover as many such cases as possible, ensuring
17 // that if the compiler did not complain circa Rust 1.x (1.2 as of
18 // this writing), then it will continue to not complain in the future.
20 // Note that while some of the tests are only exercising using the
21 // given collection as a "backing store" for a set of nodes that hold
22 // the actual cycle (and thus the cycle does not go through the
23 // collection itself in such cases), in general we *do* want to make
24 // sure to have at least one example exercising a cycle that goes
25 // through the collection, for every collection type that supports
28 // HIGH LEVEL DESCRIPTION OF THE TEST ARCHITECTURE
29 // -----------------------------------------------
31 // We pick a data structure and want to make a cyclic construction
32 // from it. Each test of interest is labelled starting with "Cycle N:
33 // { ... }" where N is the test number and the "..." is filled in with
34 // a graphviz-style description of the graph structure that the
35 // author believes is being made. So "{ a -> b, b -> (c,d), (c,d) -> e }"
36 // describes a line connected to a diamond:
44 // (Note that the above directed graph is actually acyclic.)
46 // The different graph structures are often composed of different data
47 // types. Some may be built atop `Vec`, others atop `HashMap`, etc.
49 // For each graph structure, we actually *confirm* that a cycle exists
50 // (as a safe-guard against a test author accidentally leaving it out)
51 // by traversing each graph and "proving" that a cycle exists within it.
53 // To do this, while trying to keep the code uniform (despite working
54 // with different underlying collection and smart-pointer types), we
55 // have a standard traversal API:
57 // 1. every node in the graph carries a `mark` (a u32, init'ed to 0).
59 // 2. every node provides a method to visit its children
61 // 3. a traversal attempts to visit the nodes of the graph and prove that
62 // it sees the same node twice. It does this by setting the mark of each
63 // node to a fresh non-zero value, and if it sees the current mark, it
64 // "knows" that it must have found a cycle, and stops attempting further
67 // 4. each traversal is controlled by a bit-string that tells it which child
68 // it visits when it can take different paths. As a simple example,
69 // in a binary tree, 0 could mean "left" (and 1, "right"), so that
70 // "00010" means "left, left, left, right, left". (In general it will
71 // read as many bits as it needs to choose one child.)
73 // The graphs in this test are all meant to be very small, and thus
74 // short bitstrings of less than 64 bits should always suffice.
76 // (An earlier version of this test infrastructure simply had any
77 // given traversal visit all children it encountered, in a
78 // depth-first manner; one problem with this approach is that an
79 // acyclic graph can still have sharing, which would then be treated
80 // as a repeat mark and reported as a detected cycle.)
82 // The traversal code is a little more complicated because it has been
83 // programmed in a somewhat defensive manner. For example it also has
84 // a max threshold for the number of nodes it will visit, to guard
85 // against scenarios where the nodes are not correctly setting their
86 // mark when asked. There are various other methods not discussed here
87 // that are for aiding debugging the test when it runs, such as the
88 // `name` method that all nodes provide.
92 // 1. allocates the nodes in the graph,
94 // 2. sets up the links in the graph,
96 // 3. clones the "ContextData"
98 // 4. chooses a new current mark value for this test
100 // 5. initiates a traversal, potentially from multiple starting points
101 // (aka "roots"), with a given control-string (potentially a
102 // different string for each root). if it does start from a
103 // distinct root, then such a test should also increment the
104 // current mark value, so that this traversal is considered
105 // distinct from the prior one on this graph structure.
107 // Note that most of the tests work with the default control string
110 // 6. assert that the context confirms that it actually saw a cycle (since a traversal
111 // might have terminated, e.g. on a tree structure that contained no cycles).
113 use std::cell::{Cell, RefCell};
114 use std::cmp::Ordering;
115 use std::collections::BinaryHeap;
116 use std::collections::HashMap;
117 use std::collections::LinkedList;
118 use std::collections::VecDeque;
119 use std::collections::btree_map::BTreeMap;
120 use std::collections::btree_set::BTreeSet;
121 use std::hash::{Hash, Hasher};
123 use std::sync::{Arc, RwLock, Mutex};
// When true, the traversal machinery emits an indented trace of every
// node visit (see the `PrePost for ContextData` impl near the end of
// this file); kept off so the test runs silently under the harness.
125 const PRINT: bool = false;
128 let c_orig = ContextData {
135 saw_prev_marked: false,
139 // SANITY CHECK FOR TEST SUITE (thus unnumbered)
140 // Not a cycle: { v[0] -> (v[1], v[2]), v[1] -> v[3], v[2] -> v[3] };
141 let v: Vec<S2> = vec![Named::new("s0"),
145 v[0].next.set((Some(&v[1]), Some(&v[2])));
146 v[1].next.set((Some(&v[3]), None));
147 v[2].next.set((Some(&v[3]), None));
148 v[3].next.set((None, None));
150 let mut c = c_orig.clone();
152 assert!(!c.saw_prev_marked);
153 v[0].descend_into_self(&mut c);
154 assert!(!c.saw_prev_marked); // <-- different from below, b/c acyclic above
156 if PRINT { println!(""); }
158 // Cycle 1: { v[0] -> v[1], v[1] -> v[0] };
159 // does not exercise `v` itself
160 let v: Vec<S> = vec![Named::new("s0"),
162 v[0].next.set(Some(&v[1]));
163 v[1].next.set(Some(&v[0]));
165 let mut c = c_orig.clone();
167 assert!(!c.saw_prev_marked);
168 v[0].descend_into_self(&mut c);
169 assert!(c.saw_prev_marked);
171 if PRINT { println!(""); }
173 // Cycle 2: { v[0] -> v, v[1] -> v }
174 let v: V = Named::new("v");
175 v.contents[0].set(Some(&v));
176 v.contents[1].set(Some(&v));
178 let mut c = c_orig.clone();
180 assert!(!c.saw_prev_marked);
181 v.descend_into_self(&mut c);
182 assert!(c.saw_prev_marked);
184 if PRINT { println!(""); }
186 // Cycle 3: { hk0 -> hv0, hv0 -> hk0, hk1 -> hv1, hv1 -> hk1 };
187 // does not exercise `h` itself
189 let mut h: HashMap<H,H> = HashMap::new();
190 h.insert(Named::new("hk0"), Named::new("hv0"));
191 h.insert(Named::new("hk1"), Named::new("hv1"));
192 for (key, val) in h.iter() {
193 val.next.set(Some(key));
194 key.next.set(Some(val));
197 let mut c = c_orig.clone();
199 for (key, _) in h.iter() {
201 c.saw_prev_marked = false;
202 key.descend_into_self(&mut c);
203 assert!(c.saw_prev_marked);
206 if PRINT { println!(""); }
208 // Cycle 4: { h -> (hmk0,hmv0,hmk1,hmv1), {hmk0,hmv0,hmk1,hmv1} -> h }
210 let mut h: HashMap<HM,HM> = HashMap::new();
211 h.insert(Named::new("hmk0"), Named::new("hmv0"));
// FIX: this line previously re-inserted "hmk0"/"hmv0"; HashMap::insert
// replaces on duplicate keys, so the map held a single entry and the
// second k/v pair named in the Cycle 4 description never existed.
212 h.insert(Named::new("hmk1"), Named::new("hmv1"));
213 for (key, val) in h.iter() {
214 val.contents.set(Some(&h));
215 key.contents.set(Some(&h));
218 let mut c = c_orig.clone();
221 for (key, _) in h.iter() {
223 c.saw_prev_marked = false;
224 key.descend_into_self(&mut c);
225 assert!(c.saw_prev_marked);
229 if PRINT { println!(""); }
231 // Cycle 5: { vd[0] -> vd[1], vd[1] -> vd[0] };
232 // does not exercise vd itself
233 let mut vd: VecDeque<S> = VecDeque::new();
234 vd.push_back(Named::new("d0"));
235 vd.push_back(Named::new("d1"));
236 vd[0].next.set(Some(&vd[1]));
237 vd[1].next.set(Some(&vd[0]));
239 let mut c = c_orig.clone();
241 assert!(!c.saw_prev_marked);
242 vd[0].descend_into_self(&mut c);
243 assert!(c.saw_prev_marked);
245 if PRINT { println!(""); }
247 // Cycle 6: { vd -> (vd0, vd1), {vd0, vd1} -> vd }
248 let mut vd: VecDeque<VD> = VecDeque::new();
249 vd.push_back(Named::new("vd0"));
250 vd.push_back(Named::new("vd1"));
251 vd[0].contents.set(Some(&vd));
252 vd[1].contents.set(Some(&vd));
254 let mut c = c_orig.clone();
256 assert!(!c.saw_prev_marked);
257 vd[0].descend_into_self(&mut c);
258 assert!(c.saw_prev_marked);
260 if PRINT { println!(""); }
262 // Cycle 7: { vm -> (vm0, vm1), {vm0, vm1} -> vm }
263 let mut vm: HashMap<usize, VM> = HashMap::new();
264 vm.insert(0, Named::new("vm0"));
265 vm.insert(1, Named::new("vm1"));
266 vm[&0].contents.set(Some(&vm));
267 vm[&1].contents.set(Some(&vm));
269 let mut c = c_orig.clone();
271 assert!(!c.saw_prev_marked);
272 vm[&0].descend_into_self(&mut c);
273 assert!(c.saw_prev_marked);
275 if PRINT { println!(""); }
277 // Cycle 8: { ll -> (ll0, ll1), {ll0, ll1} -> ll }
278 let mut ll: LinkedList<LL> = LinkedList::new();
279 ll.push_back(Named::new("ll0"));
280 ll.push_back(Named::new("ll1"));
282 e.contents.set(Some(&ll));
285 let mut c = c_orig.clone();
289 c.saw_prev_marked = false;
290 e.descend_into_self(&mut c);
291 assert!(c.saw_prev_marked);
295 if PRINT { println!(""); }
297 // Cycle 9: { bh -> (bh0, bh1), {bh0, bh1} -> bh }
298 let mut bh: BinaryHeap<BH> = BinaryHeap::new();
299 bh.push(Named::new("bh0"));
300 bh.push(Named::new("bh1"));
302 b.contents.set(Some(&bh));
305 let mut c = c_orig.clone();
309 c.saw_prev_marked = false;
310 b.descend_into_self(&mut c);
311 assert!(c.saw_prev_marked);
315 if PRINT { println!(""); }
317 // Cycle 10: { btm -> (btk0, btv0, btk1, btv1), {btk0, btv0, btk1, btv1} -> btm }
318 let mut btm: BTreeMap<BTM, BTM> = BTreeMap::new();
319 btm.insert(Named::new("btk0"), Named::new("btv0"));
320 btm.insert(Named::new("btk1"), Named::new("btv1"));
321 for (k, v) in btm.iter() {
322 k.contents.set(Some(&btm));
323 v.contents.set(Some(&btm));
326 let mut c = c_orig.clone();
330 c.saw_prev_marked = false;
331 k.descend_into_self(&mut c);
332 assert!(c.saw_prev_marked);
336 if PRINT { println!(""); }
338 // Cycle 10: { bts -> (bts0, bts1), {bts0, bts1} -> bts }
339 let mut bts: BTreeSet<BTS> = BTreeSet::new();
340 bts.insert(Named::new("bts0"));
341 bts.insert(Named::new("bts1"));
342 for v in bts.iter() {
343 v.contents.set(Some(&bts));
346 let mut c = c_orig.clone();
350 c.saw_prev_marked = false;
351 b.descend_into_self(&mut c);
352 assert!(c.saw_prev_marked);
356 if PRINT { println!(""); }
358 // Cycle 11: { rc0 -> (rc1, rc2), rc1 -> (), rc2 -> rc0 }
359 let (rc0, rc1, rc2): (RCRC, RCRC, RCRC);
360 rc0 = RCRC::new("rcrc0");
361 rc1 = RCRC::new("rcrc1");
362 rc2 = RCRC::new("rcrc2");
363 rc0.0.borrow_mut().children.0 = Some(&rc1);
364 rc0.0.borrow_mut().children.1 = Some(&rc2);
365 rc2.0.borrow_mut().children.0 = Some(&rc0);
367 let mut c = c_orig.clone();
368 c.control_bits = 0b1;
370 assert!(!c.saw_prev_marked);
371 rc0.descend_into_self(&mut c);
372 assert!(c.saw_prev_marked);
374 if PRINT { println!(""); }
376 // We want to take the previous Rc case and generalize it to Arc.
378 // We can use refcells if we're single-threaded (as this test is).
379 // If one were to generalize these constructions to a
380 // multi-threaded context, then it might seem like we could choose
381 // between either a RwLock or a Mutex to hold the owned arcs on
384 // Part of the point of this test is to actually confirm that the
385 // cycle exists by traversing it. We can do that just fine with an
386 // RwLock (since we can grab the child pointers in read-only
387 // mode), but we cannot lock a std::sync::Mutex to guard reading
388 // from each node via the same pattern, since once you hit the
389 // cycle, you'll be trying to acquire the same lock twice.
390 // (We deal with this by exiting the traversal early if try_lock fails.)
392 // Cycle 12: { arc0 -> (arc1, arc2), arc1 -> (), arc2 -> arc0 }, refcells
393 let (arc0, arc1, arc2): (ARCRC, ARCRC, ARCRC);
394 arc0 = ARCRC::new("arcrc0");
395 arc1 = ARCRC::new("arcrc1");
396 arc2 = ARCRC::new("arcrc2");
397 arc0.0.borrow_mut().children.0 = Some(&arc1);
398 arc0.0.borrow_mut().children.1 = Some(&arc2);
399 arc2.0.borrow_mut().children.0 = Some(&arc0);
401 let mut c = c_orig.clone();
402 c.control_bits = 0b1;
404 assert!(!c.saw_prev_marked);
405 arc0.descend_into_self(&mut c);
406 assert!(c.saw_prev_marked);
408 if PRINT { println!(""); }
410 // Cycle 13: { arc0 -> (arc1, arc2), arc1 -> (), arc2 -> arc0 }, rwlocks
411 let (arc0, arc1, arc2): (ARCRW, ARCRW, ARCRW);
412 arc0 = ARCRW::new("arcrw0");
413 arc1 = ARCRW::new("arcrw1");
414 arc2 = ARCRW::new("arcrw2");
415 arc0.0.write().unwrap().children.0 = Some(&arc1);
416 arc0.0.write().unwrap().children.1 = Some(&arc2);
417 arc2.0.write().unwrap().children.0 = Some(&arc0);
419 let mut c = c_orig.clone();
420 c.control_bits = 0b1;
422 assert!(!c.saw_prev_marked);
423 arc0.descend_into_self(&mut c);
424 assert!(c.saw_prev_marked);
426 if PRINT { println!(""); }
428 // Cycle 14: { arc0 -> (arc1, arc2), arc1 -> (), arc2 -> arc0 }, mutexs
429 let (arc0, arc1, arc2): (ARCM, ARCM, ARCM);
430 arc0 = ARCM::new("arcm0");
431 arc1 = ARCM::new("arcm1");
432 arc2 = ARCM::new("arcm2");
433 arc0.1.lock().unwrap().children.0 = Some(&arc1);
434 arc0.1.lock().unwrap().children.1 = Some(&arc2);
435 arc2.1.lock().unwrap().children.0 = Some(&arc0);
437 let mut c = c_orig.clone();
438 c.control_bits = 0b1;
440 assert!(!c.saw_prev_marked);
441 arc0.descend_into_self(&mut c);
442 assert!(c.saw_prev_marked);
446 fn new(_: &'static str) -> Self;
447 fn name(&self) -> &str;
452 fn set_mark(&self, mark: M);
458 next: Cell<Option<&'a S<'a>>>,
461 impl<'a> Named for S<'a> {
462 fn new(name: &'static str) -> S<'a> {
463 S { name: name, mark: Cell::new(0), next: Cell::new(None) }
465 fn name(&self) -> &str { self.name }
468 impl<'a> Marked<u32> for S<'a> {
469 fn mark(&self) -> u32 { self.mark.get() }
470 fn set_mark(&self, mark: u32) { self.mark.set(mark); }
476 next: Cell<(Option<&'a S2<'a>>, Option<&'a S2<'a>>)>,
479 impl<'a> Named for S2<'a> {
480 fn new(name: &'static str) -> S2<'a> {
481 S2 { name: name, mark: Cell::new(0), next: Cell::new((None, None)) }
483 fn name(&self) -> &str { self.name }
486 impl<'a> Marked<u32> for S2<'a> {
487 fn mark(&self) -> u32 { self.mark.get() }
488 fn set_mark(&self, mark: u32) {
496 contents: Vec<Cell<Option<&'a V<'a>>>>,
499 impl<'a> Named for V<'a> {
500 fn new(name: &'static str) -> V<'a> {
503 contents: vec![Cell::new(None), Cell::new(None)]
506 fn name(&self) -> &str { self.name }
509 impl<'a> Marked<u32> for V<'a> {
510 fn mark(&self) -> u32 { self.mark.get() }
511 fn set_mark(&self, mark: u32) { self.mark.set(mark); }
518 next: Cell<Option<&'a H<'a>>>,
521 impl<'a> Named for H<'a> {
522 fn new(name: &'static str) -> H<'a> {
523 H { name: name, mark: Cell::new(0), next: Cell::new(None) }
525 fn name(&self) -> &str { self.name }
528 impl<'a> Marked<u32> for H<'a> {
529 fn mark(&self) -> u32 { self.mark.get() }
530 fn set_mark(&self, mark: u32) { self.mark.set(mark); }
533 impl<'a> PartialEq for H<'a> {
534 fn eq(&self, rhs: &H<'a>) -> bool {
535 self.name == rhs.name
539 impl<'a> Hash for H<'a> {
540 fn hash<H: Hasher>(&self, state: &mut H) {
541 self.name.hash(state)
549 contents: Cell<Option<&'a HashMap<HM<'a>, HM<'a>>>>,
552 impl<'a> Named for HM<'a> {
553 fn new(name: &'static str) -> HM<'a> {
556 contents: Cell::new(None)
559 fn name(&self) -> &str { self.name }
562 impl<'a> Marked<u32> for HM<'a> {
563 fn mark(&self) -> u32 { self.mark.get() }
564 fn set_mark(&self, mark: u32) { self.mark.set(mark); }
567 impl<'a> PartialEq for HM<'a> {
568 fn eq(&self, rhs: &HM<'a>) -> bool {
569 self.name == rhs.name
573 impl<'a> Hash for HM<'a> {
574 fn hash<H: Hasher>(&self, state: &mut H) {
575 self.name.hash(state)
583 contents: Cell<Option<&'a VecDeque<VD<'a>>>>,
586 impl<'a> Named for VD<'a> {
587 fn new(name: &'static str) -> VD<'a> {
590 contents: Cell::new(None)
593 fn name(&self) -> &str { self.name }
596 impl<'a> Marked<u32> for VD<'a> {
597 fn mark(&self) -> u32 { self.mark.get() }
598 fn set_mark(&self, mark: u32) { self.mark.set(mark); }
604 contents: Cell<Option<&'a HashMap<usize, VM<'a>>>>,
607 impl<'a> Named for VM<'a> {
608 fn new(name: &'static str) -> VM<'a> {
611 contents: Cell::new(None)
614 fn name(&self) -> &str { self.name }
617 impl<'a> Marked<u32> for VM<'a> {
618 fn mark(&self) -> u32 { self.mark.get() }
619 fn set_mark(&self, mark: u32) { self.mark.set(mark); }
625 contents: Cell<Option<&'a LinkedList<LL<'a>>>>,
628 impl<'a> Named for LL<'a> {
629 fn new(name: &'static str) -> LL<'a> {
632 contents: Cell::new(None)
635 fn name(&self) -> &str { self.name }
638 impl<'a> Marked<u32> for LL<'a> {
639 fn mark(&self) -> u32 { self.mark.get() }
640 fn set_mark(&self, mark: u32) { self.mark.set(mark); }
646 contents: Cell<Option<&'a BinaryHeap<BH<'a>>>>,
649 impl<'a> Named for BH<'a> {
650 fn new(name: &'static str) -> BH<'a> {
653 contents: Cell::new(None)
656 fn name(&self) -> &str { self.name }
659 impl<'a> Marked<u32> for BH<'a> {
660 fn mark(&self) -> u32 { self.mark.get() }
661 fn set_mark(&self, mark: u32) { self.mark.set(mark); }
664 impl<'a> Eq for BH<'a> { }
666 impl<'a> PartialEq for BH<'a> {
667 fn eq(&self, rhs: &BH<'a>) -> bool {
668 self.name == rhs.name
672 impl<'a> PartialOrd for BH<'a> {
673 fn partial_cmp(&self, rhs: &BH<'a>) -> Option<Ordering> {
678 impl<'a> Ord for BH<'a> {
679 fn cmp(&self, rhs: &BH<'a>) -> Ordering {
680 self.name.cmp(rhs.name)
687 contents: Cell<Option<&'a BTreeMap<BTM<'a>, BTM<'a>>>>,
690 impl<'a> Named for BTM<'a> {
691 fn new(name: &'static str) -> BTM<'a> {
694 contents: Cell::new(None)
697 fn name(&self) -> &str { self.name }
700 impl<'a> Marked<u32> for BTM<'a> {
701 fn mark(&self) -> u32 { self.mark.get() }
702 fn set_mark(&self, mark: u32) { self.mark.set(mark); }
705 impl<'a> Eq for BTM<'a> { }
707 impl<'a> PartialEq for BTM<'a> {
708 fn eq(&self, rhs: &BTM<'a>) -> bool {
709 self.name == rhs.name
713 impl<'a> PartialOrd for BTM<'a> {
714 fn partial_cmp(&self, rhs: &BTM<'a>) -> Option<Ordering> {
719 impl<'a> Ord for BTM<'a> {
720 fn cmp(&self, rhs: &BTM<'a>) -> Ordering {
721 self.name.cmp(rhs.name)
728 contents: Cell<Option<&'a BTreeSet<BTS<'a>>>>,
731 impl<'a> Named for BTS<'a> {
732 fn new(name: &'static str) -> BTS<'a> {
735 contents: Cell::new(None)
738 fn name(&self) -> &str { self.name }
741 impl<'a> Marked<u32> for BTS<'a> {
742 fn mark(&self) -> u32 { self.mark.get() }
743 fn set_mark(&self, mark: u32) { self.mark.set(mark); }
746 impl<'a> Eq for BTS<'a> { }
748 impl<'a> PartialEq for BTS<'a> {
749 fn eq(&self, rhs: &BTS<'a>) -> bool {
750 self.name == rhs.name
754 impl<'a> PartialOrd for BTS<'a> {
755 fn partial_cmp(&self, rhs: &BTS<'a>) -> Option<Ordering> {
760 impl<'a> Ord for BTS<'a> {
761 fn cmp(&self, rhs: &BTS<'a>) -> Ordering {
762 self.name.cmp(rhs.name)
767 struct RCRCData<'a> {
770 children: (Option<&'a RCRC<'a>>, Option<&'a RCRC<'a>>),
773 struct RCRC<'a>(Rc<RefCell<RCRCData<'a>>>);
775 impl<'a> Named for RCRC<'a> {
776 fn new(name: &'static str) -> Self {
777 RCRC(Rc::new(RefCell::new(RCRCData {
778 name: name, mark: Cell::new(0), children: (None, None), })))
780 fn name(&self) -> &str { self.0.borrow().name }
783 impl<'a> Marked<u32> for RCRC<'a> {
784 fn mark(&self) -> u32 { self.0.borrow().mark.get() }
785 fn set_mark(&self, mark: u32) { self.0.borrow().mark.set(mark); }
788 impl<'a> Children<'a> for RCRC<'a> {
789 fn count_children(&self) -> usize { 2 }
790 fn descend_one_child<C>(&self, context: &mut C, index: usize)
791 where C: Context + PrePost<Self>, Self: Sized
793 let children = &self.0.borrow().children;
794 let child = match index {
795 0 => if let Some(child) = children.0 { child } else { return; },
796 1 => if let Some(child) = children.1 { child } else { return; },
797 _ => panic!("bad children"),
799 // println!("S2 {} descending into child {} at index {}", self.name, child.name, index);
800 child.descend_into_self(context);
804 struct ARCRCData<'a> {
807 children: (Option<&'a ARCRC<'a>>, Option<&'a ARCRC<'a>>),
810 struct ARCRC<'a>(Arc<RefCell<ARCRCData<'a>>>);
812 impl<'a> Named for ARCRC<'a> {
813 fn new(name: &'static str) -> Self {
814 ARCRC(Arc::new(RefCell::new(ARCRCData {
815 name: name, mark: Cell::new(0), children: (None, None), })))
817 fn name(&self) -> &str { self.0.borrow().name }
820 impl<'a> Marked<u32> for ARCRC<'a> {
821 fn mark(&self) -> u32 { self.0.borrow().mark.get() }
822 fn set_mark(&self, mark: u32) { self.0.borrow().mark.set(mark); }
825 impl<'a> Children<'a> for ARCRC<'a> {
826 fn count_children(&self) -> usize { 2 }
827 fn descend_one_child<C>(&self, context: &mut C, index: usize)
828 where C: Context + PrePost<Self>, Self: Sized
830 let children = &self.0.borrow().children;
832 0 => if let Some(ref child) = children.0 {
833 child.descend_into_self(context);
835 1 => if let Some(ref child) = children.1 {
836 child.descend_into_self(context);
838 _ => panic!("bad children!"),
844 struct ARCMData<'a> {
846 children: (Option<&'a ARCM<'a>>, Option<&'a ARCM<'a>>),
850 struct ARCM<'a>(&'static str, Arc<Mutex<ARCMData<'a>>>);
852 impl<'a> Named for ARCM<'a> {
853 fn new(name: &'static str) -> Self {
854 ARCM(name, Arc::new(Mutex::new(ARCMData {
855 mark: Cell::new(0), children: (None, None), })))
857 fn name(&self) -> &str { self.0 }
860 impl<'a> Marked<u32> for ARCM<'a> {
861 fn mark(&self) -> u32 { self.1.lock().unwrap().mark.get() }
862 fn set_mark(&self, mark: u32) { self.1.lock().unwrap().mark.set(mark); }
865 impl<'a> Children<'a> for ARCM<'a> {
866 fn count_children(&self) -> usize { 2 }
867 fn descend_one_child<C>(&self, context: &mut C, index: usize)
868 where C: Context + PrePost<Self>, Self: Sized
870 let ref children = if let Ok(data) = self.1.try_lock() {
874 0 => if let Some(ref child) = children.0 {
875 child.descend_into_self(context);
877 1 => if let Some(ref child) = children.1 {
878 child.descend_into_self(context);
880 _ => panic!("bad children!"),
886 struct ARCRWData<'a> {
889 children: (Option<&'a ARCRW<'a>>, Option<&'a ARCRW<'a>>),
893 struct ARCRW<'a>(Arc<RwLock<ARCRWData<'a>>>);
895 impl<'a> Named for ARCRW<'a> {
896 fn new(name: &'static str) -> Self {
897 ARCRW(Arc::new(RwLock::new(ARCRWData {
898 name: name, mark: Cell::new(0), children: (None, None), })))
900 fn name(&self) -> &str { self.0.read().unwrap().name }
903 impl<'a> Marked<u32> for ARCRW<'a> {
904 fn mark(&self) -> u32 { self.0.read().unwrap().mark.get() }
905 fn set_mark(&self, mark: u32) { self.0.read().unwrap().mark.set(mark); }
908 impl<'a> Children<'a> for ARCRW<'a> {
909 fn count_children(&self) -> usize { 2 }
910 fn descend_one_child<C>(&self, context: &mut C, index: usize)
911 where C: Context + PrePost<Self>, Self: Sized
913 let children = &self.0.read().unwrap().children;
915 0 => if let Some(ref child) = children.0 {
916 child.descend_into_self(context);
918 1 => if let Some(ref child) = children.1 {
919 child.descend_into_self(context);
921 _ => panic!("bad children!"),
// `Context` items: per-traversal bookkeeping. `next_index` picks which
// child to follow (consuming control bits — see the ContextData impl
// below); the counters enforce visit/depth budgets so a graph whose
// nodes fail to mark themselves cannot loop forever.
927 fn next_index(&mut self, len: usize) -> usize;
928 fn should_act(&self) -> bool;
929 fn increase_visited(&mut self);
930 fn increase_skipped(&mut self);
931 fn increase_depth(&mut self);
932 fn decrease_depth(&mut self);
// `PrePost<T>` items: hooks fired around each node visit; `pre` is
// where mark-based cycle detection happens (see impl below), `post`
// and `hit_limit` are debug/trace hooks.
936 fn pre(&mut self, _: &T);
937 fn post(&mut self, _: &T);
938 fn hit_limit(&mut self, _: &T);
// `Children<'a>` items: every node type reports how many children it
// has and knows how to descend into the child at a given index.
942 fn count_children(&self) -> usize;
943 fn descend_one_child<C>(&self, context: &mut C, index: usize)
944 where C: Context + PrePost<Self>, Self: Sized;
// Default: ask the context for one child index (this consumes control
// bits) and descend into just that child — not all of them.
946 fn next_child<C>(&self, context: &mut C)
947 where C: Context + PrePost<Self>, Self: Sized
949 let index = context.next_index(self.count_children());
950 self.descend_one_child(context, index);
// Default driver: if the visit/depth budget allows, count the visit,
// bump depth, recurse via `next_child`, then restore depth; otherwise
// record the skip via `hit_limit`/`increase_skipped`.
// NOTE(review): lines elided from this listing (orig. 955-956, 962,
// 965-966) likely invoke `context.pre`/`context.post` around this
// body and close the branches — confirm against the full source.
953 fn descend_into_self<C>(&self, context: &mut C)
954 where C: Context + PrePost<Self>, Self: Sized
957 if context.should_act() {
958 context.increase_visited();
959 context.increase_depth();
960 self.next_child(context);
961 context.decrease_depth();
963 context.hit_limit(self);
964 context.increase_skipped();
// Convenience for node types whose (single) child link is stored in a
// `Cell<Option<&Self>>`: descend only when the link is non-empty.
969 fn descend<'b, C>(&self, c: &Cell<Option<&'b Self>>, context: &mut C)
970 where C: Context + PrePost<Self>, Self: Sized
972 if let Some(r) = c.get() {
973 r.descend_into_self(context);
978 impl<'a> Children<'a> for S<'a> {
979 fn count_children(&self) -> usize { 1 }
980 fn descend_one_child<C>(&self, context: &mut C, _: usize)
981 where C: Context + PrePost<Self>, Self: Sized {
982 self.descend(&self.next, context);
986 impl<'a> Children<'a> for S2<'a> {
987 fn count_children(&self) -> usize { 2 }
988 fn descend_one_child<C>(&self, context: &mut C, index: usize)
989 where C: Context + PrePost<Self>, Self: Sized
991 let children = self.next.get();
992 let child = match index {
993 0 => if let Some(child) = children.0 { child } else { return; },
994 1 => if let Some(child) = children.1 { child } else { return; },
995 _ => panic!("bad children"),
997 // println!("S2 {} descending into child {} at index {}", self.name, child.name, index);
998 child.descend_into_self(context);
1002 impl<'a> Children<'a> for V<'a> {
1003 fn count_children(&self) -> usize { self.contents.len() }
1004 fn descend_one_child<C>(&self, context: &mut C, index: usize)
1005 where C: Context + PrePost<Self>, Self: Sized
1007 if let Some(child) = self.contents[index].get() {
1008 child.descend_into_self(context);
1013 impl<'a> Children<'a> for H<'a> {
1014 fn count_children(&self) -> usize { 1 }
1015 fn descend_one_child<C>(&self, context: &mut C, _: usize)
1016 where C: Context + PrePost<Self>, Self: Sized
1018 self.descend(&self.next, context);
1022 impl<'a> Children<'a> for HM<'a> {
1023 fn count_children(&self) -> usize {
1024 if let Some(m) = self.contents.get() { 2 * m.iter().count() } else { 0 }
1026 fn descend_one_child<C>(&self, context: &mut C, index: usize)
1027 where C: Context + PrePost<Self>, Self: Sized
1029 if let Some(ref hm) = self.contents.get() {
1030 for (k, v) in hm.iter().nth(index / 2) {
1031 [k, v][index % 2].descend_into_self(context);
1037 impl<'a> Children<'a> for VD<'a> {
1038 fn count_children(&self) -> usize {
1039 if let Some(d) = self.contents.get() { d.iter().count() } else { 0 }
1041 fn descend_one_child<C>(&self, context: &mut C, index: usize)
1042 where C: Context + PrePost<Self>, Self: Sized
1044 if let Some(ref vd) = self.contents.get() {
1045 for r in vd.iter().nth(index) {
1046 r.descend_into_self(context);
1052 impl<'a> Children<'a> for VM<'a> {
1053 fn count_children(&self) -> usize {
1054 if let Some(m) = self.contents.get() { m.iter().count() } else { 0 }
1056 fn descend_one_child<C>(&self, context: &mut C, index: usize)
1057 where C: Context + PrePost<VM<'a>>
1059 if let Some(ref vd) = self.contents.get() {
1060 for (_idx, r) in vd.iter().nth(index) {
1061 r.descend_into_self(context);
1067 impl<'a> Children<'a> for LL<'a> {
1068 fn count_children(&self) -> usize {
1069 if let Some(l) = self.contents.get() { l.iter().count() } else { 0 }
1071 fn descend_one_child<C>(&self, context: &mut C, index: usize)
1072 where C: Context + PrePost<LL<'a>>
1074 if let Some(ref ll) = self.contents.get() {
1075 for r in ll.iter().nth(index) {
1076 r.descend_into_self(context);
1082 impl<'a> Children<'a> for BH<'a> {
1083 fn count_children(&self) -> usize {
1084 if let Some(h) = self.contents.get() { h.iter().count() } else { 0 }
1086 fn descend_one_child<C>(&self, context: &mut C, index: usize)
1087 where C: Context + PrePost<BH<'a>>
1089 if let Some(ref bh) = self.contents.get() {
1090 for r in bh.iter().nth(index) {
1091 r.descend_into_self(context);
1097 impl<'a> Children<'a> for BTM<'a> {
1098 fn count_children(&self) -> usize {
1099 if let Some(m) = self.contents.get() { 2 * m.iter().count() } else { 0 }
1101 fn descend_one_child<C>(&self, context: &mut C, index: usize)
1102 where C: Context + PrePost<BTM<'a>>
1104 if let Some(ref bh) = self.contents.get() {
1105 for (k, v) in bh.iter().nth(index / 2) {
1106 [k, v][index % 2].descend_into_self(context);
1112 impl<'a> Children<'a> for BTS<'a> {
1113 fn count_children(&self) -> usize {
1114 if let Some(s) = self.contents.get() { s.iter().count() } else { 0 }
1116 fn descend_one_child<C>(&self, context: &mut C, index: usize)
1117 where C: Context + PrePost<BTS<'a>>
1119 if let Some(ref bh) = self.contents.get() {
1120 for r in bh.iter().nth(index) {
1121 r.descend_into_self(context);
// Per-traversal state; `c_orig` is built once in main and cloned (it
// is `Copy`) for each numbered test case.
// NOTE(review): the fields between `struct ContextData {` and
// `saw_prev_marked` (control_bits, curr_mark, depth/visit counters,
// per the uses in the impls below) are elided from this listing —
// confirm against the full source.
1127 #[derive(Copy, Clone)]
1128 struct ContextData {
1135 saw_prev_marked: bool,
1139 impl Context for ContextData {
// Builds a child index by shifting in the low bits of `control_bits`
// one at a time; a node with fewer than two children needs no choice,
// so no bits are consumed. `next_power_of_two` bounds how many bits
// are read for `len` children.
// NOTE(review): the loop header and index clamping (orig. lines
// 1144, 1146, 1148-1151, 1155-1156) are elided from this listing —
// confirm the exact bit-consumption logic against the full source.
1140 fn next_index(&mut self, len: usize) -> usize {
1141 if len < 2 { return 0; }
1142 let mut pow2 = len.next_power_of_two();
1143 let _pow2_orig = pow2;
1145 let mut bits = self.control_bits;
1147 idx = (idx << 1) | (bits & 1) as usize;
1152 // println!("next_index({} [{:b}]) says {}, pre(bits): {:b} post(bits): {:b}",
1153 // len, _pow2_orig, idx, self.control_bits, bits);
// Persist the remaining (shifted-out) bits for the next choice.
1154 self.control_bits = bits;
// Defensive budget: stop descending once either the depth or the
// total-visit ceiling is reached (guards against unmarked cycles).
1157 fn should_act(&self) -> bool {
1158 self.curr_depth < self.max_depth && self.visited < self.max_visits
1160 fn increase_visited(&mut self) { self.visited += 1; }
1161 fn increase_skipped(&mut self) { self.skipped += 1; }
1162 fn increase_depth(&mut self) { self.curr_depth += 1; }
1163 fn decrease_depth(&mut self) { self.curr_depth -= 1; }
// Trace + cycle-detection hooks for any node that is both nameable
// and markable with a u32.
1166 impl<T:Named+Marked<u32>> PrePost<T> for ContextData {
// Called before visiting `t`: print an indented trace line, then —
// the heart of the test — if `t` already carries the current mark we
// must have reached it earlier in this same traversal, i.e. we found
// a cycle; record that in `saw_prev_marked` (asserted by each test
// case in main). Finally stamp `t` with the current mark.
1167 fn pre(&mut self, t: &T) {
1168 for _ in 0..self.curr_depth {
1169 if PRINT { print!(" "); }
1171 if PRINT { println!("prev {}", t.name()); }
1172 if t.mark() == self.curr_mark {
1173 for _ in 0..self.curr_depth {
1174 if PRINT { print!(" "); }
1176 if PRINT { println!("(probably previously marked)"); }
1177 self.saw_prev_marked = true;
1179 t.set_mark(self.curr_mark);
// Called after visiting `t`: trace output only, no state changes.
1181 fn post(&mut self, t: &T) {
1182 for _ in 0..self.curr_depth {
1183 if PRINT { print!(" "); }
1185 if PRINT { println!("post {}", t.name()); }
// Called when the visit/depth budget stopped a descent: trace only.
1187 fn hit_limit(&mut self, t: &T) {
1188 for _ in 0..self.curr_depth {
1189 if PRINT { print!(" "); }
1191 if PRINT { println!("LIMIT {}", t.name()); }