1 use super::map::MIN_LEN;
2 use super::node::{marker, ForceResult::*, Handle, LeftOrRight::*, NodeRef, Root};
3 use core::alloc::Allocator;
5 impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
6 /// Stocks up a possibly underfull node by merging with or stealing from a
7 /// sibling. If successful but at the cost of shrinking the parent node,
8 /// returns that shrunk parent node. Returns an `Err` if the node is
/// (NOTE(review): this doc sentence is truncated in the excerpt — presumably
/// "…if the node is an empty root"; the `Err` arm in
/// `fix_node_and_affected_ancestors` treats `Err` as "root became empty".
/// Confirm against the full source.)
10 fn fix_node_through_parent<A: Allocator + Clone>(
13 ) -> Result<Option<NodeRef<marker::Mut<'a>, K, V, marker::Internal>>, Self> {
// Ask the parent which neighbouring KV (left or right sibling) this
// node should rebalance with.
18 match self.choose_parent_kv() {
19 Ok(Left(mut left_parent_kv)) => {
20 if left_parent_kv.can_merge() {
// Merging consumes a KV from the parent, so the parent may now
// be underfull itself — it is returned ("tracked") so the
// caller can continue fixing one level up.
21 let parent = left_parent_kv.merge_tracking_parent(alloc);
// No merge possible: take just enough elements from the left
// sibling to restore MIN_LEN. NOTE(review): `len` is bound on
// a line elided from this excerpt — presumably this node's
// current length; confirm in the full source.
24 left_parent_kv.bulk_steal_left(MIN_LEN - len);
// Symmetric case: rebalance with the right sibling instead.
28 Ok(Right(mut right_parent_kv)) => {
29 if right_parent_kv.can_merge() {
30 let parent = right_parent_kv.merge_tracking_parent(alloc);
33 right_parent_kv.bulk_steal_right(MIN_LEN - len);
49 impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
50 /// Stocks up a possibly underfull node, and if that causes its parent node
51 /// to shrink, stocks up the parent, recursively.
52 /// Returns `true` if it fixed the tree, `false` if it couldn't because the
53 /// root node became empty.
55 /// This method does not expect ancestors to already be underfull upon entry
56 /// and panics if it encounters an empty ancestor.
57 pub fn fix_node_and_affected_ancestors<A: Allocator + Clone>(mut self, alloc: A) -> bool {
// Walk upward: fixing one node may shrink its parent, which then
// needs fixing in turn. (The enclosing loop construct is elided from
// this excerpt; the match below is evidently its body.)
59 match self.fix_node_through_parent(alloc.clone()) {
// The parent lost a KV to a merge — continue one level up.
60 Ok(Some(parent)) => self = parent.forget_type(),
// Fixed without shrinking the parent: the tree is healthy again.
61 Ok(None) => return true,
// Per the doc comment above: the root node became empty, so the
// tree could not be fixed here; the caller must clean up.
62 Err(_) => return false,
68 impl<K, V> Root<K, V> {
69 /// Removes empty levels on the top, but keeps an empty leaf if the entire tree is empty.
70 pub fn fix_top<A: Allocator + Clone>(&mut self, alloc: A) {
// An internal root (height > 0) with zero KVs is a useless level:
// pop it, promoting its sole remaining child. Repeat until the root
// is either non-empty or a leaf (height 0).
71 while self.height() > 0 && self.len() == 0 {
72 self.pop_internal_level(alloc.clone());
76 /// Stocks up or merge away any underfull nodes on the right border of the
77 /// tree. The other nodes, those that are not the root nor a rightmost edge,
78 /// must already have at least MIN_LEN elements.
79 pub fn fix_right_border<A: Allocator + Clone>(&mut self, alloc: A) {
// Shed empty top levels first so `last_kv` below starts from a
// meaningful root. NOTE(review): a guard (and likely a trailing
// `fix_top` call) around the line below appears elided from this
// excerpt — confirm against the full source.
80 self.fix_top(alloc.clone());
82 self.borrow_mut().last_kv().fix_right_border_of_right_edge(alloc.clone());
87 /// The symmetric clone of `fix_right_border`.
88 pub fn fix_left_border<A: Allocator + Clone>(&mut self, alloc: A) {
89 self.fix_top(alloc.clone());
91 self.borrow_mut().first_kv().fix_left_border_of_left_edge(alloc.clone());
96 /// Stocks up any underfull nodes on the right border of the tree.
97 /// The other nodes, those that are neither the root nor a rightmost edge,
98 /// must be prepared to have up to MIN_LEN elements stolen.
99 pub fn fix_right_border_of_plentiful(&mut self) {
100 let mut cur_node = self.borrow_mut();
// Descend along the rightmost edge, topping up each rightmost child
// from its left sibling, which the contract above guarantees can
// spare the elements.
101 while let Internal(internal) = cur_node.force() {
102 // Check if right-most child is underfull.
103 let mut last_kv = internal.last_kv().consider_for_balancing();
// Guaranteed by the caller's contract stated in the doc comment:
// the left sibling can lose up to MIN_LEN and stay plentiful.
104 debug_assert!(last_kv.left_child_len() >= MIN_LEN * 2);
105 let right_child_len = last_kv.right_child_len();
106 if right_child_len < MIN_LEN {
// Steal exactly enough to bring the rightmost child to MIN_LEN.
108 last_kv.bulk_steal_left(MIN_LEN - right_child_len);
// Continue one level down with the rightmost child.
112 cur_node = last_kv.into_right_child();
117 impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV> {
// Walks down the leftmost edge of the subtree under this KV's node; at
// each internal level, stocks up the leftmost child (see
// `fix_left_child`) and descends into it via its first KV.
118 fn fix_left_border_of_left_edge<A: Allocator + Clone>(mut self, alloc: A) {
119 while let Internal(internal_kv) = self.force() {
120 self = internal_kv.fix_left_child(alloc.clone()).first_kv();
// `fix_left_child` provisions one element beyond MIN_LEN, so the
// node we just descended into is strictly above the minimum.
121 debug_assert!(self.reborrow().into_node().len() > MIN_LEN);
// Mirror image of `fix_left_border_of_left_edge`: follows the rightmost
// edge via `fix_right_child` / `last_kv`.
125 fn fix_right_border_of_right_edge<A: Allocator + Clone>(mut self, alloc: A) {
126 while let Internal(internal_kv) = self.force() {
127 self = internal_kv.fix_right_child(alloc.clone()).last_kv();
128 debug_assert!(self.reborrow().into_node().len() > MIN_LEN);
133 impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
134 /// Stocks up the left child, assuming the right child isn't underfull, and
135 /// provisions an extra element to allow merging its children in turn
136 /// without becoming underfull.
137 /// Returns the left child.
138 fn fix_left_child<A: Allocator + Clone>(
141 ) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
142 let mut internal_kv = self.consider_for_balancing();
143 let left_len = internal_kv.left_child_len();
// Caller must keep the right sibling healthy; see the doc comment.
144 debug_assert!(internal_kv.right_child_len() >= MIN_LEN);
145 if internal_kv.can_merge() {
// Both children fit in a single node: merge them (and this KV)
// and return the merged node.
146 internal_kv.merge_tracking_child(alloc)
148 // `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
// saturating_sub: steal nothing if the left child already holds
// more than MIN_LEN elements.
149 let count = (MIN_LEN + 1).saturating_sub(left_len);
151 internal_kv.bulk_steal_right(count);
153 internal_kv.into_left_child()
157 /// Stocks up the right child, assuming the left child isn't underfull, and
158 /// provisions an extra element to allow merging its children in turn
159 /// without becoming underfull.
160 /// Returns wherever the right child ended up.
161 fn fix_right_child<A: Allocator + Clone>(
164 ) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
165 let mut internal_kv = self.consider_for_balancing();
166 let right_len = internal_kv.right_child_len();
// Symmetric precondition: the left sibling must not be underfull.
167 debug_assert!(internal_kv.left_child_len() >= MIN_LEN);
168 if internal_kv.can_merge() {
169 internal_kv.merge_tracking_child(alloc)
171 // `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
// Symmetric to `fix_left_child`: top up from the left sibling.
172 let count = (MIN_LEN + 1).saturating_sub(right_len);
174 internal_kv.bulk_steal_left(count);
176 internal_kv.into_right_child()