use super::map::MIN_LEN;
use super::node::{marker, ForceResult::*, Handle, LeftOrRight::*, NodeRef, Root};
use core::alloc::Allocator;

impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Stocks up a possibly underfull node by merging with or stealing from a
    /// sibling. If successful but at the cost of shrinking the parent node,
    /// returns that shrunk parent node. Returns an `Err` if the node is
    /// an empty root.
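    ///
    /// A minimal sketch of the caller's view of the three outcomes (the
    /// handle and allocator names are hypothetical; this is internal API,
    /// not a runnable doctest):
    /// ```ignore (internal API)
    /// match node.fix_node_through_parent(alloc) {
    ///     Ok(Some(parent)) => { /* merged; the shrunk parent may need fixing next */ }
    ///     Ok(None) => { /* already full enough, or fixed by stealing */ }
    ///     Err(root) => { /* the node is an empty root */ }
    /// }
    /// ```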
    fn fix_node_through_parent<A: Allocator + Clone>(
        self,
        alloc: A,
    ) -> Result<Option<NodeRef<marker::Mut<'a>, K, V, marker::Internal>>, Self> {
        let len = self.len();
        if len >= MIN_LEN {
            Ok(None)
        } else {
            match self.choose_parent_kv() {
                Ok(Left(mut left_parent_kv)) => {
                    if left_parent_kv.can_merge() {
                        let parent = left_parent_kv.merge_tracking_parent(alloc);
                        Ok(Some(parent))
                    } else {
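                        // The left sibling is too full to merge, so it can
                        // spare enough elements to bring us up to `MIN_LEN`.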
                        left_parent_kv.bulk_steal_left(MIN_LEN - len);
                        Ok(None)
                    }
                }
                Ok(Right(mut right_parent_kv)) => {
                    if right_parent_kv.can_merge() {
                        let parent = right_parent_kv.merge_tracking_parent(alloc);
                        Ok(Some(parent))
                    } else {
                        right_parent_kv.bulk_steal_right(MIN_LEN - len);
                        Ok(None)
                    }
                }
                Err(root) => {
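                    // The root itself is allowed to be underfull, but not empty.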
                    if len > 0 {
                        Ok(None)
                    } else {
                        Err(root)
                    }
                }
            }
        }
    }
}

impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Stocks up a possibly underfull node, and if that causes its parent node
    /// to shrink, stocks up the parent, recursively.
    /// Returns `true` if it fixed the tree, `false` if it couldn't because the
    /// root node became empty.
    ///
    /// This method does not expect ancestors to already be underfull upon entry
    /// and panics if it encounters an empty ancestor.
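    ///
    /// A minimal sketch of a hypothetical caller, e.g. after a removal left
    /// a leaf underfull (the names and surrounding code are assumptions):
    /// ```ignore (internal API)
    /// if !underfull_leaf.forget_type().fix_node_and_affected_ancestors(alloc.clone()) {
    ///     // The root emptied out; trim the now-useless levels on top.
    ///     root.fix_top(alloc);
    /// }
    /// ```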
    pub fn fix_node_and_affected_ancestors<A: Allocator + Clone>(mut self, alloc: A) -> bool {
        loop {
            match self.fix_node_through_parent(alloc.clone()) {
                Ok(Some(parent)) => self = parent.forget_type(),
                Ok(None) => return true,
                Err(_) => return false,
            }
        }
    }
}

impl<K, V> Root<K, V> {
    /// Removes empty levels on the top, but keeps an empty leaf if the entire tree is empty.
    pub fn fix_top<A: Allocator + Clone>(&mut self, alloc: A) {
        while self.height() > 0 && self.len() == 0 {
            self.pop_internal_level(alloc.clone());
        }
    }

    /// Stocks up or merges away any underfull nodes on the right border of the
    /// tree. The other nodes, those that are neither the root nor a rightmost
    /// edge, must already have at least MIN_LEN elements.
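    ///
    /// A hypothetical use, mirroring how splitting a tree repairs both halves
    /// (the `split_off` call and the names here are assumptions, not this
    /// module's API):
    /// ```ignore (internal API)
    /// let mut right_root = left_root.split_off(&key, alloc.clone());
    /// left_root.fix_right_border(alloc.clone()); // damage is on the left half's right edge
    /// right_root.fix_left_border(alloc); // and on the right half's left edge
    /// ```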
    pub fn fix_right_border<A: Allocator + Clone>(&mut self, alloc: A) {
        self.fix_top(alloc.clone());
        if self.len() > 0 {
            self.borrow_mut().last_kv().fix_right_border_of_right_edge(alloc.clone());
            self.fix_top(alloc);
        }
    }

    /// The symmetric clone of `fix_right_border`.
    pub fn fix_left_border<A: Allocator + Clone>(&mut self, alloc: A) {
        self.fix_top(alloc.clone());
        if self.len() > 0 {
            self.borrow_mut().first_kv().fix_left_border_of_left_edge(alloc.clone());
            self.fix_top(alloc);
        }
    }

    /// Stocks up any underfull nodes on the right border of the tree.
    /// The other nodes, those that are neither the root nor a rightmost edge,
    /// must be prepared to have up to MIN_LEN elements stolen.
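    ///
    /// A hypothetical bulk-build sketch: pushing onto the right edge leaves
    /// every non-border node plentiful, so only the rightmost child on each
    /// level can be underfull (`bulk_push` and the sequencing here are
    /// assumptions):
    /// ```ignore (internal API)
    /// root.bulk_push(sorted_iter, &mut length, alloc);
    /// root.fix_right_border_of_plentiful();
    /// ```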
    pub fn fix_right_border_of_plentiful(&mut self) {
        let mut cur_node = self.borrow_mut();
        while let Internal(internal) = cur_node.force() {
            // Check if right-most child is underfull.
            let mut last_kv = internal.last_kv().consider_for_balancing();
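            // The promised "plentiful" invariant: the left sibling can lend
            // up to MIN_LEN elements and still keep at least MIN_LEN itself.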
            debug_assert!(last_kv.left_child_len() >= MIN_LEN * 2);
            let right_child_len = last_kv.right_child_len();
            if right_child_len < MIN_LEN {
                // We need to steal.
                last_kv.bulk_steal_left(MIN_LEN - right_child_len);
            }

            // Go further down.
            cur_node = last_kv.into_right_child();
        }
    }
}

impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV> {
    fn fix_left_border_of_left_edge<A: Allocator + Clone>(mut self, alloc: A) {
        while let Internal(internal_kv) = self.force() {
            self = internal_kv.fix_left_child(alloc.clone()).first_kv();
            debug_assert!(self.reborrow().into_node().len() > MIN_LEN);
        }
    }

    fn fix_right_border_of_right_edge<A: Allocator + Clone>(mut self, alloc: A) {
        while let Internal(internal_kv) = self.force() {
            self = internal_kv.fix_right_child(alloc.clone()).last_kv();
            debug_assert!(self.reborrow().into_node().len() > MIN_LEN);
        }
    }
}

impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    /// Stocks up the left child, assuming the right child isn't underfull, and
    /// provisions an extra element to allow merging its children in turn
    /// without becoming underfull.
    /// Returns the left child.
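    ///
    /// A worked example, assuming MIN_LEN = 5 and CAPACITY = 11 (the values
    /// for the standard B = 6 tree):
    /// ```ignore (illustrative arithmetic)
    /// // left_len = 3, right_len = 9: 3 + 9 + 1 > 11, so no merge is possible.
    /// // count = (MIN_LEN + 1) - 3 = 3; after bulk_steal_right(3):
    /// // left_len = 6 (one above MIN_LEN), right_len = 6 (still >= MIN_LEN).
    /// ```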
    fn fix_left_child<A: Allocator + Clone>(
        self,
        alloc: A,
    ) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        let mut internal_kv = self.consider_for_balancing();
        let left_len = internal_kv.left_child_len();
        debug_assert!(internal_kv.right_child_len() >= MIN_LEN);
        if internal_kv.can_merge() {
            internal_kv.merge_tracking_child(alloc)
        } else {
            // `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
            let count = (MIN_LEN + 1).saturating_sub(left_len);
            if count > 0 {
                internal_kv.bulk_steal_right(count);
            }
            internal_kv.into_left_child()
        }
    }

    /// Stocks up the right child, assuming the left child isn't underfull, and
    /// provisions an extra element to allow merging its children in turn
    /// without becoming underfull.
    /// Returns wherever the right child ended up.
    fn fix_right_child<A: Allocator + Clone>(
        self,
        alloc: A,
    ) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
        let mut internal_kv = self.consider_for_balancing();
        let right_len = internal_kv.right_child_len();
        debug_assert!(internal_kv.left_child_len() >= MIN_LEN);
        if internal_kv.can_merge() {
            internal_kv.merge_tracking_child(alloc)
        } else {
            // `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
            let count = (MIN_LEN + 1).saturating_sub(right_len);
            if count > 0 {
                internal_kv.bulk_steal_left(count);
            }
            internal_kv.into_right_child()
        }
    }
}