use core::borrow::Borrow;
use core::hint;
use core::ops::RangeBounds;
use core::ptr;

use super::node::{marker, ForceResult::*, Handle, NodeRef};
use super::search::SearchBound;

use crate::alloc::Allocator;

// `front` and `back` are always both `None` or both `Some`.
pub struct LeafRange<BorrowType, K, V> {
    front: Option<Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>>,
    back: Option<Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>>,
}

impl<'a, K: 'a, V: 'a> Clone for LeafRange<marker::Immut<'a>, K, V> {
    fn clone(&self) -> Self {
        LeafRange { front: self.front.clone(), back: self.back.clone() }
    }
}

impl<B, K, V> Default for LeafRange<B, K, V> {
    fn default() -> Self {
        LeafRange { front: None, back: None }
    }
}

impl<BorrowType, K, V> LeafRange<BorrowType, K, V> {
    pub fn none() -> Self {
        LeafRange { front: None, back: None }
    }

    fn is_empty(&self) -> bool {
        self.front == self.back
    }

    /// Temporarily takes out another, immutable equivalent of the same range.
    pub fn reborrow(&self) -> LeafRange<marker::Immut<'_>, K, V> {
        LeafRange {
            front: self.front.as_ref().map(|f| f.reborrow()),
            back: self.back.as_ref().map(|b| b.reborrow()),
        }
    }
}

impl<'a, K, V> LeafRange<marker::Immut<'a>, K, V> {
    #[inline]
    pub fn next_checked(&mut self) -> Option<(&'a K, &'a V)> {
        self.perform_next_checked(|kv| kv.into_kv())
    }

    #[inline]
    pub fn next_back_checked(&mut self) -> Option<(&'a K, &'a V)> {
        self.perform_next_back_checked(|kv| kv.into_kv())
    }
}

impl<'a, K, V> LeafRange<marker::ValMut<'a>, K, V> {
    #[inline]
    pub fn next_checked(&mut self) -> Option<(&'a K, &'a mut V)> {
        self.perform_next_checked(|kv| unsafe { ptr::read(kv) }.into_kv_valmut())
    }

    #[inline]
    pub fn next_back_checked(&mut self) -> Option<(&'a K, &'a mut V)> {
        self.perform_next_back_checked(|kv| unsafe { ptr::read(kv) }.into_kv_valmut())
    }
}

impl<BorrowType: marker::BorrowType, K, V> LeafRange<BorrowType, K, V> {
    /// If possible, extract some result from the following KV and move to the edge beyond it.
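    ///
    /// The front handle is updated via `super::mem::replace`, which moves the handle out,
    /// runs the navigation closure, and writes the successor edge back; that helper aborts
    /// the process if the closure panics, so `front` is never observed in a torn state.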
    fn perform_next_checked<F, R>(&mut self, f: F) -> Option<R>
    where
        F: Fn(&Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>) -> R,
    {
        if self.is_empty() {
            None
        } else {
            super::mem::replace(self.front.as_mut().unwrap(), |front| {
                let kv = front.next_kv().ok().unwrap();
                let result = f(&kv);
                (kv.next_leaf_edge(), Some(result))
            })
        }
    }

    /// If possible, extract some result from the preceding KV and move to the edge beyond it.
    fn perform_next_back_checked<F, R>(&mut self, f: F) -> Option<R>
    where
        F: Fn(&Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>) -> R,
    {
        if self.is_empty() {
            None
        } else {
            super::mem::replace(self.back.as_mut().unwrap(), |back| {
                let kv = back.next_back_kv().ok().unwrap();
                let result = f(&kv);
                (kv.next_back_leaf_edge(), Some(result))
            })
        }
    }
}

enum LazyLeafHandle<BorrowType, K, V> {
    Root(NodeRef<BorrowType, K, V, marker::LeafOrInternal>), // not yet descended
    Edge(Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>),
}

impl<'a, K: 'a, V: 'a> Clone for LazyLeafHandle<marker::Immut<'a>, K, V> {
    fn clone(&self) -> Self {
        match self {
            LazyLeafHandle::Root(root) => LazyLeafHandle::Root(*root),
            LazyLeafHandle::Edge(edge) => LazyLeafHandle::Edge(*edge),
        }
    }
}

impl<BorrowType, K, V> LazyLeafHandle<BorrowType, K, V> {
    fn reborrow(&self) -> LazyLeafHandle<marker::Immut<'_>, K, V> {
        match self {
            LazyLeafHandle::Root(root) => LazyLeafHandle::Root(root.reborrow()),
            LazyLeafHandle::Edge(edge) => LazyLeafHandle::Edge(edge.reborrow()),
        }
    }
}

// `front` and `back` are always both `None` or both `Some`.
pub struct LazyLeafRange<BorrowType, K, V> {
    front: Option<LazyLeafHandle<BorrowType, K, V>>,
    back: Option<LazyLeafHandle<BorrowType, K, V>>,
}

impl<B, K, V> Default for LazyLeafRange<B, K, V> {
    fn default() -> Self {
        LazyLeafRange { front: None, back: None }
    }
}

impl<'a, K: 'a, V: 'a> Clone for LazyLeafRange<marker::Immut<'a>, K, V> {
    fn clone(&self) -> Self {
        LazyLeafRange { front: self.front.clone(), back: self.back.clone() }
    }
}

impl<BorrowType, K, V> LazyLeafRange<BorrowType, K, V> {
    pub fn none() -> Self {
        LazyLeafRange { front: None, back: None }
    }

    /// Temporarily takes out another, immutable equivalent of the same range.
    pub fn reborrow(&self) -> LazyLeafRange<marker::Immut<'_>, K, V> {
        LazyLeafRange {
            front: self.front.as_ref().map(|f| f.reborrow()),
            back: self.back.as_ref().map(|b| b.reborrow()),
        }
    }
}

impl<'a, K, V> LazyLeafRange<marker::Immut<'a>, K, V> {
    #[inline]
    pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
        unsafe { self.init_front().unwrap().next_unchecked() }
    }

    #[inline]
    pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
        unsafe { self.init_back().unwrap().next_back_unchecked() }
    }
}

impl<'a, K, V> LazyLeafRange<marker::ValMut<'a>, K, V> {
    #[inline]
    pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) {
        unsafe { self.init_front().unwrap().next_unchecked() }
    }

    #[inline]
    pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) {
        unsafe { self.init_back().unwrap().next_back_unchecked() }
    }
}

impl<K, V> LazyLeafRange<marker::Dying, K, V> {
    fn take_front(
        &mut self,
    ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge>> {
        match self.front.take()? {
            LazyLeafHandle::Root(root) => Some(root.first_leaf_edge()),
            LazyLeafHandle::Edge(edge) => Some(edge),
        }
    }

    #[inline]
    pub unsafe fn deallocating_next_unchecked<A: Allocator + Clone>(
        &mut self,
        alloc: A,
    ) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
        debug_assert!(self.front.is_some());
        let front = self.init_front().unwrap();
        unsafe { front.deallocating_next_unchecked(alloc) }
    }

    #[inline]
    pub unsafe fn deallocating_next_back_unchecked<A: Allocator + Clone>(
        &mut self,
        alloc: A,
    ) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
        debug_assert!(self.back.is_some());
        let back = self.init_back().unwrap();
        unsafe { back.deallocating_next_back_unchecked(alloc) }
    }

    #[inline]
    pub fn deallocating_end<A: Allocator + Clone>(&mut self, alloc: A) {
        if let Some(front) = self.take_front() {
            front.deallocating_end(alloc)
        }
    }
}

impl<BorrowType: marker::BorrowType, K, V> LazyLeafRange<BorrowType, K, V> {
    fn init_front(
        &mut self,
    ) -> Option<&mut Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>> {
        if let Some(LazyLeafHandle::Root(root)) = &self.front {
            self.front = Some(LazyLeafHandle::Edge(unsafe { ptr::read(root) }.first_leaf_edge()));
        }
        match &mut self.front {
            None => None,
            Some(LazyLeafHandle::Edge(edge)) => Some(edge),
            // SAFETY: the code above would have replaced it.
            Some(LazyLeafHandle::Root(_)) => unsafe { hint::unreachable_unchecked() },
        }
    }

    fn init_back(
        &mut self,
    ) -> Option<&mut Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>> {
        if let Some(LazyLeafHandle::Root(root)) = &self.back {
            self.back = Some(LazyLeafHandle::Edge(unsafe { ptr::read(root) }.last_leaf_edge()));
        }
        match &mut self.back {
            None => None,
            Some(LazyLeafHandle::Edge(edge)) => Some(edge),
            // SAFETY: the code above would have replaced it.
            Some(LazyLeafHandle::Root(_)) => unsafe { hint::unreachable_unchecked() },
        }
    }
}

impl<BorrowType: marker::BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
    /// Finds the distinct leaf edges delimiting a specified range in a tree.
    ///
    /// If such distinct edges exist, returns them in ascending order, meaning
    /// that a non-zero number of calls to `next_unchecked` on the `front` of
    /// the result and/or calls to `next_back_unchecked` on the `back` of the
    /// result will eventually reach the same edge.
    ///
    /// If there are no such edges, i.e., if the tree contains no key within
    /// the range, returns an empty `front` and `back`.
    ///
    /// # Safety
    /// Unless `BorrowType` is `Immut`, do not use the handles to visit the same
    /// KV twice.
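    ///
    /// Illustration (descriptive only): for a range like `3..7`, the search descends along a
    /// single path from the root until the lower and upper bounds first select different edges
    /// of one node (the "bifurcation" node); from there, the front and back handles descend
    /// independently until each reaches a leaf edge.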
    unsafe fn find_leaf_edges_spanning_range<Q: ?Sized, R>(
        self,
        range: R,
    ) -> LeafRange<BorrowType, K, V>
    where
        Q: Ord,
        K: Borrow<Q>,
        R: RangeBounds<Q>,
    {
        match self.search_tree_for_bifurcation(&range) {
            Err(_) => LeafRange::none(),
            Ok((
                node,
                lower_edge_idx,
                upper_edge_idx,
                mut lower_child_bound,
                mut upper_child_bound,
            )) => {
                let mut lower_edge = unsafe { Handle::new_edge(ptr::read(&node), lower_edge_idx) };
                let mut upper_edge = unsafe { Handle::new_edge(node, upper_edge_idx) };
                loop {
                    match (lower_edge.force(), upper_edge.force()) {
                        (Leaf(f), Leaf(b)) => return LeafRange { front: Some(f), back: Some(b) },
                        (Internal(f), Internal(b)) => {
                            (lower_edge, lower_child_bound) =
                                f.descend().find_lower_bound_edge(lower_child_bound);
                            (upper_edge, upper_child_bound) =
                                b.descend().find_upper_bound_edge(upper_child_bound);
                        }
                        _ => unreachable!("BTreeMap has different depths"),
                    }
                }
            }
        }
    }
}

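/// Builds a lazy full range from two copies of the same root. Descent to the first and last
/// leaf edge is deferred until the range is actually iterated from that end.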
fn full_range<BorrowType: marker::BorrowType, K, V>(
    root1: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
    root2: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
) -> LazyLeafRange<BorrowType, K, V> {
    LazyLeafRange {
        front: Some(LazyLeafHandle::Root(root1)),
        back: Some(LazyLeafHandle::Root(root2)),
    }
}

impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
    /// Finds the pair of leaf edges delimiting a specific range in a tree.
    ///
    /// The result is meaningful only if the tree is ordered by key, like the tree
    /// in a `BTreeMap` is.
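    ///
    /// For illustration only (a hedged sketch in terms of the public API, not a doctest for
    /// this internal method): this is the search that backs shared range iteration, roughly:
    ///
    /// ```ignore
    /// use std::collections::BTreeMap;
    ///
    /// let map = BTreeMap::from([(1, "a"), (3, "b"), (5, "c")]);
    /// // `BTreeMap::range` descends once to find the two leaf edges delimiting `2..=4`,
    /// // then walks the leaf level between them.
    /// let found: Vec<_> = map.range(2..=4).collect();
    /// assert_eq!(found, [(&3, &"b")]);
    /// ```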
    pub fn range_search<Q, R>(self, range: R) -> LeafRange<marker::Immut<'a>, K, V>
    where
        Q: ?Sized + Ord,
        K: Borrow<Q>,
        R: RangeBounds<Q>,
    {
        // SAFETY: our borrow type is immutable.
        unsafe { self.find_leaf_edges_spanning_range(range) }
    }

    /// Finds the pair of leaf edges delimiting an entire tree.
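    ///
    /// Illustration only (hedged, in terms of the public API): this lazy pair of edges is
    /// what ordinary shared iteration walks, e.g.
    ///
    /// ```ignore
    /// use std::collections::BTreeMap;
    ///
    /// let map = BTreeMap::from([(2, "b"), (1, "a")]);
    /// // `BTreeMap::iter` traverses from the first leaf edge to the last one.
    /// assert_eq!(map.iter().next(), Some((&1, &"a")));
    /// ```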
    pub fn full_range(self) -> LazyLeafRange<marker::Immut<'a>, K, V> {
        full_range(self, self)
    }
}

impl<'a, K: 'a, V: 'a> NodeRef<marker::ValMut<'a>, K, V, marker::LeafOrInternal> {
    /// Splits a unique reference into a pair of leaf edges delimiting a specified range.
    /// The results are non-unique references allowing (some) mutation, which must be used
    /// carefully.
    ///
    /// The result is meaningful only if the tree is ordered by key, like the tree
    /// in a `BTreeMap` is.
    ///
    /// # Safety
    /// Do not use the duplicate handles to visit the same KV twice.
    pub fn range_search<Q, R>(self, range: R) -> LeafRange<marker::ValMut<'a>, K, V>
    where
        Q: ?Sized + Ord,
        K: Borrow<Q>,
        R: RangeBounds<Q>,
    {
        unsafe { self.find_leaf_edges_spanning_range(range) }
    }

    /// Splits a unique reference into a pair of leaf edges delimiting the full range of the tree.
    /// The results are non-unique references allowing mutation (of values only), so must be used
    /// with care.
    pub fn full_range(self) -> LazyLeafRange<marker::ValMut<'a>, K, V> {
        // We duplicate the root NodeRef here -- we will never visit the same KV
        // twice, and never end up with overlapping value references.
        let self2 = unsafe { ptr::read(&self) };
        full_range(self, self2)
    }
}

impl<K, V> NodeRef<marker::Dying, K, V, marker::LeafOrInternal> {
    /// Splits a unique reference into a pair of leaf edges delimiting the full range of the tree.
    /// The results are non-unique references allowing massively destructive mutation, so must be
    /// used with the utmost care.
    pub fn full_range(self) -> LazyLeafRange<marker::Dying, K, V> {
        // We duplicate the root NodeRef here -- we will never access it in a way
        // that overlaps references obtained from the root.
        let self2 = unsafe { ptr::read(&self) };
        full_range(self, self2)
    }
}

impl<BorrowType: marker::BorrowType, K, V>
    Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>
{
    /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
    /// on the right side, which is either in the same leaf node or in an ancestor node.
    /// If the leaf edge is the last one in the tree, returns [`Result::Err`] with the root node.
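    ///
    /// Illustration (descriptive only): in a two-level tree with root KV `4` over leaves
    /// `[1, 2]` and `[5, 6]`, the edge to the right of `2` has no KV to its right within its
    /// leaf, so the search ascends and returns the parent's KV `4`.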
    pub fn next_kv(
        self,
    ) -> Result<
        Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
        NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
    > {
        let mut edge = self.forget_node_type();
        loop {
            edge = match edge.right_kv() {
                Ok(kv) => return Ok(kv),
                Err(last_edge) => match last_edge.into_node().ascend() {
                    Ok(parent_edge) => parent_edge.forget_node_type(),
                    Err(root) => return Err(root),
                },
            }
        }
    }

    /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
    /// on the left side, which is either in the same leaf node or in an ancestor node.
    /// If the leaf edge is the first one in the tree, returns [`Result::Err`] with the root node.
    pub fn next_back_kv(
        self,
    ) -> Result<
        Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
        NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
    > {
        let mut edge = self.forget_node_type();
        loop {
            edge = match edge.left_kv() {
                Ok(kv) => return Ok(kv),
                Err(last_edge) => match last_edge.into_node().ascend() {
                    Ok(parent_edge) => parent_edge.forget_node_type(),
                    Err(root) => return Err(root),
                },
            }
        }
    }
}

impl<BorrowType: marker::BorrowType, K, V>
    Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>
{
    /// Given an internal edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
    /// on the right side, which is either in the same internal node or in an ancestor node.
    /// If the internal edge is the last one in the tree, returns [`Result::Err`] with the root node.
    fn next_kv(
        self,
    ) -> Result<
        Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::KV>,
        NodeRef<BorrowType, K, V, marker::Internal>,
    > {
        let mut edge = self;
        loop {
            edge = match edge.right_kv() {
                Ok(internal_kv) => return Ok(internal_kv),
                Err(last_edge) => match last_edge.into_node().ascend() {
                    Ok(parent_edge) => parent_edge,
                    Err(root) => return Err(root),
                },
            }
        }
    }
}

impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
    /// Given a leaf edge handle into a dying tree, returns the next leaf edge
    /// on the right side, and the key-value pair in between, if they exist.
    ///
    /// If the given edge is the last one in a leaf, this method deallocates
    /// the leaf, as well as any ancestor nodes whose last edge was reached.
    /// This implies that if no more key-value pair follows, the entire tree
    /// will have been deallocated and there is nothing left to return.
    ///
    /// # Safety
    /// - The given edge must not have been previously returned by counterpart
    ///   `deallocating_next_back`.
    /// - The returned KV handle is only valid to access the key and value,
    ///   and only valid until the next call to a `deallocating_` method.
    unsafe fn deallocating_next<A: Allocator + Clone>(
        self,
        alloc: A,
    ) -> Option<(Self, Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>)>
    {
        let mut edge = self.forget_node_type();
        loop {
            edge = match edge.right_kv() {
                Ok(kv) => return Some((unsafe { ptr::read(&kv) }.next_leaf_edge(), kv)),
                Err(last_edge) => {
                    match unsafe { last_edge.into_node().deallocate_and_ascend(alloc.clone()) } {
                        Some(parent_edge) => parent_edge.forget_node_type(),
                        None => return None,
                    }
                }
            }
        }
    }

    /// Given a leaf edge handle into a dying tree, returns the next leaf edge
    /// on the left side, and the key-value pair in between, if they exist.
    ///
    /// If the given edge is the first one in a leaf, this method deallocates
    /// the leaf, as well as any ancestor nodes whose first edge was reached.
    /// This implies that if no more key-value pair follows, the entire tree
    /// will have been deallocated and there is nothing left to return.
    ///
    /// # Safety
    /// - The given edge must not have been previously returned by counterpart
    ///   `deallocating_next`.
    /// - The returned KV handle is only valid to access the key and value,
    ///   and only valid until the next call to a `deallocating_` method.
    unsafe fn deallocating_next_back<A: Allocator + Clone>(
        self,
        alloc: A,
    ) -> Option<(Self, Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>)>
    {
        let mut edge = self.forget_node_type();
        loop {
            edge = match edge.left_kv() {
                Ok(kv) => return Some((unsafe { ptr::read(&kv) }.next_back_leaf_edge(), kv)),
                Err(last_edge) => {
                    match unsafe { last_edge.into_node().deallocate_and_ascend(alloc.clone()) } {
                        Some(parent_edge) => parent_edge.forget_node_type(),
                        None => return None,
                    }
                }
            }
        }
    }

    /// Deallocates a pile of nodes from the leaf up to the root.
    /// This is the only way to deallocate the remainder of a tree after
    /// `deallocating_next` and `deallocating_next_back` have been nibbling at
    /// both sides of the tree, and have hit the same edge. As it is intended
    /// only to be called when all keys and values have been returned,
    /// no cleanup is done on any of the keys or values.
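    ///
    /// Descriptive note (hedged, about callers rather than this function): this is what tears
    /// down the remaining spine of nodes when, for example, a `BTreeMap` into-iterator is
    /// dropped after all of its key-value pairs have already been taken.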
    fn deallocating_end<A: Allocator + Clone>(self, alloc: A) {
        let mut edge = self.forget_node_type();
        while let Some(parent_edge) =
            unsafe { edge.into_node().deallocate_and_ascend(alloc.clone()) }
        {
            edge = parent_edge.forget_node_type();
        }
    }
}

impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Moves the leaf edge handle to the next leaf edge and returns references to the
    /// key and value in between.
    ///
    /// # Safety
    /// There must be another KV in the direction travelled.
    unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
        super::mem::replace(self, |leaf_edge| {
            let kv = leaf_edge.next_kv().ok().unwrap();
            (kv.next_leaf_edge(), kv.into_kv())
        })
    }

    /// Moves the leaf edge handle to the previous leaf edge and returns references to the
    /// key and value in between.
    ///
    /// # Safety
    /// There must be another KV in the direction travelled.
    unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
        super::mem::replace(self, |leaf_edge| {
            let kv = leaf_edge.next_back_kv().ok().unwrap();
            (kv.next_back_leaf_edge(), kv.into_kv())
        })
    }
}

impl<'a, K, V> Handle<NodeRef<marker::ValMut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Moves the leaf edge handle to the next leaf edge and returns references to the
    /// key and value in between.
    ///
    /// # Safety
    /// There must be another KV in the direction travelled.
    unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) {
        let kv = super::mem::replace(self, |leaf_edge| {
            let kv = leaf_edge.next_kv().ok().unwrap();
            (unsafe { ptr::read(&kv) }.next_leaf_edge(), kv)
        });
        // Doing this last is faster, according to benchmarks.
        kv.into_kv_valmut()
    }

    /// Moves the leaf edge handle to the previous leaf edge and returns references to the
    /// key and value in between.
    ///
    /// # Safety
    /// There must be another KV in the direction travelled.
    unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) {
        let kv = super::mem::replace(self, |leaf_edge| {
            let kv = leaf_edge.next_back_kv().ok().unwrap();
            (unsafe { ptr::read(&kv) }.next_back_leaf_edge(), kv)
        });
        // Doing this last is faster, according to benchmarks.
        kv.into_kv_valmut()
    }
}

impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
    /// Moves the leaf edge handle to the next leaf edge and returns the key and value
    /// in between, deallocating any node left behind while leaving the corresponding
    /// edge in its parent node dangling.
    ///
    /// # Safety
    /// - There must be another KV in the direction travelled.
    /// - That KV was not previously returned by counterpart
    ///   `deallocating_next_back_unchecked` on any copy of the handles
    ///   being used to traverse the tree.
    ///
    /// The only safe way to proceed with the updated handle is to compare it, drop it,
    /// or call this method or counterpart `deallocating_next_back_unchecked` again.
    unsafe fn deallocating_next_unchecked<A: Allocator + Clone>(
        &mut self,
        alloc: A,
    ) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
        super::mem::replace(self, |leaf_edge| unsafe {
            leaf_edge.deallocating_next(alloc).unwrap()
        })
    }

    /// Moves the leaf edge handle to the previous leaf edge and returns the key and value
    /// in between, deallocating any node left behind while leaving the corresponding
    /// edge in its parent node dangling.
    ///
    /// # Safety
    /// - There must be another KV in the direction travelled.
    /// - That leaf edge was not previously returned by counterpart
    ///   `deallocating_next_unchecked` on any copy of the handles
    ///   being used to traverse the tree.
    ///
    /// The only safe way to proceed with the updated handle is to compare it, drop it,
    /// or call this method or counterpart `deallocating_next_unchecked` again.
    unsafe fn deallocating_next_back_unchecked<A: Allocator + Clone>(
        &mut self,
        alloc: A,
    ) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
        super::mem::replace(self, |leaf_edge| unsafe {
            leaf_edge.deallocating_next_back(alloc).unwrap()
        })
    }
}

impl<BorrowType: marker::BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
    /// Returns the leftmost leaf edge in or underneath a node - in other words, the edge
    /// you need first when navigating forward (or last when navigating backward).
    #[inline]
    pub fn first_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
        let mut node = self;
        loop {
            match node.force() {
                Leaf(leaf) => return leaf.first_edge(),
                Internal(internal) => node = internal.first_edge().descend(),
            }
        }
    }

    /// Returns the rightmost leaf edge in or underneath a node - in other words, the edge
    /// you need last when navigating forward (or first when navigating backward).
    #[inline]
    pub fn last_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
        let mut node = self;
        loop {
            match node.force() {
                Leaf(leaf) => return leaf.last_edge(),
                Internal(internal) => node = internal.last_edge().descend(),
            }
        }
    }
}

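/// A position visited by `visit_nodes_in_order`: either a whole leaf node, a whole internal
/// node, or a single KV of an internal node.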
pub enum Position<BorrowType, K, V> {
    Leaf(NodeRef<BorrowType, K, V, marker::Leaf>),
    Internal(NodeRef<BorrowType, K, V, marker::Internal>),
    InternalKV(Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::KV>),
}

impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
    /// Visits leaf nodes and internal KVs in order of ascending keys, and also
    /// visits internal nodes as a whole in a depth first order, meaning that
    /// internal nodes precede their individual KVs and their child nodes.
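    ///
    /// Illustration (descriptive only): for a root with KV `4` over leaves `[1, 2]` and
    /// `[5, 6]`, the visit order is `Internal(root)`, `Leaf([1, 2])`, `InternalKV(4)`,
    /// `Leaf([5, 6])`.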
    pub fn visit_nodes_in_order<F>(self, mut visit: F)
    where
        F: FnMut(Position<marker::Immut<'a>, K, V>),
    {
        match self.force() {
            Leaf(leaf) => visit(Position::Leaf(leaf)),
            Internal(internal) => {
                visit(Position::Internal(internal));
                let mut edge = internal.first_edge();
                loop {
                    edge = match edge.descend().force() {
                        Leaf(leaf) => {
                            visit(Position::Leaf(leaf));
                            match edge.next_kv() {
                                Ok(kv) => {
                                    visit(Position::InternalKV(kv));
                                    kv.right_edge()
                                }
                                Err(_) => return,
                            }
                        }
                        Internal(internal) => {
                            visit(Position::Internal(internal));
                            internal.first_edge()
                        }
                    }
                }
            }
        }
    }

    /// Calculates the number of elements in a (sub)tree.
    pub fn calc_length(self) -> usize {
        let mut result = 0;
        self.visit_nodes_in_order(|pos| match pos {
            Position::Leaf(node) => result += node.len(),
            Position::Internal(node) => result += node.len(),
            Position::InternalKV(_) => (),
        });
        result
    }
}

impl<BorrowType: marker::BorrowType, K, V>
    Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>
{
    /// Returns the leaf edge closest to a KV for forward navigation.
    pub fn next_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
        match self.force() {
            Leaf(leaf_kv) => leaf_kv.right_edge(),
            Internal(internal_kv) => {
                let next_internal_edge = internal_kv.right_edge();
                next_internal_edge.descend().first_leaf_edge()
            }
        }
    }

    /// Returns the leaf edge closest to a KV for backward navigation.
    pub fn next_back_leaf_edge(
        self,
    ) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
        match self.force() {
            Leaf(leaf_kv) => leaf_kv.left_edge(),
            Internal(internal_kv) => {
                let next_internal_edge = internal_kv.left_edge();
                next_internal_edge.descend().last_leaf_edge()
            }
        }
    }
}

impl<BorrowType: marker::BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
    /// Returns the leaf edge corresponding to the first point at which the
    /// given bound is true.
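    ///
    /// Illustration (descriptive only): with keys `[2, 4]` and `bound = Included(&3)`, the
    /// returned edge lies between `2` and `4`, so forward iteration from it starts at `4`.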
    pub fn lower_bound<Q: ?Sized>(
        self,
        mut bound: SearchBound<&Q>,
    ) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>
    where
        Q: Ord,
        K: Borrow<Q>,
    {
        let mut node = self;
        loop {
            let (edge, new_bound) = node.find_lower_bound_edge(bound);
            match edge.force() {
                Leaf(edge) => return edge,
                Internal(edge) => {
                    node = edge.descend();
                    bound = new_bound;
                }
            }
        }
    }

    /// Returns the leaf edge corresponding to the last point at which the
    /// given bound is true.
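    ///
    /// Illustration (descriptive only): with keys `[2, 4]` and `bound = Included(&3)`, the
    /// returned edge lies between `2` and `4`, so backward iteration from it starts at `2`.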
    pub fn upper_bound<Q: ?Sized>(
        self,
        mut bound: SearchBound<&Q>,
    ) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>
    where
        Q: Ord,
        K: Borrow<Q>,
    {
        let mut node = self;
        loop {
            let (edge, new_bound) = node.find_upper_bound_edge(bound);
            match edge.force() {
                Leaf(edge) => return edge,
                Internal(edge) => {
                    node = edge.descend();
                    bound = new_bound;
                }
            }
        }
    }
}