1// This is an attempt at an implementation following the ideal
2//
3// ```
4// struct BTreeMap<K, V> {
5// height: usize,
6// root: Option<Box<Node<K, V, height>>>
7// }
8//
9// struct Node<K, V, height: usize> {
10// keys: [K; 2 * B - 1],
11// vals: [V; 2 * B - 1],
12// edges: [if height > 0 { Box<Node<K, V, height - 1>> } else { () }; 2 * B],
13// parent: Option<(NonNull<Node<K, V, height + 1>>, u16)>,
14// len: u16,
15// }
16// ```
17//
18// Since Rust doesn't actually have dependent types and polymorphic recursion,
19// we make do with lots of unsafety.
20
21// A major goal of this module is to avoid complexity by treating the tree as a generic (if
22// weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
23// this module doesn't care whether the entries are sorted, which nodes can be underfull, or
24// even what underfull means. However, we do rely on a few invariants:
25//
26// - Trees must have uniform depth/height. This means that every path down to a leaf from a
27// given node has exactly the same length.
28// - A node of length `n` has `n` keys, `n` values, and `n + 1` edges.
29// This implies that even an empty node has at least one edge.
30// For a leaf node, "having an edge" only means we can identify a position in the node,
31// since leaf edges are empty and need no data representation. In an internal node,
32// an edge both identifies a position and contains a pointer to a child node.
33
34use core::marker::PhantomData;
35use core::mem::{self, MaybeUninit};
36use core::ptr::{self, NonNull};
37use core::slice::SliceIndex;
38
39use crate::alloc::{Allocator, Layout};
40use crate::boxed::Box;
41
// The order parameter of the tree; all capacity bounds below are derived from it.
const B: usize = 6;
// Maximum number of key-value pairs a single node can store.
pub const CAPACITY: usize = 2 * B - 1;
// Length of each of the two halves that a full node is split into.
pub const MIN_LEN_AFTER_SPLIT: usize = B - 1;
// Index of the central key-value pair of a node at full capacity
// (used by splitting code elsewhere in this module).
const KV_IDX_CENTER: usize = B - 1;
// Edge indices immediately left/right of the central pair of a full node.
const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1;
const EDGE_IDX_RIGHT_OF_CENTER: usize = B;
48
/// The underlying representation of leaf nodes and part of the representation of internal nodes.
struct LeafNode<K, V> {
    /// We want to be covariant in `K` and `V`, hence `NonNull` rather than `*mut`
    /// (raw mutable pointers are invariant over their pointee type).
    parent: Option<NonNull<InternalNode<K, V>>>,

    /// This node's index into the parent node's `edges` array.
    /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
    /// This is only guaranteed to be initialized when `parent` is non-null.
    parent_idx: MaybeUninit<u16>,

    /// The number of keys and values this node stores.
    len: u16,

    /// The arrays storing the actual data of the node. Only the first `len` elements of each
    /// array are initialized and valid.
    keys: [MaybeUninit<K>; CAPACITY],
    vals: [MaybeUninit<V>; CAPACITY],
}
67
68impl<K, V> LeafNode<K, V> {
69 /// Initializes a new `LeafNode` in-place.
70 unsafe fn init(this: *mut Self) {
71 // As a general policy, we leave fields uninitialized if they can be, as this should
72 // be both slightly faster and easier to track in Valgrind.
73 unsafe {
74 // parent_idx, keys, and vals are all MaybeUninit
75 ptr::addr_of_mut!((*this).parent).write(val:None);
76 ptr::addr_of_mut!((*this).len).write(val:0);
77 }
78 }
79
80 /// Creates a new boxed `LeafNode`.
81 fn new<A: Allocator + Clone>(alloc: A) -> Box<Self, A> {
82 unsafe {
83 let mut leaf: Box>, …> = Box::new_uninit_in(alloc);
84 LeafNode::init(this:leaf.as_mut_ptr());
85 leaf.assume_init()
86 }
87 }
88}
89
/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
/// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
/// `InternalNode` can be directly cast to a pointer to the underlying `LeafNode` portion of the
/// node, allowing code to act on leaf and internal nodes generically without having to even check
/// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`,
/// which guarantees that `data` is laid out first, at offset zero.
#[repr(C)]
// gdb_providers.py uses this type name for introspection.
struct InternalNode<K, V> {
    data: LeafNode<K, V>,

    /// The pointers to the children of this node. `len + 1` of these are considered
    /// initialized and valid, except that near the end, while the tree is held
    /// through borrow type `Dying`, some of these pointers are dangling.
    edges: [MaybeUninit<BoxedNode<K, V>>; 2 * B],
}
105
106impl<K, V> InternalNode<K, V> {
107 /// Creates a new boxed `InternalNode`.
108 ///
109 /// # Safety
110 /// An invariant of internal nodes is that they have at least one
111 /// initialized and valid edge. This function does not set up
112 /// such an edge.
113 unsafe fn new<A: Allocator + Clone>(alloc: A) -> Box<Self, A> {
114 unsafe {
115 let mut node: Box>, …> = Box::<Self, _>::new_uninit_in(alloc);
116 // We only need to initialize the data; the edges are MaybeUninit.
117 LeafNode::init(this:ptr::addr_of_mut!((*node.as_mut_ptr()).data));
118 node.assume_init()
119 }
120 }
121}
122
/// A managed, non-null pointer to a node. This is either an owned pointer to
/// `LeafNode<K, V>` or an owned pointer to `InternalNode<K, V>`.
///
/// However, `BoxedNode` contains no information as to which of the two types
/// of nodes it actually contains, and, partially due to this lack of information,
/// is not a separate type and has no destructor. The `LeafNode` pointee type is
/// valid for both cases thanks to `InternalNode`'s `repr(C)` layout.
type BoxedNode<K, V> = NonNull<LeafNode<K, V>>;
130
131// N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
132// is `Mut`. This is technically wrong, but cannot result in any unsafety due to
133// internal use of `NodeRef` because we stay completely generic over `K` and `V`.
134// However, whenever a public type wraps `NodeRef`, make sure that it has the
135// correct variance.
136///
137/// A reference to a node.
138///
139/// This type has a number of parameters that controls how it acts:
140/// - `BorrowType`: A dummy type that describes the kind of borrow and carries a lifetime.
141/// - When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`.
142/// - When this is `ValMut<'a>`, the `NodeRef` acts roughly like `&'a Node`
143/// with respect to keys and tree structure, but also allows many
144/// mutable references to values throughout the tree to coexist.
145/// - When this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
146/// although insert methods allow a mutable pointer to a value to coexist.
147/// - When this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`,
148/// but does not have a destructor, and must be cleaned up manually.
149/// - When this is `Dying`, the `NodeRef` still acts roughly like `Box<Node>`,
150/// but has methods to destroy the tree bit by bit, and ordinary methods,
151/// while not marked as unsafe to call, can invoke UB if called incorrectly.
152/// Since any `NodeRef` allows navigating through the tree, `BorrowType`
153/// effectively applies to the entire tree, not just to the node itself.
154/// - `K` and `V`: These are the types of keys and values stored in the nodes.
155/// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
156/// `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
157/// `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
158/// `NodeRef` could be pointing to either type of node.
159/// `Type` is named `NodeType` when used outside `NodeRef`.
160///
161/// Both `BorrowType` and `NodeType` restrict what methods we implement, to
162/// exploit static type safety. There are limitations in the way we can apply
163/// such restrictions:
164/// - For each type parameter, we can only define a method either generically
165/// or for one particular type. For example, we cannot define a method like
166/// `into_kv` generically for all `BorrowType`, or once for all types that
167/// carry a lifetime, because we want it to return `&'a` references.
168/// Therefore, we define it only for the least powerful type `Immut<'a>`.
169/// - We cannot get implicit coercion from say `Mut<'a>` to `Immut<'a>`.
170/// Therefore, we have to explicitly call `reborrow` on a more powerful
171/// `NodeRef` in order to reach a method like `into_kv`.
172///
173/// All methods on `NodeRef` that return some kind of reference, either:
174/// - Take `self` by value, and return the lifetime carried by `BorrowType`.
175/// Sometimes, to invoke such a method, we need to call `reborrow_mut`.
176/// - Take `self` by reference, and (implicitly) return that reference's
177/// lifetime, instead of the lifetime carried by `BorrowType`. That way,
178/// the borrow checker guarantees that the `NodeRef` remains borrowed as long
179/// as the returned reference is used.
180/// The methods supporting insert bend this rule by returning a raw pointer,
181/// i.e., a reference without any lifetime.
pub struct NodeRef<BorrowType, K, V, Type> {
    /// The number of levels that the node and the level of leaves are apart, a
    /// constant of the node that cannot be entirely described by `Type`, and that
    /// the node itself does not store. We only need to store the height of the root
    /// node, and derive every other node's height from it.
    /// Must be zero if `Type` is `Leaf` and non-zero if `Type` is `Internal`.
    height: usize,
    /// The pointer to the leaf or internal node. The definition of `InternalNode`
    /// ensures that the pointer is valid either way (its `LeafNode` portion is
    /// always at offset zero, thanks to `repr(C)`).
    node: NonNull<LeafNode<K, V>>,
    /// Zero-sized: records `BorrowType` and `Type` purely at the type level.
    _marker: PhantomData<(BorrowType, Type)>,
}
194
/// The root node of an owned tree.
///
/// Note that this does not have a destructor, and must be cleaned up manually.
pub type Root<K, V> = NodeRef<marker::Owned, K, V, marker::LeafOrInternal>;

// `Copy`/`Clone` are implemented manually, for the `Immut` borrow type only,
// so that they apply regardless of whether `K`, `V` or `Type` are `Copy`.
impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> {}
impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
    fn clone(&self) -> Self {
        *self
    }
}

// SAFETY(review): these impls mirror the auto-traits of `&T` and `&mut T` for
// the corresponding borrow types — `Sync` requires `K: Sync, V: Sync` like a
// shared reference, while `Send` for the exclusive/owning borrow types requires
// `K: Send, V: Send` like an exclusive reference. Presumed sound for the same
// reasons; confirm against the borrow-type semantics documented on `NodeRef`.
unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync for NodeRef<BorrowType, K, V, Type> {}

unsafe impl<K: Sync, V: Sync, Type> Send for NodeRef<marker::Immut<'_>, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Mut<'_>, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::ValMut<'_>, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Dying, K, V, Type> {}
214
215impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
216 pub fn new_leaf<A: Allocator + Clone>(alloc: A) -> Self {
217 Self::from_new_leaf(LeafNode::new(alloc))
218 }
219
220 fn from_new_leaf<A: Allocator + Clone>(leaf: Box<LeafNode<K, V>, A>) -> Self {
221 NodeRef { height: 0, node: NonNull::from(Box::leak(leaf)), _marker: PhantomData }
222 }
223}
224
225impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
226 fn new_internal<A: Allocator + Clone>(child: Root<K, V>, alloc: A) -> Self {
227 let mut new_node: Box, A> = unsafe { InternalNode::new(alloc) };
228 new_node.edges[0].write(val:child.node);
229 unsafe { NodeRef::from_new_internal(internal:new_node, height:child.height + 1) }
230 }
231
232 /// # Safety
233 /// `height` must not be zero.
234 unsafe fn from_new_internal<A: Allocator + Clone>(
235 internal: Box<InternalNode<K, V>, A>,
236 height: usize,
237 ) -> Self {
238 debug_assert!(height > 0);
239 let node: NonNull> = NonNull::from(Box::leak(internal)).cast();
240 let mut this: NodeRef = NodeRef { height, node, _marker: PhantomData };
241 this.borrow_mut().correct_all_childrens_parent_links();
242 this
243 }
244}
245
impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Unpack a node reference that was packed as `NodeRef::parent`.
    fn from_internal(node: NonNull<InternalNode<K, V>>, height: usize) -> Self {
        debug_assert!(height > 0);
        // Cast to the `LeafNode` pointer stored in `NodeRef`; valid because
        // `InternalNode` is `repr(C)` with its `LeafNode` data first.
        NodeRef { height, node: node.cast(), _marker: PhantomData }
    }
}

impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Exposes the data of an internal node.
    ///
    /// Returns a raw ptr to avoid invalidating other references to this node.
    fn as_internal_ptr(this: &Self) -> *mut InternalNode<K, V> {
        // SAFETY: the static node type is `Internal`.
        this.node.as_ptr() as *mut InternalNode<K, V>
    }
}
263
264impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
265 /// Borrows exclusive access to the data of an internal node.
266 fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
267 let ptr: *mut InternalNode = Self::as_internal_ptr(self);
268 unsafe { &mut *ptr }
269 }
270}
271
impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Finds the length of the node. This is the number of keys or values.
    /// The number of edges is `len() + 1`.
    /// Note that, despite being safe, calling this function can have the side effect
    /// of invalidating mutable references that unsafe code has created.
    pub fn len(&self) -> usize {
        // Crucially, we only access the `len` field here. If BorrowType is marker::ValMut,
        // there might be outstanding mutable references to values that we must not invalidate.
        unsafe { usize::from((*Self::as_leaf_ptr(self)).len) }
    }

    /// Returns the number of levels that the node and leaves are apart. Zero
    /// height means the node is a leaf itself. If you picture trees with the
    /// root on top, the number says at which elevation the node appears.
    /// If you picture trees with leaves on top, the number says how high
    /// the tree extends above the node.
    pub fn height(&self) -> usize {
        self.height
    }

    /// Temporarily takes out another, immutable reference to the same node.
    pub fn reborrow(&self) -> NodeRef<marker::Immut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Exposes the leaf portion of any leaf or internal node.
    /// For internal nodes this relies on `InternalNode` being `repr(C)` with
    /// its `LeafNode` data laid out first.
    ///
    /// Returns a raw ptr to avoid invalidating other references to this node.
    fn as_leaf_ptr(this: &Self) -> *mut LeafNode<K, V> {
        // The node must be valid for at least the LeafNode portion.
        // This is not a reference in the NodeRef type because we don't know if
        // it should be unique or shared.
        this.node.as_ptr()
    }
}
307
impl<BorrowType: marker::BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
    /// Finds the parent of the current node. Returns `Ok(handle)` if the current
    /// node actually has a parent, where `handle` points to the edge of the parent
    /// that points to the current node. Returns `Err(self)` if the current node has
    /// no parent, giving back the original `NodeRef`.
    ///
    /// The method name assumes you picture trees with the root node on top.
    ///
    /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
    /// both, upon success, do nothing.
    pub fn ascend(
        self,
    ) -> Result<Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>, Self> {
        // Compile-time check: only borrow types that permit traversal may ascend.
        const {
            assert!(BorrowType::TRAVERSAL_PERMIT);
        }

        // We need to use raw pointers to nodes because, if BorrowType is marker::ValMut,
        // there might be outstanding mutable references to values that we must not invalidate.
        let leaf_ptr: *const _ = Self::as_leaf_ptr(&self);
        unsafe { (*leaf_ptr).parent }
            .as_ref()
            .map(|parent| Handle {
                // The parent is one level further from the leaves.
                node: NodeRef::from_internal(*parent, self.height + 1),
                // SAFETY: `parent_idx` is initialized whenever `parent` is non-null.
                idx: unsafe { usize::from((*leaf_ptr).parent_idx.assume_init()) },
                _marker: PhantomData,
            })
            .ok_or(self)
    }

    /// Returns a handle to the leftmost edge (index 0) of the node.
    pub fn first_edge(self) -> Handle<Self, marker::Edge> {
        unsafe { Handle::new_edge(self, 0) }
    }

    /// Returns a handle to the rightmost edge (index `len`) of the node.
    pub fn last_edge(self) -> Handle<Self, marker::Edge> {
        let len = self.len();
        unsafe { Handle::new_edge(self, len) }
    }

    /// Note that `self` must be nonempty.
    pub fn first_kv(self) -> Handle<Self, marker::KV> {
        let len = self.len();
        assert!(len > 0);
        unsafe { Handle::new_kv(self, 0) }
    }

    /// Note that `self` must be nonempty.
    pub fn last_kv(self) -> Handle<Self, marker::KV> {
        let len = self.len();
        assert!(len > 0);
        unsafe { Handle::new_kv(self, len - 1) }
    }
}
361
362impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
363 /// Could be a public implementation of PartialEq, but only used in this module.
364 fn eq(&self, other: &Self) -> bool {
365 let Self { node: &NonNull>, height: &usize, _marker: &PhantomData<(BorrowType, …)> } = self;
366 if node.eq(&other.node) {
367 debug_assert_eq!(*height, other.height);
368 true
369 } else {
370 false
371 }
372 }
373}
374
375impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
376 /// Exposes the leaf portion of any leaf or internal node in an immutable tree.
377 fn into_leaf(self) -> &'a LeafNode<K, V> {
378 let ptr: *mut LeafNode = Self::as_leaf_ptr(&self);
379 // SAFETY: there can be no mutable references into this tree borrowed as `Immut`.
380 unsafe { &*ptr }
381 }
382
383 /// Borrows a view into the keys stored in the node.
384 pub fn keys(&self) -> &[K] {
385 let leaf: &LeafNode = self.into_leaf();
386 unsafe {
387 MaybeUninit::slice_assume_init_ref(slice:leaf.keys.get_unchecked(..usize::from(leaf.len)))
388 }
389 }
390}
391
392impl<K, V> NodeRef<marker::Dying, K, V, marker::LeafOrInternal> {
393 /// Similar to `ascend`, gets a reference to a node's parent node, but also
394 /// deallocates the current node in the process. This is unsafe because the
395 /// current node will still be accessible despite being deallocated.
396 pub unsafe fn deallocate_and_ascend<A: Allocator + Clone>(
397 self,
398 alloc: A,
399 ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::Internal>, marker::Edge>> {
400 let height: usize = self.height;
401 let node: NonNull> = self.node;
402 let ret: Option, …>> = self.ascend().ok();
403 unsafe {
404 alloc.deallocate(
405 ptr:node.cast(),
406 layout:if height > 0 {
407 Layout::new::<InternalNode<K, V>>()
408 } else {
409 Layout::new::<LeafNode<K, V>>()
410 },
411 );
412 }
413 ret
414 }
415}
416
impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Temporarily takes out another mutable reference to the same node. Beware, as
    /// this method is very dangerous, doubly so since it might not immediately appear
    /// dangerous.
    ///
    /// Because mutable pointers can roam anywhere around the tree, the returned
    /// pointer can easily be used to make the original pointer dangling, out of
    /// bounds, or invalid under stacked borrow rules.
    // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef`
    // that restricts the use of navigation methods on reborrowed pointers,
    // preventing this unsafety.
    unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Borrows exclusive access to the leaf portion of a leaf or internal node.
    fn as_leaf_mut(&mut self) -> &mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }

    /// Offers exclusive access to the leaf portion of a leaf or internal node,
    /// consuming `self` so the returned reference carries the full lifetime `'a`.
    fn into_leaf_mut(mut self) -> &'a mut LeafNode<K, V> {
        let ptr = Self::as_leaf_ptr(&mut self);
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }

    /// Returns a dormant copy of this node with its lifetime erased which can
    /// be reawakened later.
    pub fn dormant(&self) -> NodeRef<marker::DormantMut, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}

impl<K, V, Type> NodeRef<marker::DormantMut, K, V, Type> {
    /// Revert to the unique borrow initially captured.
    ///
    /// # Safety
    ///
    /// The reborrow must have ended, i.e., the reference returned by `new` and
    /// all pointers and references derived from it, must not be used anymore.
    pub unsafe fn awaken<'a>(self) -> NodeRef<marker::Mut<'a>, K, V, Type> {
        // The caller chooses the new lifetime `'a`; soundness rests on the
        // contract above, not on anything checked here.
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
464
465impl<K, V, Type> NodeRef<marker::Dying, K, V, Type> {
466 /// Borrows exclusive access to the leaf portion of a dying leaf or internal node.
467 fn as_leaf_dying(&mut self) -> &mut LeafNode<K, V> {
468 let ptr: *mut LeafNode = Self::as_leaf_ptr(self);
469 // SAFETY: we have exclusive access to the entire node.
470 unsafe { &mut *ptr }
471 }
472}
473
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Borrows exclusive access to an element of the key storage area.
    /// Note that the returned slot(s) may be uninitialized; it is the caller's
    /// responsibility to respect the `len` invariant.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY
    unsafe fn key_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<K>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the key slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_leaf_mut().keys.as_mut_slice().get_unchecked_mut(index) }
    }

    /// Borrows exclusive access to an element or slice of the node's value storage area.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY
    unsafe fn val_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<V>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the value slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_leaf_mut().vals.as_mut_slice().get_unchecked_mut(index) }
    }
}

impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
    /// Borrows exclusive access to an element or slice of the node's storage area for edge contents.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY + 1
    unsafe fn edge_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<BoxedNode<K, V>>], Output = Output>,
    {
        // SAFETY: the caller will not be able to call further methods on self
        // until the edge slice reference is dropped, as we have unique access
        // for the lifetime of the borrow.
        unsafe { self.as_internal_mut().edges.as_mut_slice().get_unchecked_mut(index) }
    }
}
519
520impl<'a, K, V, Type> NodeRef<marker::ValMut<'a>, K, V, Type> {
521 /// # Safety
522 /// - The node has more than `idx` initialized elements.
523 unsafe fn into_key_val_mut_at(mut self, idx: usize) -> (&'a K, &'a mut V) {
524 // We only create a reference to the one element we are interested in,
525 // to avoid aliasing with outstanding references to other elements,
526 // in particular, those returned to the caller in earlier iterations.
527 let leaf: *mut LeafNode = Self::as_leaf_ptr(&mut self);
528 let keys: *const [MaybeUninit; 11] = unsafe { ptr::addr_of!((*leaf).keys) };
529 let vals: *mut [MaybeUninit; 11] = unsafe { ptr::addr_of_mut!((*leaf).vals) };
530 // We must coerce to unsized array pointers because of Rust issue #74679.
531 let keys: *const [_] = keys;
532 let vals: *mut [_] = vals;
533 let key: &K = unsafe { (&*keys.get_unchecked(index:idx)).assume_init_ref() };
534 let val: &mut V = unsafe { (&mut *vals.get_unchecked_mut(index:idx)).assume_init_mut() };
535 (key, val)
536 }
537}
538
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
    /// Borrows exclusive access to the length of the node.
    /// Callers must keep `len` consistent with the number of initialized
    /// keys/values (and edges, for internal nodes).
    pub fn len_mut(&mut self) -> &mut u16 {
        &mut self.as_leaf_mut().len
    }
}
545
546impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
547 /// # Safety
548 /// Every item returned by `range` is a valid edge index for the node.
549 unsafe fn correct_childrens_parent_links<R: Iterator<Item = usize>>(&mut self, range: R) {
550 for i: usize in range {
551 debug_assert!(i <= self.len());
552 unsafe { Handle::new_edge(self.reborrow_mut(), idx:i) }.correct_parent_link();
553 }
554 }
555
556 fn correct_all_childrens_parent_links(&mut self) {
557 let len: usize = self.len();
558 unsafe { self.correct_childrens_parent_links(range:0..=len) };
559 }
560}
561
562impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
563 /// Sets the node's link to its parent edge,
564 /// without invalidating other references to the node.
565 fn set_parent_link(&mut self, parent: NonNull<InternalNode<K, V>>, parent_idx: usize) {
566 let leaf: *mut LeafNode = Self::as_leaf_ptr(self);
567 unsafe { (*leaf).parent = Some(parent) };
568 unsafe { (*leaf).parent_idx.write(val:parent_idx as u16) };
569 }
570}
571
572impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
573 /// Clears the root's link to its parent edge.
574 fn clear_parent_link(&mut self) {
575 let mut root_node: NodeRef, K, V, LeafOrInternal> = self.borrow_mut();
576 let leaf: &mut LeafNode = root_node.as_leaf_mut();
577 leaf.parent = None;
578 }
579}
580
impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
    /// Returns a new owned tree, with its own root node that is initially empty.
    pub fn new<A: Allocator + Clone>(alloc: A) -> Self {
        NodeRef::new_leaf(alloc).forget_type()
    }

    /// Adds a new internal node with a single edge pointing to the previous root node,
    /// make that new node the root node, and return it. This increases the height by 1
    /// and is the opposite of `pop_internal_level`.
    pub fn push_internal_level<A: Allocator + Clone>(
        &mut self,
        alloc: A,
    ) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
        // `take_mut` lets us move the old root out of `*self` and replace it,
        // even though we only hold `&mut self`.
        super::mem::take_mut(self, |old_root| NodeRef::new_internal(old_root, alloc).forget_type());

        // `self.borrow_mut()`, except that we just forgot we're internal now:
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Removes the internal root node, using its first child as the new root node.
    /// As it is intended only to be called when the root node has only one child,
    /// no cleanup is done on any of the keys, values and other children.
    /// This decreases the height by 1 and is the opposite of `push_internal_level`.
    ///
    /// Requires exclusive access to the `NodeRef` object but not to the root node;
    /// it will not invalidate other handles or references to the root node.
    ///
    /// Panics if there is no internal level, i.e., if the root node is a leaf.
    pub fn pop_internal_level<A: Allocator + Clone>(&mut self, alloc: A) {
        assert!(self.height > 0);

        // Keep the old root pointer around so it can be deallocated at the end.
        let top = self.node;

        // SAFETY: we asserted to be internal.
        let internal_self = unsafe { self.borrow_mut().cast_to_internal_unchecked() };
        // SAFETY: we borrowed `self` exclusively and its borrow type is exclusive.
        let internal_node = unsafe { &mut *NodeRef::as_internal_ptr(&internal_self) };
        // SAFETY: the first edge is always initialized.
        self.node = unsafe { internal_node.edges[0].assume_init_read() };
        self.height -= 1;
        self.clear_parent_link();

        unsafe {
            alloc.deallocate(top.cast(), Layout::new::<InternalNode<K, V>>());
        }
    }
}
628
impl<K, V, Type> NodeRef<marker::Owned, K, V, Type> {
    /// Mutably borrows the owned root node. Unlike `reborrow_mut`, this is safe
    /// because the return value cannot be used to destroy the root, and there
    /// cannot be other references to the tree.
    pub fn borrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Slightly mutably borrows the owned root node: a `ValMut` borrow permits
    /// mutating values only, not keys or tree structure.
    pub fn borrow_valmut(&mut self) -> NodeRef<marker::ValMut<'_>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Irreversibly transitions to a reference that permits traversal and offers
    /// destructive methods and little else.
    pub fn into_dying(self) -> NodeRef<marker::Dying, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
648
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
    /// Adds a key-value pair to the end of the node, and returns
    /// a handle to the inserted value.
    ///
    /// Panics if the node is already at full capacity.
    ///
    /// # Safety
    ///
    /// The returned handle has an unbound lifetime.
    pub unsafe fn push_with_handle<'b>(
        &mut self,
        key: K,
        val: V,
    ) -> Handle<NodeRef<marker::Mut<'b>, K, V, marker::Leaf>, marker::KV> {
        let len = self.len_mut();
        let idx = usize::from(*len);
        assert!(idx < CAPACITY);
        *len += 1;
        unsafe {
            // `idx < CAPACITY` was just asserted, satisfying the area methods'
            // bounds contract; the slots at `idx` were uninitialized until now.
            self.key_area_mut(idx).write(key);
            self.val_area_mut(idx).write(val);
            Handle::new_kv(
                NodeRef { height: self.height, node: self.node, _marker: PhantomData },
                idx,
            )
        }
    }

    /// Adds a key-value pair to the end of the node, and returns
    /// the mutable reference of the inserted value.
    pub fn push(&mut self, key: K, val: V) -> *mut V {
        // SAFETY: The unbound handle is no longer accessible.
        unsafe { self.push_with_handle(key, val).into_val_mut() }
    }
}
682
683impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
684 /// Adds a key-value pair, and an edge to go to the right of that pair,
685 /// to the end of the node.
686 pub fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
687 assert!(edge.height == self.height - 1);
688
689 let len: &mut u16 = self.len_mut();
690 let idx: usize = usize::from(*len);
691 assert!(idx < CAPACITY);
692 *len += 1;
693 unsafe {
694 self.key_area_mut(idx).write(val:key);
695 self.val_area_mut(index:idx).write(val);
696 self.edge_area_mut(idx + 1).write(val:edge.node);
697 Handle::new_edge(self.reborrow_mut(), idx:idx + 1).correct_parent_link();
698 }
699 }
700}
701
impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Leaf> {
    /// Removes any static information asserting that this node is a `Leaf` node.
    /// This is a type-level change only; no runtime work is done.
    pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}

impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
    /// Removes any static information asserting that this node is an `Internal` node.
    /// This is a type-level change only; no runtime work is done.
    pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
715
716impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
717 /// Checks whether a node is an `Internal` node or a `Leaf` node.
718 pub fn force(
719 self,
720 ) -> ForceResult<
721 NodeRef<BorrowType, K, V, marker::Leaf>,
722 NodeRef<BorrowType, K, V, marker::Internal>,
723 > {
724 if self.height == 0 {
725 ForceResult::Leaf(NodeRef {
726 height: self.height,
727 node: self.node,
728 _marker: PhantomData,
729 })
730 } else {
731 ForceResult::Internal(NodeRef {
732 height: self.height,
733 node: self.node,
734 _marker: PhantomData,
735 })
736 }
737 }
738}
739
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Unsafely asserts to the compiler the static information that this node is a `Leaf`.
    ///
    /// # Safety
    /// The node must actually be a leaf, i.e., have height zero
    /// (checked only in debug builds).
    unsafe fn cast_to_leaf_unchecked(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
        debug_assert!(self.height == 0);
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }

    /// Unsafely asserts to the compiler the static information that this node is an `Internal`.
    ///
    /// # Safety
    /// The node must actually be internal, i.e., have non-zero height
    /// (checked only in debug builds).
    unsafe fn cast_to_internal_unchecked(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
        debug_assert!(self.height > 0);
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}
753
/// A reference to a specific key-value pair or edge within a node. The `Node` parameter
/// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key-value
/// pair) or `Edge` (signifying a handle on an edge).
///
/// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
/// a child node, these represent the spaces where child pointers would go between the key-value
/// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
/// to the left of the node, one between the two pairs, and one at the right of the node.
pub struct Handle<Node, Type> {
    /// The node this handle points into.
    node: Node,
    /// The KV index or edge index within `node`; its meaning depends on `Type`.
    idx: usize,
    /// Zero-sized: records `Type` (`KV` or `Edge`) purely at the type level.
    _marker: PhantomData<Type>,
}

impl<Node: Copy, Type> Copy for Handle<Node, Type> {}
// We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
// `Clone`able is when it is an immutable reference and therefore `Copy`.
impl<Node: Copy, Type> Clone for Handle<Node, Type> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<Node, Type> Handle<Node, Type> {
    /// Retrieves the node that contains the edge or key-value pair this handle points to.
    pub fn into_node(self) -> Node {
        self.node
    }

    /// Returns the position of this handle in the node.
    pub fn idx(&self) -> usize {
        self.idx
    }
}
788
789impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
790 /// Creates a new handle to a key-value pair in `node`.
791 /// Unsafe because the caller must ensure that `idx < node.len()`.
792 pub unsafe fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
793 debug_assert!(idx < node.len());
794
795 Handle { node, idx, _marker: PhantomData }
796 }
797
798 pub fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
799 unsafe { Handle::new_edge(self.node, self.idx) }
800 }
801
802 pub fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
803 unsafe { Handle::new_edge(self.node, self.idx + 1) }
804 }
805}
806
807impl<BorrowType, K, V, NodeType, HandleType> PartialEq
808 for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
809{
810 fn eq(&self, other: &Self) -> bool {
811 let Self { node: &NodeRef, idx: &usize, _marker: &PhantomData } = self;
812 node.eq(&other.node) && *idx == other.idx
813 }
814}
815
816impl<BorrowType, K, V, NodeType, HandleType>
817 Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
818{
819 /// Temporarily takes out another immutable handle on the same location.
820 pub fn reborrow(&self) -> Handle<NodeRef<marker::Immut<'_>, K, V, NodeType>, HandleType> {
821 // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
822 Handle { node: self.node.reborrow(), idx: self.idx, _marker: PhantomData }
823 }
824}
825
826impl<'a, K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
827 /// Temporarily takes out another mutable handle on the same location. Beware, as
828 /// this method is very dangerous, doubly so since it might not immediately appear
829 /// dangerous.
830 ///
831 /// For details, see `NodeRef::reborrow_mut`.
832 pub unsafe fn reborrow_mut(
833 &mut self,
834 ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
835 // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
836 Handle { node: unsafe { self.node.reborrow_mut() }, idx: self.idx, _marker: PhantomData }
837 }
838
839 /// Returns a dormant copy of this handle which can be reawakened later.
840 ///
841 /// See `DormantMutRef` for more details.
842 pub fn dormant(&self) -> Handle<NodeRef<marker::DormantMut, K, V, NodeType>, HandleType> {
843 Handle { node: self.node.dormant(), idx: self.idx, _marker: PhantomData }
844 }
845}
846
847impl<K, V, NodeType, HandleType> Handle<NodeRef<marker::DormantMut, K, V, NodeType>, HandleType> {
848 /// Revert to the unique borrow initially captured.
849 ///
850 /// # Safety
851 ///
852 /// The reborrow must have ended, i.e., the reference returned by `new` and
853 /// all pointers and references derived from it, must not be used anymore.
854 pub unsafe fn awaken<'a>(self) -> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
855 Handle { node: unsafe { self.node.awaken() }, idx: self.idx, _marker: PhantomData }
856 }
857}
858
859impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
860 /// Creates a new handle to an edge in `node`.
861 /// Unsafe because the caller must ensure that `idx <= node.len()`.
862 pub unsafe fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
863 debug_assert!(idx <= node.len());
864
865 Handle { node, idx, _marker: PhantomData }
866 }
867
868 pub fn left_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
869 if self.idx > 0 {
870 Ok(unsafe { Handle::new_kv(self.node, self.idx - 1) })
871 } else {
872 Err(self)
873 }
874 }
875
876 pub fn right_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
877 if self.idx < self.node.len() {
878 Ok(unsafe { Handle::new_kv(self.node, self.idx) })
879 } else {
880 Err(self)
881 }
882 }
883}
884
/// Distinguishes a position (payload `T`, typically an index) as belonging to
/// the left or the right side of a split.
pub enum LeftOrRight<T> {
    Left(T),
    Right(T),
}
889
890/// Given an edge index where we want to insert into a node filled to capacity,
891/// computes a sensible KV index of a split point and where to perform the insertion.
892/// The goal of the split point is for its key and value to end up in a parent node;
893/// the keys, values and edges to the left of the split point become the left child;
894/// the keys, values and edges to the right of the split point become the right child.
895fn splitpoint(edge_idx: usize) -> (usize, LeftOrRight<usize>) {
896 debug_assert!(edge_idx <= CAPACITY);
897 // Rust issue #74834 tries to explain these symmetric rules.
898 match edge_idx {
899 0..EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER - 1, LeftOrRight::Left(edge_idx)),
900 EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Left(edge_idx)),
901 EDGE_IDX_RIGHT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Right(0)),
902 _ => (KV_IDX_CENTER + 1, LeftOrRight::Right(edge_idx - (KV_IDX_CENTER + 1 + 1))),
903 }
904}
905
906impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
907 /// Inserts a new key-value pair between the key-value pairs to the right and left of
908 /// this edge. This method assumes that there is enough space in the node for the new
909 /// pair to fit.
910 unsafe fn insert_fit(
911 mut self,
912 key: K,
913 val: V,
914 ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
915 debug_assert!(self.node.len() < CAPACITY);
916 let new_len: usize = self.node.len() + 1;
917
918 unsafe {
919 slice_insert(self.node.key_area_mut(..new_len), self.idx, val:key);
920 slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
921 *self.node.len_mut() = new_len as u16;
922
923 Handle::new_kv(self.node, self.idx)
924 }
925 }
926}
927
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method splits the node if there isn't enough room.
    ///
    /// Returns a dormant handle to the inserted node which can be reawakened
    /// once splitting is complete.
    fn insert<A: Allocator + Clone>(
        self,
        key: K,
        val: V,
        alloc: A,
    ) -> (
        Option<SplitResult<'a, K, V, marker::Leaf>>,
        Handle<NodeRef<marker::DormantMut, K, V, marker::Leaf>, marker::KV>,
    ) {
        if self.node.len() < CAPACITY {
            // SAFETY: There is enough space in the node for insertion.
            let handle = unsafe { self.insert_fit(key, val) };
            (None, handle.dormant())
        } else {
            // The node is full: split it around a chosen middle KV, then
            // insert into whichever half this edge index falls into.
            let (middle_kv_idx, insertion) = splitpoint(self.idx);
            let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
            let mut result = middle.split(alloc);
            // `splitpoint` already re-based right-side indices into the new node.
            let insertion_edge = match insertion {
                LeftOrRight::Left(insert_idx) => unsafe {
                    Handle::new_edge(result.left.reborrow_mut(), insert_idx)
                },
                LeftOrRight::Right(insert_idx) => unsafe {
                    Handle::new_edge(result.right.borrow_mut(), insert_idx)
                },
            };
            // SAFETY: We just split the node, so there is enough space for
            // insertion.
            let handle = unsafe { insertion_edge.insert_fit(key, val).dormant() };
            (Some(result), handle)
        }
    }
}
966
967impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
968 /// Fixes the parent pointer and index in the child node that this edge
969 /// links to. This is useful when the ordering of edges has been changed,
970 fn correct_parent_link(self) {
971 // Create backpointer without invalidating other references to the node.
972 let ptr: NonNull> = unsafe { NonNull::new_unchecked(ptr:NodeRef::as_internal_ptr(&self.node)) };
973 let idx: usize = self.idx;
974 let mut child: NodeRef, K, V, LeafOrInternal> = self.descend();
975 child.set_parent_link(parent:ptr, parent_idx:idx);
976 }
977}
978
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
    /// Inserts a new key-value pair and an edge that will go to the right of that new pair
    /// between this edge and the key-value pair to the right of this edge. This method assumes
    /// that there is enough space in the node for the new pair to fit.
    fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
        debug_assert!(self.node.len() < CAPACITY);
        // The new child must hang exactly one level below this internal node.
        debug_assert!(edge.height == self.node.height - 1);
        let new_len = self.node.len() + 1;

        unsafe {
            slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
            slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
            // The new edge goes to the right of the new KV, hence `idx + 1`.
            slice_insert(self.node.edge_area_mut(..new_len + 1), self.idx + 1, edge.node);
            *self.node.len_mut() = new_len as u16;

            // Every edge at or after the inserted one was shifted right;
            // repair the children's parent back-pointers.
            self.node.correct_childrens_parent_links(self.idx + 1..new_len + 1);
        }
    }

    /// Inserts a new key-value pair and an edge that will go to the right of that new pair
    /// between this edge and the key-value pair to the right of this edge. This method splits
    /// the node if there isn't enough room.
    fn insert<A: Allocator + Clone>(
        mut self,
        key: K,
        val: V,
        edge: Root<K, V>,
        alloc: A,
    ) -> Option<SplitResult<'a, K, V, marker::Internal>> {
        assert!(edge.height == self.node.height - 1);

        if self.node.len() < CAPACITY {
            self.insert_fit(key, val, edge);
            None
        } else {
            // Node is full: split around a chosen middle KV, then insert into
            // whichever half this edge index falls into.
            let (middle_kv_idx, insertion) = splitpoint(self.idx);
            let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
            let mut result = middle.split(alloc);
            // `splitpoint` already re-based right-side indices into the new node.
            let mut insertion_edge = match insertion {
                LeftOrRight::Left(insert_idx) => unsafe {
                    Handle::new_edge(result.left.reborrow_mut(), insert_idx)
                },
                LeftOrRight::Right(insert_idx) => unsafe {
                    Handle::new_edge(result.right.borrow_mut(), insert_idx)
                },
            };
            insertion_edge.insert_fit(key, val, edge);
            Some(result)
        }
    }
}
1030
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method splits the node if there isn't enough room, and tries to
    /// insert the split off portion into the parent node recursively, until the root is reached.
    ///
    /// If the returned result is some `SplitResult`, the `left` field will be the root node.
    /// The returned pointer points to the inserted value, which in the case of `SplitResult`
    /// is in the `left` or `right` tree.
    pub fn insert_recursing<A: Allocator + Clone>(
        self,
        key: K,
        value: V,
        alloc: A,
        split_root: impl FnOnce(SplitResult<'a, K, V, marker::LeafOrInternal>),
    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
        // First insert into the leaf; keep the handle dormant while the split
        // (if any) propagates upward, since splitting mutates ancestor nodes.
        let (mut split, handle) = match self.insert(key, value, alloc.clone()) {
            // SAFETY: we have finished splitting and can now re-awaken the
            // handle to the inserted element.
            (None, handle) => return unsafe { handle.awaken() },
            (Some(split), handle) => (split.forget_node_type(), handle),
        };

        // Bubble the split KV up through the ancestors until a node absorbs it
        // or we run out of parents (then the caller grows a new root).
        loop {
            split = match split.left.ascend() {
                Ok(parent) => {
                    match parent.insert(split.kv.0, split.kv.1, split.right, alloc.clone()) {
                        // SAFETY: we have finished splitting and can now re-awaken the
                        // handle to the inserted element.
                        None => return unsafe { handle.awaken() },
                        Some(split) => split.forget_node_type(),
                    }
                }
                Err(root) => {
                    // Reached the root: let the caller install a new root above it.
                    split_root(SplitResult { left: root, ..split });
                    // SAFETY: we have finished splitting and can now re-awaken the
                    // handle to the inserted element.
                    return unsafe { handle.awaken() };
                }
            };
        }
    }
}
1073
1074impl<BorrowType: marker::BorrowType, K, V>
1075 Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>
1076{
1077 /// Finds the node pointed to by this edge.
1078 ///
1079 /// The method name assumes you picture trees with the root node on top.
1080 ///
1081 /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
1082 /// both, upon success, do nothing.
1083 pub fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
1084 const {
1085 assert!(BorrowType::TRAVERSAL_PERMIT);
1086 }
1087
1088 // We need to use raw pointers to nodes because, if BorrowType is
1089 // marker::ValMut, there might be outstanding mutable references to
1090 // values that we must not invalidate. There's no worry accessing the
1091 // height field because that value is copied. Beware that, once the
1092 // node pointer is dereferenced, we access the edges array with a
1093 // reference (Rust issue #73987) and invalidate any other references
1094 // to or inside the array, should any be around.
1095 let parent_ptr: *mut InternalNode = NodeRef::as_internal_ptr(&self.node);
1096 let node: NonNull> = unsafe { (*parent_ptr).edges.get_unchecked(self.idx).assume_init_read() };
1097 NodeRef { node, height: self.node.height - 1, _marker: PhantomData }
1098 }
1099}
1100
1101impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
1102 pub fn into_kv(self) -> (&'a K, &'a V) {
1103 debug_assert!(self.idx < self.node.len());
1104 let leaf: &LeafNode = self.node.into_leaf();
1105 let k: &K = unsafe { leaf.keys.get_unchecked(self.idx).assume_init_ref() };
1106 let v: &V = unsafe { leaf.vals.get_unchecked(self.idx).assume_init_ref() };
1107 (k, v)
1108 }
1109}
1110
1111impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
1112 pub fn key_mut(&mut self) -> &mut K {
1113 unsafe { self.node.key_area_mut(self.idx).assume_init_mut() }
1114 }
1115
1116 pub fn into_val_mut(self) -> &'a mut V {
1117 debug_assert!(self.idx < self.node.len());
1118 let leaf: &mut LeafNode = self.node.into_leaf_mut();
1119 unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() }
1120 }
1121
1122 pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
1123 debug_assert!(self.idx < self.node.len());
1124 let leaf: &mut LeafNode = self.node.into_leaf_mut();
1125 let k: &mut K = unsafe { leaf.keys.get_unchecked_mut(self.idx).assume_init_mut() };
1126 let v: &mut V = unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() };
1127 (k, v)
1128 }
1129}
1130
1131impl<'a, K, V, NodeType> Handle<NodeRef<marker::ValMut<'a>, K, V, NodeType>, marker::KV> {
1132 pub fn into_kv_valmut(self) -> (&'a K, &'a mut V) {
1133 unsafe { self.node.into_key_val_mut_at(self.idx) }
1134 }
1135}
1136
1137impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
1138 pub fn kv_mut(&mut self) -> (&mut K, &mut V) {
1139 debug_assert!(self.idx < self.node.len());
1140 // We cannot call separate key and value methods, because calling the second one
1141 // invalidates the reference returned by the first.
1142 unsafe {
1143 let leaf: &mut LeafNode = self.node.as_leaf_mut();
1144 let key: &mut K = leaf.keys.get_unchecked_mut(self.idx).assume_init_mut();
1145 let val: &mut V = leaf.vals.get_unchecked_mut(self.idx).assume_init_mut();
1146 (key, val)
1147 }
1148 }
1149
1150 /// Replaces the key and value that the KV handle refers to.
1151 pub fn replace_kv(&mut self, k: K, v: V) -> (K, V) {
1152 let (key: &mut K, val: &mut V) = self.kv_mut();
1153 (mem::replace(dest:key, src:k), mem::replace(dest:val, src:v))
1154 }
1155}
1156
impl<K, V, NodeType> Handle<NodeRef<marker::Dying, K, V, NodeType>, marker::KV> {
    /// Extracts the key and value that the KV handle refers to.
    /// # Safety
    /// The node that the handle refers to must not yet have been deallocated.
    pub unsafe fn into_key_val(mut self) -> (K, V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.as_leaf_dying();
        unsafe {
            // Read the pair out by value; the slots are logically dead afterwards,
            // which is fine because the tree is being torn down.
            let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_read();
            let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_read();
            (key, val)
        }
    }

    /// Drops the key and value that the KV handle refers to.
    /// # Safety
    /// The node that the handle refers to must not yet have been deallocated.
    #[inline]
    pub unsafe fn drop_key_val(mut self) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.as_leaf_dying();
        unsafe {
            // Drop in place instead of reading out; used when the pair's
            // destructors must run but the values are not needed.
            leaf.keys.get_unchecked_mut(self.idx).assume_init_drop();
            leaf.vals.get_unchecked_mut(self.idx).assume_init_drop();
        }
    }
}
1184
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
    /// Helps implementations of `split` for a particular `NodeType`,
    /// by taking care of leaf data.
    fn split_leaf_data(&mut self, new_node: &mut LeafNode<K, V>) -> (K, V) {
        debug_assert!(self.idx < self.node.len());
        let old_len = self.node.len();
        // Everything strictly right of the split KV moves to the new node.
        let new_len = old_len - self.idx - 1;
        new_node.len = new_len as u16;
        unsafe {
            // Read out the split KV itself; it will be handed to the parent.
            let k = self.node.key_area_mut(self.idx).assume_init_read();
            let v = self.node.val_area_mut(self.idx).assume_init_read();

            move_to_slice(
                self.node.key_area_mut(self.idx + 1..old_len),
                &mut new_node.keys[..new_len],
            );
            move_to_slice(
                self.node.val_area_mut(self.idx + 1..old_len),
                &mut new_node.vals[..new_len],
            );

            // Truncate this node to the part left of the split KV.
            *self.node.len_mut() = self.idx as u16;
            (k, v)
        }
    }
}
1211
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
    /// Splits the underlying node into three parts:
    ///
    /// - The node is truncated to only contain the key-value pairs to the left of
    ///   this handle.
    /// - The key and value pointed to by this handle are extracted.
    /// - All the key-value pairs to the right of this handle are put into a newly
    ///   allocated node.
    pub fn split<A: Allocator + Clone>(mut self, alloc: A) -> SplitResult<'a, K, V, marker::Leaf> {
        let mut new_node = LeafNode::new(alloc);

        // Moves the right-hand pairs into `new_node` and extracts the split KV.
        let kv = self.split_leaf_data(&mut new_node);

        let right = NodeRef::from_new_leaf(new_node);
        SplitResult { left: self.node, kv, right }
    }

    /// Removes the key-value pair pointed to by this handle and returns it, along with the edge
    /// that the key-value pair collapsed into.
    pub fn remove(
        mut self,
    ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
        let old_len = self.node.len();
        unsafe {
            // Shift both parallel arrays left over the removed slot.
            let k = slice_remove(self.node.key_area_mut(..old_len), self.idx);
            let v = slice_remove(self.node.val_area_mut(..old_len), self.idx);
            *self.node.len_mut() = (old_len - 1) as u16;
            // The surrounding edges merge into the edge at the pair's old index.
            ((k, v), self.left_edge())
        }
    }
}
1243
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
    /// Splits the underlying node into three parts:
    ///
    /// - The node is truncated to only contain the edges and key-value pairs to the
    ///   left of this handle.
    /// - The key and value pointed to by this handle are extracted.
    /// - All the edges and key-value pairs to the right of this handle are put into
    ///   a newly allocated node.
    pub fn split<A: Allocator + Clone>(
        mut self,
        alloc: A,
    ) -> SplitResult<'a, K, V, marker::Internal> {
        let old_len = self.node.len();
        unsafe {
            let mut new_node = InternalNode::new(alloc);
            // The KV arrays split exactly like a leaf's; reuse the leaf helper.
            let kv = self.split_leaf_data(&mut new_node.data);
            let new_len = usize::from(new_node.data.len);
            // A node with `new_len` KVs owns `new_len + 1` edges.
            move_to_slice(
                self.node.edge_area_mut(self.idx + 1..old_len + 1),
                &mut new_node.edges[..new_len + 1],
            );

            // Both halves of a split stay at the same height.
            let height = self.node.height;
            let right = NodeRef::from_new_internal(new_node, height);

            SplitResult { left: self.node, kv, right }
        }
    }
}
1273
/// Represents a session for evaluating and performing a balancing operation
/// around an internal key-value pair.
pub struct BalancingContext<'a, K, V> {
    // Handle on the pivot KV in the parent node.
    parent: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV>,
    // Child node hanging immediately left of the pivot KV.
    left_child: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
    // Child node hanging immediately right of the pivot KV.
    right_child: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
}
1281
1282impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
1283 pub fn consider_for_balancing(self) -> BalancingContext<'a, K, V> {
1284 let self1: Handle, K, …, …>, …> = unsafe { ptr::read(&self) };
1285 let self2: Handle, K, …, …>, …> = unsafe { ptr::read(&self) };
1286 BalancingContext {
1287 parent: self,
1288 left_child: self1.left_edge().descend(),
1289 right_child: self2.right_edge().descend(),
1290 }
1291 }
1292}
1293
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
    /// Chooses a balancing context involving the node as a child, thus between
    /// the KV immediately to the left or to the right in the parent node.
    /// Returns an `Err` if there is no parent.
    /// Panics if the parent is empty.
    ///
    /// Prefers the left side, to be optimal if the given node is somehow
    /// underfull, meaning here only that it has fewer elements than its left
    /// sibling and than its right sibling, if they exist. In that case,
    /// merging with the left sibling is faster, since we only need to move
    /// the node's N elements, instead of shifting them to the right and moving
    /// more than N elements in front. Stealing from the left sibling is also
    /// typically faster, since we only need to shift the node's N elements to
    /// the right, instead of shifting at least N of the sibling's elements to
    /// the left.
    pub fn choose_parent_kv(self) -> Result<LeftOrRight<BalancingContext<'a, K, V>>, Self> {
        // `ptr::read` copies of `self`/handles below co-exist with the moved
        // originals only until one side is consumed by `descend`.
        match unsafe { ptr::read(&self) }.ascend() {
            Ok(parent_edge) => match parent_edge.left_kv() {
                // Prefer the KV to the left (see doc comment above).
                Ok(left_parent_kv) => Ok(LeftOrRight::Left(BalancingContext {
                    parent: unsafe { ptr::read(&left_parent_kv) },
                    left_child: left_parent_kv.left_edge().descend(),
                    right_child: self,
                })),
                Err(parent_edge) => match parent_edge.right_kv() {
                    Ok(right_parent_kv) => Ok(LeftOrRight::Right(BalancingContext {
                        parent: unsafe { ptr::read(&right_parent_kv) },
                        left_child: self,
                        right_child: right_parent_kv.right_edge().descend(),
                    })),
                    // A parent is an internal node and internal nodes always
                    // have at least one KV, so one of the two lookups succeeds.
                    Err(_) => unreachable!("empty internal node"),
                },
            },
            Err(root) => Err(root),
        }
    }
}
1330
1331impl<'a, K, V> BalancingContext<'a, K, V> {
1332 pub fn left_child_len(&self) -> usize {
1333 self.left_child.len()
1334 }
1335
1336 pub fn right_child_len(&self) -> usize {
1337 self.right_child.len()
1338 }
1339
1340 pub fn into_left_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
1341 self.left_child
1342 }
1343
1344 pub fn into_right_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
1345 self.right_child
1346 }
1347
1348 /// Returns whether merging is possible, i.e., whether there is enough room
1349 /// in a node to combine the central KV with both adjacent child nodes.
1350 pub fn can_merge(&self) -> bool {
1351 self.left_child.len() + 1 + self.right_child.len() <= CAPACITY
1352 }
1353}
1354
1355impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
    /// Performs a merge and lets a closure decide what to return.
    fn do_merge<
        F: FnOnce(
            NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
            NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
        ) -> R,
        R,
        A: Allocator,
    >(
        self,
        result: F,
        alloc: A,
    ) -> R {
        let Handle { node: mut parent_node, idx: parent_idx, _marker } = self.parent;
        let old_parent_len = parent_node.len();
        let mut left_node = self.left_child;
        let old_left_len = left_node.len();
        let mut right_node = self.right_child;
        let right_len = right_node.len();
        // Merged contents: left child + pivot KV pulled down from the parent + right child.
        let new_left_len = old_left_len + 1 + right_len;

        // This is the `.can_merge()` precondition.
        assert!(new_left_len <= CAPACITY);

        unsafe {
            *left_node.len_mut() = new_left_len as u16;

            // Pull the pivot key down from the parent, then append the right
            // child's keys after it.
            let parent_key = slice_remove(parent_node.key_area_mut(..old_parent_len), parent_idx);
            left_node.key_area_mut(old_left_len).write(parent_key);
            move_to_slice(
                right_node.key_area_mut(..right_len),
                left_node.key_area_mut(old_left_len + 1..new_left_len),
            );

            // Same for the values.
            let parent_val = slice_remove(parent_node.val_area_mut(..old_parent_len), parent_idx);
            left_node.val_area_mut(old_left_len).write(parent_val);
            move_to_slice(
                right_node.val_area_mut(..right_len),
                left_node.val_area_mut(old_left_len + 1..new_left_len),
            );

            // Drop the parent's edge to the (now absorbed) right child and
            // repair the back-pointers of the edges that shifted left.
            slice_remove(&mut parent_node.edge_area_mut(..old_parent_len + 1), parent_idx + 1);
            parent_node.correct_childrens_parent_links(parent_idx + 1..old_parent_len);
            *parent_node.len_mut() -= 1;

            if parent_node.height > 1 {
                // SAFETY: the height of the nodes being merged is one below the height
                // of the node of this edge, thus above zero, so they are internal.
                let mut left_node = left_node.reborrow_mut().cast_to_internal_unchecked();
                let mut right_node = right_node.cast_to_internal_unchecked();
                // Internal children also carry edges; move those and fix their
                // children's parent links.
                move_to_slice(
                    right_node.edge_area_mut(..right_len + 1),
                    left_node.edge_area_mut(old_left_len + 1..new_left_len + 1),
                );

                left_node.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);

                alloc.deallocate(right_node.node.cast(), Layout::new::<InternalNode<K, V>>());
            } else {
                alloc.deallocate(right_node.node.cast(), Layout::new::<LeafNode<K, V>>());
            }
        }
        result(parent_node, left_node)
    }
1419
1420 /// Merges the parent's key-value pair and both adjacent child nodes into
1421 /// the left child node and returns the shrunk parent node.
1422 ///
1423 /// Panics unless we `.can_merge()`.
1424 pub fn merge_tracking_parent<A: Allocator + Clone>(
1425 self,
1426 alloc: A,
1427 ) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
1428 self.do_merge(|parent, _child| parent, alloc)
1429 }
1430
1431 /// Merges the parent's key-value pair and both adjacent child nodes into
1432 /// the left child node and returns that child node.
1433 ///
1434 /// Panics unless we `.can_merge()`.
1435 pub fn merge_tracking_child<A: Allocator + Clone>(
1436 self,
1437 alloc: A,
1438 ) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
1439 self.do_merge(|_parent, child| child, alloc)
1440 }
1441
1442 /// Merges the parent's key-value pair and both adjacent child nodes into
1443 /// the left child node and returns the edge handle in that child node
1444 /// where the tracked child edge ended up,
1445 ///
1446 /// Panics unless we `.can_merge()`.
1447 pub fn merge_tracking_child_edge<A: Allocator + Clone>(
1448 self,
1449 track_edge_idx: LeftOrRight<usize>,
1450 alloc: A,
1451 ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
1452 let old_left_len = self.left_child.len();
1453 let right_len = self.right_child.len();
1454 assert!(match track_edge_idx {
1455 LeftOrRight::Left(idx) => idx <= old_left_len,
1456 LeftOrRight::Right(idx) => idx <= right_len,
1457 });
1458 let child = self.merge_tracking_child(alloc);
1459 let new_idx = match track_edge_idx {
1460 LeftOrRight::Left(idx) => idx,
1461 LeftOrRight::Right(idx) => old_left_len + 1 + idx,
1462 };
1463 unsafe { Handle::new_edge(child, new_idx) }
1464 }
1465
1466 /// Removes a key-value pair from the left child and places it in the key-value storage
1467 /// of the parent, while pushing the old parent key-value pair into the right child.
1468 /// Returns a handle to the edge in the right child corresponding to where the original
1469 /// edge specified by `track_right_edge_idx` ended up.
1470 pub fn steal_left(
1471 mut self,
1472 track_right_edge_idx: usize,
1473 ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
1474 self.bulk_steal_left(1);
1475 unsafe { Handle::new_edge(self.right_child, 1 + track_right_edge_idx) }
1476 }
1477
1478 /// Removes a key-value pair from the right child and places it in the key-value storage
1479 /// of the parent, while pushing the old parent key-value pair onto the left child.
1480 /// Returns a handle to the edge in the left child specified by `track_left_edge_idx`,
1481 /// which didn't move.
1482 pub fn steal_right(
1483 mut self,
1484 track_left_edge_idx: usize,
1485 ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
1486 self.bulk_steal_right(1);
1487 unsafe { Handle::new_edge(self.left_child, track_left_edge_idx) }
1488 }
1489
    /// This does stealing similar to `steal_left` but steals multiple elements at once.
    pub fn bulk_steal_left(&mut self, count: usize) {
        assert!(count > 0);
        unsafe {
            let left_node = &mut self.left_child;
            let old_left_len = left_node.len();
            let right_node = &mut self.right_child;
            let old_right_len = right_node.len();

            // Make sure that we may steal safely.
            assert!(old_right_len + count <= CAPACITY);
            assert!(old_left_len >= count);

            let new_left_len = old_left_len - count;
            let new_right_len = old_right_len + count;
            *left_node.len_mut() = new_left_len as u16;
            *right_node.len_mut() = new_right_len as u16;

            // Move leaf data.
            {
                // Make room for stolen elements in the right child.
                slice_shr(right_node.key_area_mut(..new_right_len), count);
                slice_shr(right_node.val_area_mut(..new_right_len), count);

                // Move elements from the left child to the right one.
                // Only `count - 1` pairs move directly: the left-most stolen
                // pair takes a detour through the parent (below).
                move_to_slice(
                    left_node.key_area_mut(new_left_len + 1..old_left_len),
                    right_node.key_area_mut(..count - 1),
                );
                move_to_slice(
                    left_node.val_area_mut(new_left_len + 1..old_left_len),
                    right_node.val_area_mut(..count - 1),
                );

                // Move the left-most stolen pair to the parent.
                let k = left_node.key_area_mut(new_left_len).assume_init_read();
                let v = left_node.val_area_mut(new_left_len).assume_init_read();
                let (k, v) = self.parent.replace_kv(k, v);

                // Move parent's key-value pair to the right child.
                right_node.key_area_mut(count - 1).write(k);
                right_node.val_area_mut(count - 1).write(v);
            }

            match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) {
                (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                    // Make room for stolen edges.
                    slice_shr(right.edge_area_mut(..new_right_len + 1), count);

                    // Steal edges.
                    // Unlike the KVs, all `count` edges move directly: edges
                    // do not pass through the parent.
                    move_to_slice(
                        left.edge_area_mut(new_left_len + 1..old_left_len + 1),
                        right.edge_area_mut(..count),
                    );

                    right.correct_childrens_parent_links(0..new_right_len + 1);
                }
                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                // Both children sit at the same height, so they are either
                // both leaves or both internal.
                _ => unreachable!(),
            }
        }
    }
1552
    /// The symmetric clone of `bulk_steal_left`.
    ///
    /// `count` key-value pairs leave the right child and enter the left child;
    /// the pair closest to the parent rotates through the parent's key-value
    /// slot on the way.
    pub fn bulk_steal_right(&mut self, count: usize) {
        // A zero-count steal would underflow the `count - 1` arithmetic below.
        assert!(count > 0);
        unsafe {
            let left_node = &mut self.left_child;
            let old_left_len = left_node.len();
            let right_node = &mut self.right_child;
            let old_right_len = right_node.len();

            // Make sure that we may steal safely.
            assert!(old_left_len + count <= CAPACITY);
            assert!(old_right_len >= count);

            let new_left_len = old_left_len + count;
            let new_right_len = old_right_len - count;
            *left_node.len_mut() = new_left_len as u16;
            *right_node.len_mut() = new_right_len as u16;

            // Move leaf data.
            {
                // Move the right-most stolen pair to the parent.
                let k = right_node.key_area_mut(count - 1).assume_init_read();
                let v = right_node.val_area_mut(count - 1).assume_init_read();
                let (k, v) = self.parent.replace_kv(k, v);

                // Move parent's key-value pair to the left child.
                left_node.key_area_mut(old_left_len).write(k);
                left_node.val_area_mut(old_left_len).write(v);

                // Move elements from the right child to the left one.
                // Only `count - 1` pairs move directly; the right-most stolen
                // pair rotated through the parent above.
                move_to_slice(
                    right_node.key_area_mut(..count - 1),
                    left_node.key_area_mut(old_left_len + 1..new_left_len),
                );
                move_to_slice(
                    right_node.val_area_mut(..count - 1),
                    left_node.val_area_mut(old_left_len + 1..new_left_len),
                );

                // Fill gap where stolen elements used to be.
                slice_shl(right_node.key_area_mut(..old_right_len), count);
                slice_shl(right_node.val_area_mut(..old_right_len), count);
            }

            match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) {
                (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                    // Steal edges. A node with `n` elements has `n + 1` edges,
                    // hence the `+ 1` bounds.
                    move_to_slice(
                        right.edge_area_mut(..count),
                        left.edge_area_mut(old_left_len + 1..new_left_len + 1),
                    );

                    // Fill gap where stolen edges used to be.
                    slice_shl(right.edge_area_mut(..old_right_len + 1), count);

                    left.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);
                    right.correct_childrens_parent_links(0..new_right_len + 1);
                }
                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                // Siblings always have equal height, so they are either both
                // internal or both leaves.
                _ => unreachable!(),
            }
        }
    }
1616}
1617
1618impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
1619 pub fn forget_node_type(
1620 self,
1621 ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
1622 unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
1623 }
1624}
1625
1626impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
1627 pub fn forget_node_type(
1628 self,
1629 ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
1630 unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
1631 }
1632}
1633
1634impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::KV> {
1635 pub fn forget_node_type(
1636 self,
1637 ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
1638 unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
1639 }
1640}
1641
1642impl<BorrowType, K, V, Type> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, Type> {
1643 /// Checks whether the underlying node is an `Internal` node or a `Leaf` node.
1644 pub fn force(
1645 self,
1646 ) -> ForceResult<
1647 Handle<NodeRef<BorrowType, K, V, marker::Leaf>, Type>,
1648 Handle<NodeRef<BorrowType, K, V, marker::Internal>, Type>,
1649 > {
1650 match self.node.force() {
1651 ForceResult::Leaf(node: NodeRef) => {
1652 ForceResult::Leaf(Handle { node, idx: self.idx, _marker: PhantomData })
1653 }
1654 ForceResult::Internal(node: NodeRef) => {
1655 ForceResult::Internal(Handle { node, idx: self.idx, _marker: PhantomData })
1656 }
1657 }
1658 }
1659}
1660
1661impl<'a, K, V, Type> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, Type> {
1662 /// Unsafely asserts to the compiler the static information that the handle's node is a `Leaf`.
1663 pub unsafe fn cast_to_leaf_unchecked(
1664 self,
1665 ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, Type> {
1666 let node: NodeRef, K, V, Leaf> = unsafe { self.node.cast_to_leaf_unchecked() };
1667 Handle { node, idx: self.idx, _marker: PhantomData }
1668 }
1669}
1670
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
    /// Move the suffix after `self` from one node to another one. `right` must be empty.
    /// The first edge of `right` remains unchanged.
    pub fn move_suffix(
        &mut self,
        right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
    ) {
        unsafe {
            // Everything at index `self.idx` and beyond moves out.
            let new_left_len = self.idx;
            let mut left_node = self.reborrow_mut().into_node();
            let old_left_len = left_node.len();

            let new_right_len = old_left_len - new_left_len;
            let mut right_node = right.reborrow_mut();

            // `right` must be empty and of the same height, otherwise the
            // writes below would clobber data or break tree invariants.
            assert!(right_node.len() == 0);
            assert!(left_node.height == right_node.height);

            // If the suffix is empty there is nothing to move (and the length
            // fields are already correct).
            if new_right_len > 0 {
                *left_node.len_mut() = new_left_len as u16;
                *right_node.len_mut() = new_right_len as u16;

                move_to_slice(
                    left_node.key_area_mut(new_left_len..old_left_len),
                    right_node.key_area_mut(..new_right_len),
                );
                move_to_slice(
                    left_node.val_area_mut(new_left_len..old_left_len),
                    right_node.val_area_mut(..new_right_len),
                );
                match (left_node.force(), right_node.force()) {
                    (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
                        // Edges land starting at index 1: the first edge of
                        // `right` stays untouched, per the contract above.
                        move_to_slice(
                            left.edge_area_mut(new_left_len + 1..old_left_len + 1),
                            right.edge_area_mut(1..new_right_len + 1),
                        );
                        right.correct_childrens_parent_links(1..new_right_len + 1);
                    }
                    (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
                    // Heights were asserted equal, so mixed flavors are impossible.
                    _ => unreachable!(),
                }
            }
        }
    }
}
1716
/// Outcome of resolving whether a node is a leaf or an internal node,
/// carrying a statically-typed payload (a `NodeRef` or `Handle`) either way.
pub enum ForceResult<Leaf, Internal> {
    Leaf(Leaf),
    Internal(Internal),
}
1721
/// Result of insertion, when a node needed to expand beyond its capacity.
pub struct SplitResult<'a, K, V, NodeType> {
    /// Altered node in existing tree with elements and edges that belong to the left of `kv`.
    pub left: NodeRef<marker::Mut<'a>, K, V, NodeType>,
    /// Some key and value that existed before and were split off, to be inserted elsewhere.
    pub kv: (K, V),
    /// Owned, unattached, new node with elements and edges that belong to the right of `kv`.
    pub right: NodeRef<marker::Owned, K, V, NodeType>,
}
1731
1732impl<'a, K, V> SplitResult<'a, K, V, marker::Leaf> {
1733 pub fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
1734 SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() }
1735 }
1736}
1737
1738impl<'a, K, V> SplitResult<'a, K, V, marker::Internal> {
1739 pub fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
1740 SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() }
1741 }
1742}
1743
/// Zero-sized marker types used as type parameters of `NodeRef` and `Handle`
/// to encode node flavor, borrow flavor, and handle flavor at compile time.
pub mod marker {
    use core::marker::PhantomData;

    /// Node flavor: statically known to be a leaf.
    pub enum Leaf {}
    /// Node flavor: statically known to be internal.
    pub enum Internal {}
    /// Node flavor: either; resolve with `force`.
    pub enum LeafOrInternal {}

    /// Borrow flavor: full ownership of the tree.
    pub enum Owned {}
    /// Borrow flavor: borrow used while the tree is being torn down.
    pub enum Dying {}
    /// Borrow flavor: a set-aside mutable borrow.
    // NOTE(review): exact dormancy semantics are defined by this marker's
    // uses elsewhere in the file — confirm there.
    pub enum DormantMut {}
    /// Borrow flavor: shared borrow for lifetime `'a`.
    pub struct Immut<'a>(PhantomData<&'a ()>);
    /// Borrow flavor: exclusive borrow for lifetime `'a`.
    pub struct Mut<'a>(PhantomData<&'a mut ()>);
    /// Borrow flavor: exclusive access to values only, for lifetime `'a`.
    pub struct ValMut<'a>(PhantomData<&'a mut ()>);

    pub trait BorrowType {
        /// If node references of this borrow type allow traversing to other
        /// nodes in the tree, this constant is set to `true`. It can be used
        /// for a compile-time assertion.
        const TRAVERSAL_PERMIT: bool = true;
    }
    impl BorrowType for Owned {
        /// Reject traversal, because it isn't needed. Instead traversal
        /// happens using the result of `borrow_mut`.
        /// By disabling traversal, and only creating new references to roots,
        /// we know that every reference of the `Owned` type is to a root node.
        const TRAVERSAL_PERMIT: bool = false;
    }
    impl BorrowType for Dying {}
    impl<'a> BorrowType for Immut<'a> {}
    impl<'a> BorrowType for Mut<'a> {}
    impl<'a> BorrowType for ValMut<'a> {}
    impl BorrowType for DormantMut {}

    /// Handle flavor: refers to a key-value pair in a node.
    pub enum KV {}
    /// Handle flavor: refers to a position between key-value pairs.
    pub enum Edge {}
}
1780
/// Inserts a value into a slice of initialized elements followed by one uninitialized element.
///
/// Elements at `idx..len - 1` shift one slot to the right to open a gap,
/// then `val` is written into slot `idx`.
///
/// # Safety
/// The slice has more than `idx` elements.
//
// The original text contained IDE inlay-hint residue (`*mut MaybeUninit`
// without `<T>`, pseudo named arguments `src:`/`dst:`/`count:`), which is
// not valid Rust; this is the cleaned form.
unsafe fn slice_insert<T>(slice: &mut [MaybeUninit<T>], idx: usize, val: T) {
    unsafe {
        let len = slice.len();
        debug_assert!(len > idx);
        let slice_ptr = slice.as_mut_ptr();
        if len > idx + 1 {
            // Overlapping move, so `ptr::copy` (memmove), not `copy_nonoverlapping`.
            ptr::copy(slice_ptr.add(idx), slice_ptr.add(idx + 1), len - idx - 1);
        }
        (*slice_ptr.add(idx)).write(val);
    }
}
1796
/// Removes and returns a value from a slice of all initialized elements, leaving behind one
/// trailing uninitialized element.
///
/// The element at `idx` is read out; elements at `idx + 1..len` shift one
/// slot to the left to close the gap.
///
/// # Safety
/// The slice has more than `idx` elements.
//
// The original text contained IDE inlay-hint residue (`*mut MaybeUninit`
// without `<T>`, pseudo named arguments `src:`/`dst:`/`count:`), which is
// not valid Rust; this is the cleaned form.
unsafe fn slice_remove<T>(slice: &mut [MaybeUninit<T>], idx: usize) -> T {
    unsafe {
        let len = slice.len();
        debug_assert!(idx < len);
        let slice_ptr = slice.as_mut_ptr();
        let ret = (*slice_ptr.add(idx)).assume_init_read();
        // Overlapping move, so `ptr::copy` (memmove). A zero count (removing
        // the last element) is fine.
        ptr::copy(slice_ptr.add(idx + 1), slice_ptr.add(idx), len - idx - 1);
        ret
    }
}
1812
/// Shifts the elements in a slice `distance` positions to the left.
///
/// Elements at `distance..len` move to `0..len - distance`; the last
/// `distance` slots are left logically uninitialized.
///
/// # Safety
/// The slice has at least `distance` elements.
//
// The original text contained IDE inlay-hint residue (`*mut MaybeUninit`
// without `<T>`, pseudo named arguments `src:`/`dst:`/`count:`), which is
// not valid Rust; this is the cleaned form.
unsafe fn slice_shl<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
    unsafe {
        let slice_ptr = slice.as_mut_ptr();
        // Overlapping move, so `ptr::copy` (memmove).
        ptr::copy(slice_ptr.add(distance), slice_ptr, slice.len() - distance);
    }
}
1823
/// Shifts the elements in a slice `distance` positions to the right.
///
/// Elements at `0..len - distance` move to `distance..len`; the first
/// `distance` slots are left logically uninitialized.
///
/// # Safety
/// The slice has at least `distance` elements.
//
// The original text contained IDE inlay-hint residue (`*mut MaybeUninit`
// without `<T>`, pseudo named arguments `src:`/`dst:`/`count:`), which is
// not valid Rust; this is the cleaned form.
unsafe fn slice_shr<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
    unsafe {
        let slice_ptr = slice.as_mut_ptr();
        // Overlapping move, so `ptr::copy` (memmove).
        ptr::copy(slice_ptr, slice_ptr.add(distance), slice.len() - distance);
    }
}
1834
/// Moves all values from a slice of initialized elements to a slice
/// of uninitialized elements, leaving behind `src` as all uninitialized.
/// Works like `dst.copy_from_slice(src)` but does not require `T` to be `Copy`.
///
/// # Panics
/// Panics if the two slices differ in length.
//
// The original text contained IDE inlay-hint residue (pseudo named arguments
// `src:`/`dst:`/`count:`), which is not valid Rust; this is the cleaned form.
fn move_to_slice<T>(src: &mut [MaybeUninit<T>], dst: &mut [MaybeUninit<T>]) {
    assert!(src.len() == dst.len());
    // SAFETY: `src` and `dst` come from distinct `&mut` borrows, so they
    // cannot overlap, and the length was just checked.
    unsafe {
        ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), src.len());
    }
}
1844
1845#[cfg(test)]
1846mod tests;
1847