// This is an attempt at an implementation following the ideal
//
// ```
// struct BTreeMap<K, V> {
//     height: usize,
//     root: Option<Box<Node<K, V, height>>>
// }
//
// struct Node<K, V, height: usize> {
//     keys: [K; 2 * B - 1],
//     vals: [V; 2 * B - 1],
//     edges: if height > 0 {
//         [Box<Node<K, V, height - 1>>; 2 * B]
//     } else { () },
//     parent: *const Node<K, V, height + 1>,
//     parent_idx: u16,
//     len: u16,
// }
// ```
//
// Since Rust doesn't actually have dependent types and polymorphic recursion,
// we make do with lots of unsafety.

// A major goal of this module is to avoid complexity by treating the tree as a generic (if
// weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
// this module doesn't care whether the entries are sorted, which nodes can be underfull, or
// even what underfull means. However, we do rely on a few invariants:
//
// - Trees must have uniform depth/height. This means that every path down to a leaf from a
//   given node has exactly the same length.
// - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges.
//   This implies that even an empty internal node has at least one edge.
use core::cmp::Ordering;
use core::marker::PhantomData;
use core::mem::{self, MaybeUninit};
use core::ptr::{self, NonNull, Unique};
use core::slice;

use crate::alloc::{AllocRef, Global, Layout};
use crate::boxed::Box;
/// The branching factor of the tree: internal nodes hold at most `2 * B` edges.
// NOTE(review): the `const B` definition was dropped by the extraction (original
// line 43); the value 6 matches the long-standing upstream choice — confirm.
const B: usize = 6;
pub const MIN_LEN: usize = B - 1;
pub const CAPACITY: usize = 2 * B - 1;
/// The KV index of the split point in a node filled to `CAPACITY`.
const KV_IDX_CENTER: usize = B - 1;
/// The edge index immediately left of the center KV of a full node.
const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1;
/// The edge index immediately right of the center KV of a full node.
const EDGE_IDX_RIGHT_OF_CENTER: usize = B;
50 /// The underlying representation of leaf nodes.
52 struct LeafNode<K, V> {
53 /// We use `*const` as opposed to `*mut` so as to be covariant in `K` and `V`.
54 /// This either points to an actual node or is null.
55 parent: *const InternalNode<K, V>,
57 /// This node's index into the parent node's `edges` array.
58 /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
59 /// This is only guaranteed to be initialized when `parent` is non-null.
60 parent_idx: MaybeUninit<u16>,
62 /// The number of keys and values this node stores.
64 /// This next to `parent_idx` to encourage the compiler to join `len` and
65 /// `parent_idx` into the same 32-bit word, reducing space overhead.
68 /// The arrays storing the actual data of the node. Only the first `len` elements of each
69 /// array are initialized and valid.
70 keys: [MaybeUninit<K>; CAPACITY],
71 vals: [MaybeUninit<V>; CAPACITY],
74 impl<K, V> LeafNode<K, V> {
75 /// Creates a new `LeafNode`. Unsafe because all nodes should really be hidden behind
76 /// `BoxedNode`, preventing accidental dropping of uninitialized keys and values.
77 unsafe fn new() -> Self {
79 // As a general policy, we leave fields uninitialized if they can be, as this should
80 // be both slightly faster and easier to track in Valgrind.
81 keys: MaybeUninit::uninit_array(),
82 vals: MaybeUninit::uninit_array(),
84 parent_idx: MaybeUninit::uninit(),
90 /// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
91 /// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
92 /// `InternalNode` can be directly casted to a pointer to the underlying `LeafNode` portion of the
93 /// node, allowing code to act on leaf and internal nodes generically without having to even check
94 /// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`.
96 struct InternalNode<K, V> {
99 /// The pointers to the children of this node. `len + 1` of these are considered
100 /// initialized and valid. Although during the process of `into_iter` or `drop`,
101 /// some pointers are dangling while others still need to be traversed.
102 edges: [MaybeUninit<BoxedNode<K, V>>; 2 * B],
105 impl<K, V> InternalNode<K, V> {
106 /// Creates a new `InternalNode`.
108 /// This is unsafe for two reasons. First, it returns an `InternalNode` by value, risking
109 /// dropping of uninitialized fields. Second, an invariant of internal nodes is that `len + 1`
110 /// edges are initialized and valid, meaning that even when the node is empty (having a
111 /// `len` of 0), there must be one initialized and valid edge. This function does not set up
113 unsafe fn new() -> Self {
114 InternalNode { data: unsafe { LeafNode::new() }, edges: MaybeUninit::uninit_array() }
118 /// A managed, non-null pointer to a node. This is either an owned pointer to
119 /// `LeafNode<K, V>` or an owned pointer to `InternalNode<K, V>`.
121 /// However, `BoxedNode` contains no information as to which of the two types
122 /// of nodes it actually contains, and, partially due to this lack of information,
123 /// has no destructor.
124 struct BoxedNode<K, V> {
125 ptr: Unique<LeafNode<K, V>>,
128 impl<K, V> BoxedNode<K, V> {
129 fn from_leaf(node: Box<LeafNode<K, V>>) -> Self {
130 BoxedNode { ptr: Box::into_unique(node) }
133 fn from_internal(node: Box<InternalNode<K, V>>) -> Self {
134 BoxedNode { ptr: Box::into_unique(node).cast() }
137 unsafe fn from_ptr(ptr: NonNull<LeafNode<K, V>>) -> Self {
138 BoxedNode { ptr: unsafe { Unique::new_unchecked(ptr.as_ptr()) } }
141 fn as_ptr(&self) -> NonNull<LeafNode<K, V>> {
142 NonNull::from(self.ptr)
148 /// Note that this does not have a destructor, and must be cleaned up manually.
149 pub struct Root<K, V> {
150 node: BoxedNode<K, V>,
151 /// The number of levels below the root node.
155 unsafe impl<K: Sync, V: Sync> Sync for Root<K, V> {}
156 unsafe impl<K: Send, V: Send> Send for Root<K, V> {}
158 impl<K, V> Root<K, V> {
159 /// Returns the number of levels below the root.
160 pub fn height(&self) -> usize {
164 /// Returns a new owned tree, with its own root node that is initially empty.
165 pub fn new_leaf() -> Self {
166 Root { node: BoxedNode::from_leaf(Box::new(unsafe { LeafNode::new() })), height: 0 }
169 /// Borrows and returns an immutable reference to the node owned by the root.
170 pub fn node_as_ref(&self) -> NodeRef<marker::Immut<'_>, K, V, marker::LeafOrInternal> {
171 NodeRef { height: self.height, node: self.node.as_ptr(), _marker: PhantomData }
174 /// Borrows and returns a mutable reference to the node owned by the root.
175 pub fn node_as_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::LeafOrInternal> {
176 NodeRef { height: self.height, node: self.node.as_ptr(), _marker: PhantomData }
179 pub fn node_as_valmut(&mut self) -> NodeRef<marker::ValMut<'_>, K, V, marker::LeafOrInternal> {
180 NodeRef { height: self.height, node: self.node.as_ptr(), _marker: PhantomData }
183 pub fn into_ref(self) -> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
184 NodeRef { height: self.height, node: self.node.as_ptr(), _marker: PhantomData }
187 /// Adds a new internal node with a single edge, pointing to the previous root, and make that
188 /// new node the root. This increases the height by 1 and is the opposite of
189 /// `pop_internal_level`.
190 pub fn push_internal_level(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
191 let mut new_node = Box::new(unsafe { InternalNode::new() });
192 new_node.edges[0].write(unsafe { BoxedNode::from_ptr(self.node.as_ptr()) });
194 self.node = BoxedNode::from_internal(new_node);
198 NodeRef { height: self.height, node: self.node.as_ptr(), _marker: PhantomData };
201 ret.reborrow_mut().first_edge().correct_parent_link();
207 /// Removes the internal root node, using its first child as the new root node.
208 /// As it is intended only to be called when the root node has only one child,
209 /// no cleanup is done on any of the other children.
210 /// This decreases the height by 1 and is the opposite of `push_internal_level`.
212 /// Requires exclusive access to the `Root` object but not to the root node;
213 /// it will not invalidate existing handles or references to the root node.
215 /// Panics if there is no internal level, i.e., if the root node is a leaf.
216 pub fn pop_internal_level(&mut self) {
217 assert!(self.height > 0);
219 let top = self.node.ptr;
223 self.node_as_mut().cast_unchecked::<marker::Internal>().first_edge().descend().node,
227 self.node_as_mut().as_leaf_mut().parent = ptr::null();
230 Global.dealloc(NonNull::from(top).cast(), Layout::new::<InternalNode<K, V>>());
235 // N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
236 // is `Mut`. This is technically wrong, but cannot result in any unsafety due to
237 // internal use of `NodeRef` because we stay completely generic over `K` and `V`.
238 // However, whenever a public type wraps `NodeRef`, make sure that it has the
240 /// A reference to a node.
242 /// This type has a number of parameters that controls how it acts:
243 /// - `BorrowType`: This can be `Immut<'a>`, `Mut<'a>` or `ValMut<'a>' for some `'a`
245 /// When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`,
246 /// when this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
247 /// when this is `ValMut<'a>`, the `NodeRef` acts as immutable with respect
248 /// to keys and tree structure, but allows mutable references to values,
249 /// and when this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`.
250 /// - `K` and `V`: These control what types of things are stored in the nodes.
251 /// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
252 /// `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
253 /// `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
254 /// `NodeRef` could be pointing to either type of node.
255 pub struct NodeRef<BorrowType, K, V, Type> {
256 /// The number of levels below the node.
258 node: NonNull<LeafNode<K, V>>,
259 _marker: PhantomData<(BorrowType, Type)>,
262 impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> {}
263 impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
264 fn clone(&self) -> Self {
269 unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync for NodeRef<BorrowType, K, V, Type> {}
271 unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send for NodeRef<marker::Immut<'a>, K, V, Type> {}
272 unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::Mut<'a>, K, V, Type> {}
273 unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::ValMut<'a>, K, V, Type> {}
274 unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type> {}
276 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
277 /// Exposes the data of an internal node for reading.
279 /// Returns a raw ptr to avoid invalidating other references to this node,
280 /// which is possible when BorrowType is marker::ValMut.
281 fn as_internal_ptr(&self) -> *const InternalNode<K, V> {
282 self.node.as_ptr() as *const InternalNode<K, V>
286 impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
287 /// Exposes the data of an internal node for reading,
288 /// when we know we have exclusive access.
289 fn as_internal(&mut self) -> &InternalNode<K, V> {
290 unsafe { &*self.as_internal_ptr() }
294 impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
295 /// Exposes the data of an internal node for writing.
297 /// We don't need to return a raw ptr because we have unique access to the entire node.
298 fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
299 unsafe { &mut *(self.node.as_ptr() as *mut InternalNode<K, V>) }
303 impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
304 /// Finds the length of the node. This is the number of keys or values. In an
305 /// internal node, the number of edges is `len() + 1`.
306 /// For any node, the number of possible edge handles is also `len() + 1`.
307 /// Note that, despite being safe, calling this function can have the side effect
308 /// of invalidating mutable references that unsafe code has created.
309 pub fn len(&self) -> usize {
310 // Crucially, we only access the `len` field here. If BorrowType is marker::ValMut,
311 // there might be outstanding mutable references to values that we must not invalidate.
312 unsafe { (*self.as_leaf_ptr()).len as usize }
315 /// Returns the height of this node in the whole tree. Zero height denotes the
317 pub fn height(&self) -> usize {
321 /// Temporarily takes out another, immutable reference to the same node.
322 fn reborrow(&self) -> NodeRef<marker::Immut<'_>, K, V, Type> {
323 NodeRef { height: self.height, node: self.node, _marker: PhantomData }
326 /// Exposes the leaf "portion" of any leaf or internal node.
327 /// If the node is a leaf, this function simply opens up its data.
328 /// If the node is an internal node, so not a leaf, it does have all the data a leaf has
329 /// (header, keys and values), and this function exposes that.
331 /// Returns a raw ptr to avoid invalidating other references to this node,
332 /// which is possible when BorrowType is marker::ValMut.
333 fn as_leaf_ptr(&self) -> *const LeafNode<K, V> {
334 // The node must be valid for at least the LeafNode portion.
335 // This is not a reference in the NodeRef type because we don't know if
336 // it should be unique or shared.
340 /// Borrows a reference to one of the keys stored in the node.
343 /// The node has more than `idx` initialized elements.
344 pub unsafe fn key_at(&self, idx: usize) -> &K {
345 unsafe { self.reborrow().into_key_at(idx) }
348 /// Borrows a reference to one of the values stored in the node.
351 /// The node has more than `idx` initialized elements.
352 unsafe fn val_at(&self, idx: usize) -> &V {
353 unsafe { self.reborrow().into_val_at(idx) }
356 /// Finds the parent of the current node. Returns `Ok(handle)` if the current
357 /// node actually has a parent, where `handle` points to the edge of the parent
358 /// that points to the current node. Returns `Err(self)` if the current node has
359 /// no parent, giving back the original `NodeRef`.
361 /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
362 /// both, upon success, do nothing.
365 ) -> Result<Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>, Self> {
366 // We need to use raw pointers to nodes because, if BorrowType is marker::ValMut,
367 // there might be outstanding mutable references to values that we must not invalidate.
368 let parent_as_leaf = unsafe { (*self.as_leaf_ptr()).parent as *const LeafNode<K, V> };
369 if let Some(non_zero) = NonNull::new(parent_as_leaf as *mut _) {
371 node: NodeRef { height: self.height + 1, node: non_zero, _marker: PhantomData },
372 idx: unsafe { usize::from(*(*self.as_leaf_ptr()).parent_idx.as_ptr()) },
373 _marker: PhantomData,
380 pub fn first_edge(self) -> Handle<Self, marker::Edge> {
381 unsafe { Handle::new_edge(self, 0) }
384 pub fn last_edge(self) -> Handle<Self, marker::Edge> {
385 let len = self.len();
386 unsafe { Handle::new_edge(self, len) }
389 /// Note that `self` must be nonempty.
390 pub fn first_kv(self) -> Handle<Self, marker::KV> {
391 let len = self.len();
393 unsafe { Handle::new_kv(self, 0) }
396 /// Note that `self` must be nonempty.
397 pub fn last_kv(self) -> Handle<Self, marker::KV> {
398 let len = self.len();
400 unsafe { Handle::new_kv(self, len - 1) }
404 impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
405 /// Exposes the data of a leaf node for reading in an immutable tree.
406 fn into_leaf(self) -> &'a LeafNode<K, V> {
407 // SAFETY: we can access the entire node freely and do no need raw pointers,
408 // because there can be no mutable references to this Immut tree.
409 unsafe { &(*self.as_leaf_ptr()) }
413 impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
414 /// Similar to `ascend`, gets a reference to a node's parent node, but also
415 /// deallocate the current node in the process. This is unsafe because the
416 /// current node will still be accessible despite being deallocated.
417 pub unsafe fn deallocate_and_ascend(
419 ) -> Option<Handle<NodeRef<marker::Owned, K, V, marker::Internal>, marker::Edge>> {
420 let height = self.height;
421 let node = self.node;
422 let ret = self.ascend().ok();
427 Layout::new::<InternalNode<K, V>>()
429 Layout::new::<LeafNode<K, V>>()
437 impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
438 /// Unsafely asserts to the compiler some static information about whether this
439 /// node is a `Leaf` or an `Internal`.
440 unsafe fn cast_unchecked<NewType>(self) -> NodeRef<marker::Mut<'a>, K, V, NewType> {
441 NodeRef { height: self.height, node: self.node, _marker: PhantomData }
444 /// Temporarily takes out another, mutable reference to the same node. Beware, as
445 /// this method is very dangerous, doubly so since it may not immediately appear
448 /// Because mutable pointers can roam anywhere around the tree, the returned
449 /// pointer can easily be used to make the original pointer dangling, out of
450 /// bounds, or invalid under stacked borrow rules.
451 // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef`
452 // that restricts the use of navigation methods on reborrowed pointers,
453 // preventing this unsafety.
454 unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
455 NodeRef { height: self.height, node: self.node, _marker: PhantomData }
458 /// Exposes the leaf "portion" of any leaf or internal node for writing.
459 /// If the node is a leaf, this function simply opens up its data.
460 /// If the node is an internal node, so not a leaf, it does have all the data a leaf has
461 /// (header, keys and values), and this function exposes that.
463 /// We don't need to return a raw ptr because we have unique access to the entire node.
464 fn as_leaf_mut(&mut self) -> &'a mut LeafNode<K, V> {
465 unsafe { &mut (*self.node.as_ptr()) }
468 /// Borrows a mutable reference to one of the keys stored in the node.
471 /// The node has more than `idx` initialized elements.
472 pub unsafe fn key_mut_at(&mut self, idx: usize) -> &mut K {
473 unsafe { self.reborrow_mut().into_key_mut_at(idx) }
476 /// Borrows a mutable reference to one of the values stored in the node.
479 /// The node has more than `idx` initialized elements.
480 pub unsafe fn val_mut_at(&mut self, idx: usize) -> &mut V {
481 unsafe { self.reborrow_mut().into_val_mut_at(idx) }
484 fn keys_mut(&mut self) -> &mut [K] {
485 // SAFETY: the caller will not be able to call further methods on self
486 // until the key slice reference is dropped, as we have unique access
487 // for the lifetime of the borrow.
488 unsafe { self.reborrow_mut().into_key_slice_mut() }
491 fn vals_mut(&mut self) -> &mut [V] {
492 // SAFETY: the caller will not be able to call further methods on self
493 // until the value slice reference is dropped, as we have unique access
494 // for the lifetime of the borrow.
495 unsafe { self.reborrow_mut().into_val_slice_mut() }
499 impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
501 /// The node has more than `idx` initialized elements.
502 unsafe fn into_key_at(self, idx: usize) -> &'a K {
503 unsafe { self.into_leaf().keys.get_unchecked(idx).assume_init_ref() }
507 /// The node has more than `idx` initialized elements.
508 unsafe fn into_val_at(self, idx: usize) -> &'a V {
509 unsafe { self.into_leaf().vals.get_unchecked(idx).assume_init_ref() }
513 impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
514 fn into_key_slice_mut(mut self) -> &'a mut [K] {
515 // SAFETY: The keys of a node must always be initialized up to length.
517 slice::from_raw_parts_mut(
518 MaybeUninit::slice_as_mut_ptr(&mut self.as_leaf_mut().keys),
524 fn into_val_slice_mut(mut self) -> &'a mut [V] {
525 // SAFETY: The values of a node must always be initialized up to length.
527 slice::from_raw_parts_mut(
528 MaybeUninit::slice_as_mut_ptr(&mut self.as_leaf_mut().vals),
535 /// The node has more than `idx` initialized elements.
536 unsafe fn into_key_mut_at(mut self, idx: usize) -> &'a mut K {
537 debug_assert!(idx < self.len());
539 let leaf = self.as_leaf_mut();
540 unsafe { leaf.keys.get_unchecked_mut(idx).assume_init_mut() }
544 /// The node has more than `idx` initialized elements.
545 unsafe fn into_val_mut_at(mut self, idx: usize) -> &'a mut V {
546 debug_assert!(idx < self.len());
548 let leaf = self.as_leaf_mut();
549 unsafe { leaf.vals.get_unchecked_mut(idx).assume_init_mut() }
553 impl<'a, K, V, Type> NodeRef<marker::ValMut<'a>, K, V, Type> {
555 /// The node has more than `idx` initialized elements.
556 unsafe fn into_key_val_mut_at(self, idx: usize) -> (&'a K, &'a mut V) {
557 // We only create a reference to the one element we are interested in,
558 // to avoid aliasing with outstanding references to other elements,
559 // in particular, those returned to the caller in earlier iterations.
560 let leaf = self.node.as_ptr();
561 // We must coerce to unsized array pointers because of Rust issue #74679.
562 let keys: *const [_] = unsafe { &raw const (*leaf).keys };
563 let vals: *mut [_] = unsafe { &raw mut (*leaf).vals };
564 // SAFETY: The keys and values of a node must always be initialized up to length.
565 let key = unsafe { (&*keys.get_unchecked(idx)).assume_init_ref() };
566 let val = unsafe { (&mut *vals.get_unchecked_mut(idx)).assume_init_mut() };
571 impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
572 /// Adds a key/value pair to the end of the node.
573 pub fn push(&mut self, key: K, val: V) {
574 let len = &mut self.as_leaf_mut().len;
575 let idx = *len as usize;
576 assert!(idx < CAPACITY);
579 ptr::write(self.key_mut_at(idx), key);
580 ptr::write(self.val_mut_at(idx), val);
584 /// Adds a key/value pair to the beginning of the node.
585 pub fn push_front(&mut self, key: K, val: V) {
586 assert!(self.len() < CAPACITY);
589 slice_insert(self.keys_mut(), 0, key);
590 slice_insert(self.vals_mut(), 0, val);
592 self.as_leaf_mut().len += 1;
596 impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
598 /// 'first' and 'after_last' must be in range.
599 unsafe fn correct_childrens_parent_links(&mut self, first: usize, after_last: usize) {
600 debug_assert!(first <= self.len());
601 debug_assert!(after_last <= self.len() + 1);
602 for i in first..after_last {
603 unsafe { Handle::new_edge(self.reborrow_mut(), i) }.correct_parent_link();
607 fn correct_all_childrens_parent_links(&mut self) {
608 let len = self.len();
609 unsafe { self.correct_childrens_parent_links(0, len + 1) };
613 impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
614 /// Adds a key/value pair and an edge to go to the right of that pair to
615 /// the end of the node.
616 pub fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
617 assert!(edge.height == self.height - 1);
619 let len = &mut self.as_leaf_mut().len;
620 let idx = *len as usize;
621 assert!(idx < CAPACITY);
624 ptr::write(self.key_mut_at(idx), key);
625 ptr::write(self.val_mut_at(idx), val);
626 self.as_internal_mut().edges.get_unchecked_mut(idx + 1).write(edge.node);
627 Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
631 /// Adds a key/value pair and an edge to go to the left of that pair to
632 /// the beginning of the node.
633 pub fn push_front(&mut self, key: K, val: V, edge: Root<K, V>) {
634 assert!(edge.height == self.height - 1);
635 assert!(self.len() < CAPACITY);
638 slice_insert(self.keys_mut(), 0, key);
639 slice_insert(self.vals_mut(), 0, val);
641 slice::from_raw_parts_mut(
642 MaybeUninit::slice_as_mut_ptr(&mut self.as_internal_mut().edges),
650 self.as_leaf_mut().len += 1;
652 self.correct_all_childrens_parent_links();
656 impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
657 /// Removes a key/value pair from the end of this node and returns the pair.
658 /// If this is an internal node, also removes the edge that was to the right
659 /// of that pair and returns the orphaned node that this edge owned with its
661 pub fn pop(&mut self) -> (K, V, Option<Root<K, V>>) {
662 assert!(self.len() > 0);
664 let idx = self.len() - 1;
667 let key = ptr::read(self.key_at(idx));
668 let val = ptr::read(self.val_at(idx));
669 let edge = match self.reborrow_mut().force() {
670 ForceResult::Leaf(_) => None,
671 ForceResult::Internal(mut internal) => {
673 ptr::read(internal.as_internal().edges.get_unchecked(idx + 1).as_ptr());
674 let mut new_root = Root { node: edge, height: internal.height - 1 };
675 new_root.node_as_mut().as_leaf_mut().parent = ptr::null();
680 self.as_leaf_mut().len -= 1;
685 /// Removes a key/value pair from the beginning of this node. If this is an internal node,
686 /// also removes the edge that was to the left of that pair.
687 pub fn pop_front(&mut self) -> (K, V, Option<Root<K, V>>) {
688 assert!(self.len() > 0);
690 let old_len = self.len();
693 let key = slice_remove(self.keys_mut(), 0);
694 let val = slice_remove(self.vals_mut(), 0);
695 let edge = match self.reborrow_mut().force() {
696 ForceResult::Leaf(_) => None,
697 ForceResult::Internal(mut internal) => {
698 let edge = slice_remove(
699 slice::from_raw_parts_mut(
700 MaybeUninit::slice_as_mut_ptr(&mut internal.as_internal_mut().edges),
706 let mut new_root = Root { node: edge, height: internal.height - 1 };
707 new_root.node_as_mut().as_leaf_mut().parent = ptr::null();
709 for i in 0..old_len {
710 Handle::new_edge(internal.reborrow_mut(), i).correct_parent_link();
717 self.as_leaf_mut().len -= 1;
723 fn into_kv_pointers_mut(mut self) -> (*mut K, *mut V) {
724 (self.keys_mut().as_mut_ptr(), self.vals_mut().as_mut_ptr())
728 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
729 /// Checks whether a node is an `Internal` node or a `Leaf` node.
733 NodeRef<BorrowType, K, V, marker::Leaf>,
734 NodeRef<BorrowType, K, V, marker::Internal>,
736 if self.height == 0 {
737 ForceResult::Leaf(NodeRef {
740 _marker: PhantomData,
743 ForceResult::Internal(NodeRef {
746 _marker: PhantomData,
/// A reference to a specific key/value pair or edge within a node. The `Node` parameter
/// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key/value
/// pair) or `Edge` (signifying a handle on an edge).
///
/// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
/// a child node, these represent the spaces where child pointers would go between the key/value
/// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
/// to the left of the node, one between the two pairs, and one at the right of the node.
pub struct Handle<Node, Type> {
    node: Node,
    idx: usize,
    _marker: PhantomData<Type>,
}
766 impl<Node: Copy, Type> Copy for Handle<Node, Type> {}
767 // We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
768 // `Clone`able is when it is an immutable reference and therefore `Copy`.
769 impl<Node: Copy, Type> Clone for Handle<Node, Type> {
770 fn clone(&self) -> Self {
775 impl<Node, Type> Handle<Node, Type> {
776 /// Retrieves the node that contains the edge or key/value pair this handle points to.
777 pub fn into_node(self) -> Node {
781 /// Returns the position of this handle in the node.
782 pub fn idx(&self) -> usize {
787 impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
788 /// Creates a new handle to a key/value pair in `node`.
789 /// Unsafe because the caller must ensure that `idx < node.len()`.
790 pub unsafe fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
791 debug_assert!(idx < node.len());
793 Handle { node, idx, _marker: PhantomData }
796 pub fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
797 unsafe { Handle::new_edge(self.node, self.idx) }
800 pub fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
801 unsafe { Handle::new_edge(self.node, self.idx + 1) }
805 impl<BorrowType, K, V, NodeType, HandleType> PartialEq
806 for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
808 fn eq(&self, other: &Self) -> bool {
809 self.node.node == other.node.node && self.idx == other.idx
813 impl<BorrowType, K, V, NodeType, HandleType> PartialOrd
814 for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
816 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
817 if self.node.node == other.node.node { Some(self.idx.cmp(&other.idx)) } else { None }
821 impl<BorrowType, K, V, NodeType, HandleType>
822 Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
824 /// Temporarily takes out another, immutable handle on the same location.
825 pub fn reborrow(&self) -> Handle<NodeRef<marker::Immut<'_>, K, V, NodeType>, HandleType> {
826 // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
827 Handle { node: self.node.reborrow(), idx: self.idx, _marker: PhantomData }
831 impl<'a, K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
832 /// Temporarily takes out another, mutable handle on the same location. Beware, as
833 /// this method is very dangerous, doubly so since it may not immediately appear
836 /// For details, see `NodeRef::reborrow_mut`.
837 pub unsafe fn reborrow_mut(
839 ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
840 // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
841 Handle { node: unsafe { self.node.reborrow_mut() }, idx: self.idx, _marker: PhantomData }
845 impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
846 /// Creates a new handle to an edge in `node`.
847 /// Unsafe because the caller must ensure that `idx <= node.len()`.
848 pub unsafe fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
849 debug_assert!(idx <= node.len());
851 Handle { node, idx, _marker: PhantomData }
854 pub fn left_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
856 Ok(unsafe { Handle::new_kv(self.node, self.idx - 1) })
862 pub fn right_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
863 if self.idx < self.node.len() {
864 Ok(unsafe { Handle::new_kv(self.node, self.idx) })
871 enum InsertionPlace {
876 /// Given an edge index where we want to insert into a node filled to capacity,
877 /// computes a sensible KV index of a split point and where to perform the insertion.
878 /// The goal of the split point is for its key and value to end up in a parent node;
879 /// the keys, values and edges to the left of the split point become the left child;
880 /// the keys, values and edges to the right of the split point become the right child.
881 fn splitpoint(edge_idx: usize) -> (usize, InsertionPlace) {
882 debug_assert!(edge_idx <= CAPACITY);
883 // Rust issue #74834 tries to explain these symmetric rules.
885 0..EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER - 1, InsertionPlace::Left(edge_idx)),
886 EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER, InsertionPlace::Left(edge_idx)),
887 EDGE_IDX_RIGHT_OF_CENTER => (KV_IDX_CENTER, InsertionPlace::Right(0)),
888 _ => (KV_IDX_CENTER + 1, InsertionPlace::Right(edge_idx - (KV_IDX_CENTER + 1 + 1))),
892 impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::Edge> {
893 /// Helps implementations of `insert_fit` for a particular `NodeType`,
894 /// by taking care of leaf data.
895 /// Inserts a new key/value pair between the key/value pairs to the right and left of
896 /// this edge. This method assumes that there is enough space in the node for the new
898 fn leafy_insert_fit(&mut self, key: K, val: V) {
899 // Necessary for correctness, but in a private module
900 debug_assert!(self.node.len() < CAPACITY);
903 slice_insert(self.node.keys_mut(), self.idx, key);
904 slice_insert(self.node.vals_mut(), self.idx, val);
906 self.node.as_leaf_mut().len += 1;
911 impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
912 /// Inserts a new key/value pair between the key/value pairs to the right and left of
913 /// this edge. This method assumes that there is enough space in the node for the new
916 /// The returned pointer points to the inserted value.
917 fn insert_fit(&mut self, key: K, val: V) -> *mut V {
918 self.leafy_insert_fit(key, val);
919 unsafe { self.node.val_mut_at(self.idx) }
923 impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
924 /// Inserts a new key/value pair between the key/value pairs to the right and left of
925 /// this edge. This method splits the node if there isn't enough room.
927 /// The returned pointer points to the inserted value.
928 fn insert(mut self, key: K, val: V) -> (InsertResult<'a, K, V, marker::Leaf>, *mut V) {
929 if self.node.len() < CAPACITY {
930 let ptr = self.insert_fit(key, val);
931 let kv = unsafe { Handle::new_kv(self.node, self.idx) };
932 (InsertResult::Fit(kv), ptr)
934 let (middle_kv_idx, insertion) = splitpoint(self.idx);
935 let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
936 let (mut left, k, v, mut right) = middle.split();
937 let ptr = match insertion {
938 InsertionPlace::Left(insert_idx) => unsafe {
939 Handle::new_edge(left.reborrow_mut(), insert_idx).insert_fit(key, val)
941 InsertionPlace::Right(insert_idx) => unsafe {
943 right.node_as_mut().cast_unchecked::<marker::Leaf>(),
946 .insert_fit(key, val)
949 (InsertResult::Split(SplitResult { left: left.forget_type(), k, v, right }), ptr)
954 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
955 /// Fixes the parent pointer and index in the child node below this edge. This is useful
956 /// when the ordering of edges has been changed, such as in the various `insert` methods.
957 fn correct_parent_link(mut self) {
958 let idx = self.idx as u16;
959 let ptr = self.node.as_internal_mut() as *mut _;
960 let mut child = self.descend();
961 child.as_leaf_mut().parent = ptr;
962 child.as_leaf_mut().parent_idx.write(idx);
966 impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
967 /// Inserts a new key/value pair and an edge that will go to the right of that new pair
968 /// between this edge and the key/value pair to the right of this edge. This method assumes
969 /// that there is enough space in the node for the new pair to fit.
970 fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
971 // Necessary for correctness, but in an internal module
972 debug_assert!(self.node.len() < CAPACITY);
// The edge being attached must be exactly one level below this node so the
// tree keeps uniform height (module invariant).
973 debug_assert!(edge.height == self.node.height - 1);
// NOTE(review): the embedded numbering jumps (973 -> 976 -> 979); the opening
// `unsafe` block and the surrounding `slice_insert` call for the edges array
// appear to have been elided from this listing — confirm against the original.
976 self.leafy_insert_fit(key, val);
// Build a slice view over the initialized prefix of the edges array so the
// new edge can be spliced in at `idx + 1`.
979 slice::from_raw_parts_mut(
980 MaybeUninit::slice_as_mut_ptr(&mut self.node.as_internal_mut().edges),
// Inserting shifted every edge at `idx + 1..` one slot to the right, so each
// affected child must have its parent pointer/index corrected.
987 for i in (self.idx + 1)..(self.node.len() + 1) {
988 Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
993 /// Inserts a new key/value pair and an edge that will go to the right of that new pair
994 /// between this edge and the key/value pair to the right of this edge. This method splits
995 /// the node if there isn't enough room.
// NOTE(review): the signature lines are partially elided here (995 -> 1001);
// parameters evidently include `key`, `val` and `edge` given the body below.
1001 ) -> InsertResult<'a, K, V, marker::Internal> {
// Hard assert (not just debug): the attached subtree must sit one level below.
1002 assert!(edge.height == self.node.height - 1);
1004 if self.node.len() < CAPACITY {
// Room to spare: splice the pair and edge in place.
1005 self.insert_fit(key, val, edge);
1006 let kv = unsafe { Handle::new_kv(self.node, self.idx) };
1007 InsertResult::Fit(kv)
// Full node path: pick a split point, split, then insert into the proper half.
1009 let (middle_kv_idx, insertion) = splitpoint(self.idx);
1010 let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
1011 let (mut left, k, v, mut right) = middle.split();
1013 InsertionPlace::Left(insert_idx) => unsafe {
1014 Handle::new_edge(left.reborrow_mut(), insert_idx).insert_fit(key, val, edge);
1016 InsertionPlace::Right(insert_idx) => unsafe {
// The right half is freshly allocated; re-type it as internal before inserting.
1018 right.node_as_mut().cast_unchecked::<marker::Internal>(),
1021 .insert_fit(key, val, edge);
// The split-off middle pair and right half are handed up to the caller.
1024 InsertResult::Split(SplitResult { left: left.forget_type(), k, v, right })
1029 impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
1030 /// Inserts a new key/value pair between the key/value pairs to the right and left of
1031 /// this edge. This method splits the node if there isn't enough room, and tries to
1032 /// insert the split off portion into the parent node recursively, until the root is reached.
1034 /// If the returned result is a `Fit`, its handle's node can be this edge's node or an ancestor.
1035 /// If the returned result is a `Split`, the `left` field will be the root node.
1036 /// The returned pointer points to the inserted value.
1037 pub fn insert_recursing(
// NOTE(review): parameter lines elided (1037 -> 1041); the body uses `key` and `value`.
1041 ) -> (InsertResult<'a, K, V, marker::LeafOrInternal>, *mut V) {
// First try the leaf itself; a Fit ends the whole operation immediately.
1042 let (mut split, val_ptr) = match self.insert(key, value) {
1043 (InsertResult::Fit(handle), ptr) => {
1044 return (InsertResult::Fit(handle.forget_node_type()), ptr);
1046 (InsertResult::Split(split), val_ptr) => (split, val_ptr),
// Otherwise bubble the split pair upward, parent by parent, until some
// ancestor absorbs it or we run out of ancestors (the root itself split).
1050 split = match split.left.ascend() {
1051 Ok(parent) => match parent.insert(split.k, split.v, split.right) {
1052 InsertResult::Fit(handle) => {
1053 return (InsertResult::Fit(handle.forget_node_type()), val_ptr);
1055 InsertResult::Split(split) => split,
// No parent: the root was split; the caller must grow the tree by one level.
1058 return (InsertResult::Split(SplitResult { left: root, ..split }), val_ptr);
1065 impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
1066 /// Finds the node pointed to by this edge.
1068 /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
1069 /// both, upon success, do nothing.
1070 pub fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
1071 // We need to use raw pointers to nodes because, if BorrowType is
1072 // marker::ValMut, there might be outstanding mutable references to
1073 // values that we must not invalidate. There's no worry accessing the
1074 // height field because that value is copied. Beware that, once the
1075 // node pointer is dereferenced, we access the edges array with a
1076 // reference (Rust issue #73987) and invalidate any other references
1077 // to or inside the array, should any be around.
1078 let internal_node = self.node.as_internal_ptr();
1080 height: self.node.height - 1,
1081 node: unsafe { (&*(*internal_node).edges.get_unchecked(self.idx).as_ptr()).as_ptr() },
1082 _marker: PhantomData,
1087 impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
1088 pub fn into_kv(self) -> (&'a K, &'a V) {
1089 (unsafe { self.node.into_key_at(self.idx) }, unsafe { self.node.into_val_at(self.idx) })
1093 impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
1094 pub fn into_key_mut(self) -> &'a mut K {
1095 unsafe { self.node.into_key_mut_at(self.idx) }
1098 pub fn into_val_mut(self) -> &'a mut V {
1099 unsafe { self.node.into_val_mut_at(self.idx) }
1103 impl<'a, K, V, NodeType> Handle<NodeRef<marker::ValMut<'a>, K, V, NodeType>, marker::KV> {
1104 pub fn into_kv_valmut(self) -> (&'a K, &'a mut V) {
1105 unsafe { self.node.into_key_val_mut_at(self.idx) }
1109 impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
1110 pub fn kv_mut(&mut self) -> (&mut K, &mut V) {
1111 // We cannot call into_key_mut_at and into_val_mut_at, because calling the second one
1112 // invalidates the reference returned by the first.
1113 let leaf = self.node.as_leaf_mut();
1114 let key = unsafe { leaf.keys.get_unchecked_mut(self.idx).assume_init_mut() };
1115 let val = unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() };
1120 impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
1121 /// Helps implementations of `split` for a particular `NodeType`,
1122 /// by taking care of leaf data.
1123 fn leafy_split(&mut self, new_node: &mut LeafNode<K, V>) -> (K, V, usize) {
// Move the separator pair out by value; its slots become logically uninitialized.
1125 let k = ptr::read(self.node.key_at(self.idx));
1126 let v = ptr::read(self.node.val_at(self.idx));
// Everything strictly to the right of the handle migrates to `new_node`.
1128 let new_len = self.node.len() - self.idx - 1;
1130 ptr::copy_nonoverlapping(
1131 self.node.key_at(self.idx + 1),
1132 new_node.keys.as_mut_ptr() as *mut K,
// NOTE(review): the `new_len,` count argument and closing parenthesis appear
// elided from this listing (embedded numbering jumps 1132 -> 1135).
1135 ptr::copy_nonoverlapping(
1136 self.node.val_at(self.idx + 1),
1137 new_node.vals.as_mut_ptr() as *mut V,
// Truncate the original node to the left part; the moved-out pairs are now
// owned by `new_node` (length updated accordingly).
1141 self.node.as_leaf_mut().len = self.idx as u16;
1142 new_node.len = new_len as u16;
1148 impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
1149 /// Splits the underlying node into three parts:
1151 /// - The node is truncated to only contain the key/value pairs to the right of
1153 /// - The key and value pointed to by this handle and extracted.
1154 /// - All the key/value pairs to the right of this handle are put into a newly
1156 pub fn split(mut self) -> (NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, K, V, Root<K, V>) {
1158 let mut new_node = Box::new(LeafNode::new());
1160 let (k, v, _) = self.leafy_split(&mut new_node);
1162 (self.node, k, v, Root { node: BoxedNode::from_leaf(new_node), height: 0 })
1166 /// Removes the key/value pair pointed to by this handle and returns it, along with the edge
1167 /// that the key/value pair collapsed into.
1170 ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
1172 let k = slice_remove(self.node.keys_mut(), self.idx);
1173 let v = slice_remove(self.node.vals_mut(), self.idx);
1174 self.node.as_leaf_mut().len -= 1;
1175 ((k, v), self.left_edge())
1180 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
1181 /// Returns `true` if it is valid to call `.merge()`, i.e., whether there is enough room in
1182 /// a node to hold the combination of the nodes to the left and right of this handle along
1183 /// with the key/value pair at this handle.
1184 pub fn can_merge(&self) -> bool {
1185 (self.reborrow().left_edge().descend().len()
1186 + self.reborrow().right_edge().descend().len()
1192 impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
1193 /// Splits the underlying node into three parts:
1195 /// - The node is truncated to only contain the edges and key/value pairs to the
1196 /// left of this handle. (Comment fixed: the code keeps the LEFT part — see the
1196a /// length truncation in `leafy_split` — the original text said "right".)
1197 /// - The key and value pointed to by this handle are extracted.
1198 /// - All the edges and key/value pairs to the right of this handle are put into
1199 /// a newly allocated node.
1200 pub fn split(mut self) -> (NodeRef<marker::Mut<'a>, K, V, marker::Internal>, K, V, Root<K, V>) {
1202 let mut new_node = Box::new(InternalNode::new());
// Leaf-shaped data (keys/values) is handled by the shared helper.
1204 let (k, v, new_len) = self.leafy_split(&mut new_node.data);
// The split-off node lives at the same height as the node being split.
1205 let height = self.node.height;
1206 let old_node = &*self.node.as_internal_ptr();
// Copy the edges to the right of the handle into the new node.
1208 ptr::copy_nonoverlapping(
1209 old_node.edges.as_ptr().add(self.idx + 1),
1210 new_node.edges.as_mut_ptr(),
// NOTE(review): the count argument appears elided here (numbering jumps 1210 -> 1214).
1214 let mut new_root = Root { node: BoxedNode::from_internal(new_node), height };
// Every transplanted child must now point back at the new node.
1216 for i in 0..(new_len + 1) {
1217 Handle::new_edge(new_root.node_as_mut().cast_unchecked(), i).correct_parent_link();
1220 (self.node, k, v, new_root)
1224 /// Combines the node immediately to the left of this handle, the key/value pair pointed
1225 /// to by this handle, and the node immediately to the right of this handle into one new
1226 /// child of the underlying node, returning an edge referencing that new child.
1228 /// Panics unless this edge `.can_merge()`.
// NOTE(review): the `pub fn merge(` signature line appears elided (1228 -> 1231).
1231 ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
// Two extra bitwise copies of the handle so left and right children can be
// addressed independently while `self` is still used below.
1232 let self1 = unsafe { ptr::read(&self) };
1233 let self2 = unsafe { ptr::read(&self) };
1234 let mut left_node = self1.left_edge().descend();
1235 let left_len = left_node.len();
1236 let right_node = self2.right_edge().descend();
1237 let right_len = right_node.len();
// Equivalent to `left_len + right_len + 1 <= CAPACITY`, i.e. `.can_merge()`.
1239 assert!(left_len + right_len < CAPACITY);
// The separator pair moves from the parent into the gap after the left child's
// last pair; then the right child's pairs are appended after it.
1243 left_node.keys_mut().get_unchecked_mut(left_len),
1244 slice_remove(self.node.keys_mut(), self.idx),
1246 ptr::copy_nonoverlapping(
1247 right_node.key_at(0),
1248 left_node.keys_mut().as_mut_ptr().add(left_len + 1),
1252 left_node.vals_mut().get_unchecked_mut(left_len),
1253 slice_remove(self.node.vals_mut(), self.idx),
1255 ptr::copy_nonoverlapping(
1256 right_node.val_at(0),
1257 left_node.vals_mut().as_mut_ptr().add(left_len + 1),
// The right child's edge disappears from the parent; later edges shift left,
// so their children's parent indices must be corrected.
1261 slice_remove(&mut self.node.as_internal_mut().edges, self.idx + 1);
1262 for i in self.idx + 1..self.node.len() {
1263 Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
1265 self.node.as_leaf_mut().len -= 1;
// +1 accounts for the separator pair absorbed from the parent.
1267 left_node.as_leaf_mut().len += right_len as u16 + 1;
1269 if self.node.height > 1 {
1270 // SAFETY: the height of the nodes being merged is one below the height
1271 // of the node of this edge, thus above zero, so they are internal.
1272 let mut left_node = left_node.cast_unchecked();
1273 let mut right_node = right_node.cast_unchecked();
1274 ptr::copy_nonoverlapping(
1275 right_node.as_internal().edges.as_ptr(),
1276 left_node.as_internal_mut().edges.as_mut_ptr().add(left_len + 1),
// Re-link all children transplanted from the right node.
1280 for i in left_len + 1..left_len + right_len + 2 {
1281 Handle::new_edge(left_node.reborrow_mut(), i).correct_parent_link();
// The emptied right node is freed with the layout matching its actual kind.
1284 Global.dealloc(right_node.node.cast(), Layout::new::<InternalNode<K, V>>());
1286 Global.dealloc(right_node.node.cast(), Layout::new::<LeafNode<K, V>>());
// The merged child now hangs off the edge at the original handle index.
1289 Handle::new_edge(self.node, self.idx)
1293 /// This removes a key/value pair from the left child and places it in the key/value storage
1294 /// pointed to by this handle while pushing the old key/value pair of this handle into the right
1296 pub fn steal_left(&mut self) {
1298 let (k, v, edge) = self.reborrow_mut().left_edge().descend().pop();
1300 let k = mem::replace(self.kv_mut().0, k);
1301 let v = mem::replace(self.kv_mut().1, v);
1303 match self.reborrow_mut().right_edge().descend().force() {
1304 ForceResult::Leaf(mut leaf) => leaf.push_front(k, v),
1305 ForceResult::Internal(mut internal) => internal.push_front(k, v, edge.unwrap()),
1310 /// This removes a key/value pair from the right child and places it in the key/value storage
1311 /// pointed to by this handle while pushing the old key/value pair of this handle into the left
1313 pub fn steal_right(&mut self) {
1315 let (k, v, edge) = self.reborrow_mut().right_edge().descend().pop_front();
1317 let k = mem::replace(self.kv_mut().0, k);
1318 let v = mem::replace(self.kv_mut().1, v);
1320 match self.reborrow_mut().left_edge().descend().force() {
1321 ForceResult::Leaf(mut leaf) => leaf.push(k, v),
1322 ForceResult::Internal(mut internal) => internal.push(k, v, edge.unwrap()),
1327 /// This does stealing similar to `steal_left` but steals multiple elements at once.
1328 pub fn bulk_steal_left(&mut self, count: usize) {
// `ptr::read(self)` duplicates the handle bitwise so both children can be
// held at once; `self` is still used for the parent pair below.
1330 let mut left_node = ptr::read(self).left_edge().descend();
1331 let left_len = left_node.len();
1332 let mut right_node = ptr::read(self).right_edge().descend();
1333 let right_len = right_node.len();
1335 // Make sure that we may steal safely.
1336 assert!(right_len + count <= CAPACITY);
1337 assert!(left_len >= count);
1339 let new_left_len = left_len - count;
1343 let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
1344 let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
// Raw pointers to the separator pair held by the parent at this handle.
1346 let kv = self.kv_mut();
1347 (kv.0 as *mut K, kv.1 as *mut V)
1350 // Make room for stolen elements in the right child.
1351 ptr::copy(right_kv.0, right_kv.0.add(count), right_len);
1352 ptr::copy(right_kv.1, right_kv.1.add(count), right_len);
1354 // Move elements from the left child to the right one.
1355 move_kv(left_kv, new_left_len + 1, right_kv, 0, count - 1);
1357 // Move parent's key/value pair to the right child.
1358 move_kv(parent_kv, 0, right_kv, count - 1, 1);
1360 // Move the left-most stolen pair to the parent.
1361 move_kv(left_kv, new_left_len, parent_kv, 0, 1);
// Adjust lengths only after all raw moves are done.
1364 left_node.as_leaf_mut().len -= count as u16;
1365 right_node.as_leaf_mut().len += count as u16;
// Internal children also carry edges that must follow their pairs.
1367 match (left_node.force(), right_node.force()) {
1368 (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
1369 // Make room for stolen edges.
1370 let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
1371 ptr::copy(right_edges, right_edges.add(count), right_len + 1);
1372 right.correct_childrens_parent_links(count, count + right_len + 1);
1374 move_edges(left, new_left_len + 1, right, 0, count);
1376 (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
1384 /// The symmetric clone of `bulk_steal_left`.
1385 pub fn bulk_steal_right(&mut self, count: usize) {
// Bitwise handle copies to address both children simultaneously.
1387 let mut left_node = ptr::read(self).left_edge().descend();
1388 let left_len = left_node.len();
1389 let mut right_node = ptr::read(self).right_edge().descend();
1390 let right_len = right_node.len();
1392 // Make sure that we may steal safely.
1393 assert!(left_len + count <= CAPACITY);
1394 assert!(right_len >= count);
1396 let new_right_len = right_len - count;
1400 let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
1401 let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
// Raw pointers to the separator pair held by the parent at this handle.
1403 let kv = self.kv_mut();
1404 (kv.0 as *mut K, kv.1 as *mut V)
1407 // Move parent's key/value pair to the left child.
1408 move_kv(parent_kv, 0, left_kv, left_len, 1);
1410 // Move elements from the right child to the left one.
1411 move_kv(right_kv, 0, left_kv, left_len + 1, count - 1);
1413 // Move the right-most stolen pair to the parent.
1414 move_kv(right_kv, count - 1, parent_kv, 0, 1);
1416 // Fix right indexing
1417 ptr::copy(right_kv.0.add(count), right_kv.0, new_right_len);
1418 ptr::copy(right_kv.1.add(count), right_kv.1, new_right_len);
// Adjust lengths only after all raw moves are done.
1421 left_node.as_leaf_mut().len += count as u16;
1422 right_node.as_leaf_mut().len -= count as u16;
// Internal children also carry edges that must follow their pairs.
1424 match (left_node.force(), right_node.force()) {
1425 (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
1426 move_edges(right.reborrow_mut(), 0, left, left_len + 1, count);
1428 // Fix right indexing.
1429 let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
1430 ptr::copy(right_edges.add(count), right_edges, new_right_len + 1);
1431 right.correct_childrens_parent_links(0, new_right_len + 1);
1433 (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
/// Copies `count` key/value pairs between two pairs of parallel raw arrays.
///
/// # Safety
/// Both ranges must be valid for `count` elements and must not overlap;
/// the moved-out source slots must no longer be read as initialized.
unsafe fn move_kv<K, V>(
    source: (*mut K, *mut V),
    source_offset: usize,
    dest: (*mut K, *mut V),
    dest_offset: usize,
    count: usize,
) {
    let (src_keys, src_vals) = source;
    let (dst_keys, dst_vals) = dest;
    unsafe {
        ptr::copy_nonoverlapping(src_keys.add(source_offset), dst_keys.add(dest_offset), count);
        ptr::copy_nonoverlapping(src_vals.add(source_offset), dst_vals.add(dest_offset), count);
    }
}
1455 // Source and destination must have the same height.
1456 unsafe fn move_edges<K, V>(
1457 mut source: NodeRef<marker::Mut<'_>, K, V, marker::Internal>,
1458 source_offset: usize,
1459 mut dest: NodeRef<marker::Mut<'_>, K, V, marker::Internal>,
1463 let source_ptr = source.as_internal().edges.as_ptr();
1464 let dest_ptr = dest.as_internal_mut().edges.as_mut_ptr();
1466 ptr::copy_nonoverlapping(source_ptr.add(source_offset), dest_ptr.add(dest_offset), count);
1467 dest.correct_childrens_parent_links(dest_offset, dest_offset + count);
1471 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Leaf> {
1472 /// Removes any static information asserting that this node is a `Leaf` node.
1473 pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
1474 NodeRef { height: self.height, node: self.node, _marker: PhantomData }
1478 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
1479 /// Removes any static information asserting that this node is an `Internal` node.
1480 pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
1481 NodeRef { height: self.height, node: self.node, _marker: PhantomData }
1485 impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
1486 pub fn forget_node_type(
1488 ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
1489 unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
1493 impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
1494 pub fn forget_node_type(
1496 ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
1497 unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
1501 impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::KV> {
1502 pub fn forget_node_type(
1504 ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
1505 unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
1509 impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::KV> {
1510 pub fn forget_node_type(
1512 ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
1513 unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
1517 impl<BorrowType, K, V, HandleType>
1518 Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, HandleType>
1520 /// Checks whether the underlying node is an `Internal` node or a `Leaf` node.
1524 Handle<NodeRef<BorrowType, K, V, marker::Leaf>, HandleType>,
1525 Handle<NodeRef<BorrowType, K, V, marker::Internal>, HandleType>,
1527 match self.node.force() {
1528 ForceResult::Leaf(node) => {
1529 ForceResult::Leaf(Handle { node, idx: self.idx, _marker: PhantomData })
1531 ForceResult::Internal(node) => {
1532 ForceResult::Internal(Handle { node, idx: self.idx, _marker: PhantomData })
1538 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
1539 /// Move the suffix after `self` from one node to another one. `right` must be empty.
1540 /// The first edge of `right` remains unchanged.
// NOTE(review): the `pub fn move_suffix(` signature line appears elided (1540 -> 1543).
1543 right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
// Everything at index `self.idx` and beyond moves out of the left node.
1546 let left_new_len = self.idx;
1547 let mut left_node = self.reborrow_mut().into_node();
1549 let right_new_len = left_node.len() - left_new_len;
1550 let mut right_node = right.reborrow_mut();
// The destination must be empty and at the same height (uniform-depth invariant).
1552 assert!(right_node.len() == 0);
1553 assert!(left_node.height == right_node.height);
1555 if right_new_len > 0 {
1556 let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
1557 let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
1559 move_kv(left_kv, left_new_len, right_kv, 0, right_new_len);
// Lengths are only updated after the raw moves have completed.
1561 left_node.as_leaf_mut().len = left_new_len as u16;
1562 right_node.as_leaf_mut().len = right_new_len as u16;
// Internal nodes also move the edges trailing the moved pairs; the first
// edge of `right` is deliberately left in place (see doc above).
1564 match (left_node.force(), right_node.force()) {
1565 (ForceResult::Internal(left), ForceResult::Internal(right)) => {
1566 move_edges(left, left_new_len + 1, right, 1, right_new_len);
1568 (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
1578 pub enum ForceResult<Leaf, Internal> {
1583 /// Result of insertion, when a node needed to expand beyond its capacity.
1584 /// Does not distinguish between `Leaf` and `Internal` because `Root` doesn't.
1585 pub struct SplitResult<'a, K, V> {
1586 // Altered node in existing tree with elements and edges that belong to the left of `k`.
1587 pub left: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
1588 // Some key and value split off, to be inserted elsewhere.
1591 // Owned, unattached, new node with elements and edges that belong to the right of `k`.
1592 pub right: Root<K, V>,
1595 pub enum InsertResult<'a, K, V, Type> {
1596 Fit(Handle<NodeRef<marker::Mut<'a>, K, V, Type>, marker::KV>),
1597 Split(SplitResult<'a, K, V>),
use core::marker::PhantomData;

/// Type-level tag: the node is known to be internal.
pub enum Internal {}
/// Type-level tag: the node's kind is not statically known.
pub enum LeafOrInternal {}

/// Borrow-kind tag: shared borrow of the tree.
pub struct Immut<'a>(PhantomData<&'a ()>);
/// Borrow-kind tag: unique borrow of the tree.
pub struct Mut<'a>(PhantomData<&'a mut ()>);
/// Borrow-kind tag: unique borrow that only permits value mutation.
pub struct ValMut<'a>(PhantomData<&'a mut ()>);
1616 unsafe fn slice_insert<T>(slice: &mut [T], idx: usize, val: T) {
1618 ptr::copy(slice.as_ptr().add(idx), slice.as_mut_ptr().add(idx + 1), slice.len() - idx);
1619 ptr::write(slice.get_unchecked_mut(idx), val);
1623 unsafe fn slice_remove<T>(slice: &mut [T], idx: usize) -> T {
1625 let ret = ptr::read(slice.get_unchecked(idx));
1626 ptr::copy(slice.as_ptr().add(idx + 1), slice.as_mut_ptr().add(idx), slice.len() - idx - 1);