1 // This is an attempt at an implementation following the ideal
4 // struct BTreeMap<K, V> {
6 // root: Option<Box<Node<K, V, height>>>
9 // struct Node<K, V, height: usize> {
10 // keys: [K; 2 * B - 1],
11 // vals: [V; 2 * B - 1],
12 // edges: if height > 0 {
13 // [Box<Node<K, V, height - 1>>; 2 * B]
15 // parent: *const Node<K, V, height + 1>,
21 // Since Rust doesn't actually have dependent types and polymorphic recursion,
22 // we make do with lots of unsafety.
24 // A major goal of this module is to avoid complexity by treating the tree as a generic (if
25 // weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
26 // this module doesn't care whether the entries are sorted, which nodes can be underfull, or
27 // even what underfull means. However, we do rely on a few invariants:
29 // - Trees must have uniform depth/height. This means that every path down to a leaf from a
30 // given node has exactly the same length.
31 // - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges.
32 // This implies that even an empty internal node has at least one edge.
34 use core::cmp::Ordering;
35 use core::marker::PhantomData;
36 use core::mem::{self, MaybeUninit};
37 use core::ptr::{self, NonNull, Unique};
40 use crate::alloc::{AllocRef, Global, Layout};
41 use crate::boxed::Box;
// NOTE(review): `B` (the branching factor) is referenced here but its
// definition sits on a line elided from this listing — TODO confirm its value.
44 pub const MIN_LEN: usize = B - 1;
45 pub const CAPACITY: usize = 2 * B - 1;
// NOTE(review): gaps in the leading line numbers indicate elided source lines
// (e.g. the `repr` attribute, the `len` field declaration, and closing braces);
// surviving lines are kept verbatim.
47 /// The underlying representation of leaf nodes.
49 struct LeafNode<K, V> {
50 /// We use `*const` as opposed to `*mut` so as to be covariant in `K` and `V`.
51 /// This either points to an actual node or is null.
52 parent: *const InternalNode<K, V>,
54 /// This node's index into the parent node's `edges` array.
55 /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
56 /// This is only guaranteed to be initialized when `parent` is non-null.
57 parent_idx: MaybeUninit<u16>,
59 /// The number of keys and values this node stores.
61 /// This next to `parent_idx` to encourage the compiler to join `len` and
62 /// `parent_idx` into the same 32-bit word, reducing space overhead.
65 /// The arrays storing the actual data of the node. Only the first `len` elements of each
66 /// array are initialized and valid.
67 keys: [MaybeUninit<K>; CAPACITY],
68 vals: [MaybeUninit<V>; CAPACITY],
71 impl<K, V> LeafNode<K, V> {
72 /// Creates a new `LeafNode`. Unsafe because all nodes should really be hidden behind
73 /// `BoxedNode`, preventing accidental dropping of uninitialized keys and values.
74 unsafe fn new() -> Self {
// Constructor body: keys/vals deliberately left uninitialized via
// `MaybeUninit`; `parent`/`len` initialization lines appear to be elided.
76 // As a general policy, we leave fields uninitialized if they can be, as this should
77 // be both slightly faster and easier to track in Valgrind.
78 keys: [MaybeUninit::UNINIT; CAPACITY],
79 vals: [MaybeUninit::UNINIT; CAPACITY],
81 parent_idx: MaybeUninit::uninit(),
// NOTE(review): line-number gaps indicate elided source lines (including the
// `data: LeafNode<K, V>` field implied by `InternalNode::new` below and the
// `repr(C)` attribute the doc comment relies on); surviving lines verbatim.
87 /// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
88 /// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
89 /// `InternalNode` can be directly casted to a pointer to the underlying `LeafNode` portion of the
90 /// node, allowing code to act on leaf and internal nodes generically without having to even check
91 /// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`.
93 struct InternalNode<K, V> {
96 /// The pointers to the children of this node. `len + 1` of these are considered
97 /// initialized and valid. Although during the process of `into_iter` or `drop`,
98 /// some pointers are dangling while others still need to be traversed.
99 edges: [MaybeUninit<BoxedNode<K, V>>; 2 * B],
102 impl<K, V> InternalNode<K, V> {
103 /// Creates a new `InternalNode`.
105 /// This is unsafe for two reasons. First, it returns an `InternalNode` by value, risking
106 /// dropping of uninitialized fields. Second, an invariant of internal nodes is that `len + 1`
107 /// edges are initialized and valid, meaning that even when the node is empty (having a
108 /// `len` of 0), there must be one initialized and valid edge. This function does not set up
110 unsafe fn new() -> Self {
111 InternalNode { data: unsafe { LeafNode::new() }, edges: [MaybeUninit::UNINIT; 2 * B] }
// NOTE(review): closing braces between methods are elided in this listing;
// surviving lines kept verbatim.
115 /// A managed, non-null pointer to a node. This is either an owned pointer to
116 /// `LeafNode<K, V>` or an owned pointer to `InternalNode<K, V>`.
118 /// However, `BoxedNode` contains no information as to which of the two types
119 /// of nodes it actually contains, and, partially due to this lack of information,
120 /// has no destructor.
121 struct BoxedNode<K, V> {
122 ptr: Unique<LeafNode<K, V>>,
125 impl<K, V> BoxedNode<K, V> {
// Takes ownership of a boxed leaf; `Box::into_unique` forgets the box without
// dropping, so the `BoxedNode` now owns the allocation.
126 fn from_leaf(node: Box<LeafNode<K, V>>) -> Self {
127 BoxedNode { ptr: Box::into_unique(node) }
// Internal nodes are stored via their leading `LeafNode` portion; the `.cast()`
// erases the internal-ness (relies on the layout property documented above).
130 fn from_internal(node: Box<InternalNode<K, V>>) -> Self {
131 BoxedNode { ptr: Box::into_unique(node).cast() }
// Unsafe: caller must guarantee `ptr` actually owns a node allocation.
134 unsafe fn from_ptr(ptr: NonNull<LeafNode<K, V>>) -> Self {
135 BoxedNode { ptr: unsafe { Unique::new_unchecked(ptr.as_ptr()) } }
// Borrows the raw pointer without giving up ownership.
138 fn as_ptr(&self) -> NonNull<LeafNode<K, V>> {
139 NonNull::from(self.ptr)
// NOTE(review): line-number gaps indicate elided source lines (the `height`
// field declaration, method bodies such as `height()`'s return expression, and
// closing braces); surviving lines kept verbatim.
145 /// Note that this does not have a destructor, and must be cleaned up manually.
146 pub struct Root<K, V> {
147 node: BoxedNode<K, V>,
148 /// The number of levels below the root node.
// Sound only because `Root` owns its tree; raw pointers inside prevent the
// auto-derived impls.
152 unsafe impl<K: Sync, V: Sync> Sync for Root<K, V> {}
153 unsafe impl<K: Send, V: Send> Send for Root<K, V> {}
155 impl<K, V> Root<K, V> {
156 /// Returns the number of levels below the root.
157 pub fn height(&self) -> usize {
161 /// Returns a new owned tree, with its own root node that is initially empty.
162 pub fn new_leaf() -> Self {
163 Root { node: BoxedNode::from_leaf(Box::new(unsafe { LeafNode::new() })), height: 0 }
166 /// Borrows and returns an immutable reference to the node owned by the root.
167 pub fn node_as_ref(&self) -> NodeRef<marker::Immut<'_>, K, V, marker::LeafOrInternal> {
170 node: self.node.as_ptr(),
172 _marker: PhantomData,
176 /// Borrows and returns a mutable reference to the node owned by the root.
177 pub fn node_as_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::LeafOrInternal> {
180 node: self.node.as_ptr(),
// `root` back-pointer is only set for mutable borrows (see `NodeRef` docs).
181 root: self as *mut _,
182 _marker: PhantomData,
186 pub fn into_ref(self) -> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
189 node: self.node.as_ptr(),
191 _marker: PhantomData,
195 /// Adds a new internal node with a single edge, pointing to the previous root, and make that
196 /// new node the root. This increases the height by 1 and is the opposite of
197 /// `pop_internal_level`.
198 pub fn push_internal_level(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
199 let mut new_node = Box::new(unsafe { InternalNode::new() });
// The old root becomes edge 0 of the new internal root.
200 new_node.edges[0].write(unsafe { BoxedNode::from_ptr(self.node.as_ptr()) });
202 self.node = BoxedNode::from_internal(new_node);
205 let mut ret = NodeRef {
207 node: self.node.as_ptr(),
208 root: self as *mut _,
209 _marker: PhantomData,
// The demoted old root must now point back at its new parent.
213 ret.reborrow_mut().first_edge().correct_parent_link();
219 /// Removes the internal root node, using its first child as the new root.
220 /// As it is intended only to be called when the root has only one child,
221 /// no cleanup is done on any of the other children of the root.
222 /// This decreases the height by 1 and is the opposite of `push_internal_level`.
223 /// Panics if there is no internal level, i.e. if the root is a leaf.
224 pub fn pop_internal_level(&mut self) {
225 assert!(self.height > 0);
227 let top = self.node.ptr;
231 self.node_as_mut().cast_unchecked::<marker::Internal>().first_edge().descend().node,
// The promoted child is now the root, so it has no parent.
236 (*self.node_as_mut().as_leaf_mut()).parent = ptr::null();
// Free the old internal root's allocation; its first edge has been moved out.
240 Global.dealloc(NonNull::from(top).cast(), Layout::new::<InternalNode<K, V>>());
// NOTE(review): line-number gaps indicate elided source lines (the `height`
// field, `clone` body, closing braces); surviving lines kept verbatim.
245 // N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
246 // is `Mut`. This is technically wrong, but cannot result in any unsafety due to
247 // internal use of `NodeRef` because we stay completely generic over `K` and `V`.
248 // However, whenever a public type wraps `NodeRef`, make sure that it has the
250 /// A reference to a node.
252 /// This type has a number of parameters that controls how it acts:
253 /// - `BorrowType`: This can be `Immut<'a>` or `Mut<'a>` for some `'a` or `Owned`.
254 /// When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`,
255 /// when this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
256 /// and when this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`.
257 /// - `K` and `V`: These control what types of things are stored in the nodes.
258 /// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
259 /// `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
260 /// `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
261 /// `NodeRef` could be pointing to either type of node.
262 pub struct NodeRef<BorrowType, K, V, Type> {
263 /// The number of levels below the node.
265 node: NonNull<LeafNode<K, V>>,
266 // `root` is null unless the borrow type is `Mut`
267 root: *const Root<K, V>,
268 _marker: PhantomData<(BorrowType, Type)>,
// Immutable `NodeRef`s are freely copyable, like `&T`.
271 impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> {}
272 impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
273 fn clone(&self) -> Self {
// Manual Send/Sync: the raw pointers block auto-derivation; bounds mirror
// those of `&T` / `&mut T` / owned `T` respectively.
278 unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync for NodeRef<BorrowType, K, V, Type> {}
280 unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send for NodeRef<marker::Immut<'a>, K, V, Type> {}
281 unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::Mut<'a>, K, V, Type> {}
282 unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type> {}
284 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
// Cast is sound because the type parameter says this is an internal node and
// `InternalNode` begins with a `LeafNode` (repr(C) layout, per its docs).
285 fn as_internal(&self) -> &InternalNode<K, V> {
286 unsafe { &*(self.node.as_ptr() as *mut InternalNode<K, V>) }
290 impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
291 fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
292 unsafe { &mut *(self.node.as_ptr() as *mut InternalNode<K, V>) }
// NOTE(review): line-number gaps indicate elided source lines (method bodies
// such as `height()`'s return, `ascend`'s signature line, closing braces);
// surviving lines kept verbatim.
296 impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
297 /// Finds the length of the node. This is the number of keys or values. In an
298 /// internal node, the number of edges is `len() + 1`.
299 /// For any node, the number of possible edge handles is also `len() + 1`.
300 /// Note that, despite being safe, calling this function can have the side effect
301 /// of invalidating mutable references that unsafe code has created.
302 pub fn len(&self) -> usize {
303 self.as_leaf().len as usize
306 /// Returns the height of this node in the whole tree. Zero height denotes the
308 pub fn height(&self) -> usize {
312 /// Temporarily takes out another, immutable reference to the same node.
313 fn reborrow(&self) -> NodeRef<marker::Immut<'_>, K, V, Type> {
314 NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData }
317 /// Exposes the leaf "portion" of any leaf or internal node.
318 /// If the node is a leaf, this function simply opens up its data.
319 /// If the node is an internal node, so not a leaf, it does have all the data a leaf has
320 /// (header, keys and values), and this function exposes that.
321 fn as_leaf(&self) -> &LeafNode<K, V> {
322 // The node must be valid for at least the LeafNode portion.
323 // This is not a reference in the NodeRef type because we don't know if
324 // it should be unique or shared.
325 unsafe { self.node.as_ref() }
328 /// Borrows a view into the keys stored in the node.
329 pub fn keys(&self) -> &[K] {
330 self.reborrow().into_key_slice()
333 /// Borrows a view into the values stored in the node.
334 fn vals(&self) -> &[V] {
335 self.reborrow().into_val_slice()
338 /// Finds the parent of the current node. Returns `Ok(handle)` if the current
339 /// node actually has a parent, where `handle` points to the edge of the parent
340 /// that points to the current node. Returns `Err(self)` if the current node has
341 /// no parent, giving back the original `NodeRef`.
343 /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
344 /// both, upon success, do nothing.
347 ) -> Result<Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>, Self> {
// A null `parent` means this node is the root; non-null is checked via
// `NonNull::new` before building the parent handle.
348 let parent_as_leaf = self.as_leaf().parent as *const LeafNode<K, V>;
349 if let Some(non_zero) = NonNull::new(parent_as_leaf as *mut _) {
352 height: self.height + 1,
355 _marker: PhantomData,
// `parent_idx` is initialized whenever `parent` is non-null (struct invariant).
357 idx: unsafe { usize::from(*self.as_leaf().parent_idx.as_ptr()) },
358 _marker: PhantomData,
365 pub fn first_edge(self) -> Handle<Self, marker::Edge> {
366 unsafe { Handle::new_edge(self, 0) }
369 pub fn last_edge(self) -> Handle<Self, marker::Edge> {
370 let len = self.len();
371 unsafe { Handle::new_edge(self, len) }
374 /// Note that `self` must be nonempty.
375 pub fn first_kv(self) -> Handle<Self, marker::KV> {
376 let len = self.len();
378 unsafe { Handle::new_kv(self, 0) }
381 /// Note that `self` must be nonempty.
382 pub fn last_kv(self) -> Handle<Self, marker::KV> {
383 let len = self.len();
385 unsafe { Handle::new_kv(self, len - 1) }
// NOTE(review): line-number gaps indicate elided source lines (the dealloc
// calls that use these `Layout`s, the height branch, closing braces);
// surviving lines kept verbatim.
389 impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
390 /// Similar to `ascend`, gets a reference to a node's parent node, but also
391 /// deallocate the current node in the process. This is unsafe because the
392 /// current node will still be accessible despite being deallocated.
393 pub unsafe fn deallocate_and_ascend(
395 ) -> Option<Handle<NodeRef<marker::Owned, K, V, marker::Internal>, marker::Edge>> {
// Capture height/node before `ascend` consumes `self`.
396 let height = self.height;
397 let node = self.node;
398 let ret = self.ascend().ok();
// Internal vs. leaf allocations have different layouts; the surrounding
// branch on `height` appears to be elided here.
403 Layout::new::<InternalNode<K, V>>()
405 Layout::new::<LeafNode<K, V>>()
// NOTE(review): line-number gaps indicate elided source lines (`as_leaf_mut`'s
// body, closing braces); surviving lines kept verbatim.
413 impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
414 /// Unsafely asserts to the compiler some static information about whether this
415 /// node is a `Leaf` or an `Internal`.
416 unsafe fn cast_unchecked<NewType>(&mut self) -> NodeRef<marker::Mut<'_>, K, V, NewType> {
417 NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData }
420 /// Temporarily takes out another, mutable reference to the same node. Beware, as
421 /// this method is very dangerous, doubly so since it may not immediately appear
424 /// Because mutable pointers can roam anywhere around the tree and can even (through
425 /// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut`
426 /// can easily be used to make the original mutable pointer dangling, or, in the case
427 /// of a reborrowed handle, out of bounds.
428 // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
429 // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
430 unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
431 NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData }
434 /// Exposes the leaf "portion" of any leaf or internal node for writing.
435 /// If the node is a leaf, this function simply opens up its data.
436 /// If the node is an internal node, so not a leaf, it does have all the data a leaf has
437 /// (header, keys and values), and this function exposes that.
439 /// Returns a raw ptr to avoid asserting exclusive access to the entire node.
440 fn as_leaf_mut(&mut self) -> *mut LeafNode<K, V> {
444 fn keys_mut(&mut self) -> &mut [K] {
445 // SAFETY: the caller will not be able to call further methods on self
446 // until the key slice reference is dropped, as we have unique access
447 // for the lifetime of the borrow.
448 unsafe { self.reborrow_mut().into_key_slice_mut() }
451 fn vals_mut(&mut self) -> &mut [V] {
452 // SAFETY: the caller will not be able to call further methods on self
453 // until the value slice reference is dropped, as we have unique access
454 // for the lifetime of the borrow.
455 unsafe { self.reborrow_mut().into_val_slice_mut() }
// NOTE(review): line-number gaps indicate elided source lines (the `unsafe`
// block openers, `self.len()` length arguments, tuple construction, closing
// braces); surviving lines kept verbatim.
459 impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
// Only the first `len()` elements of `keys`/`vals` are initialized, so the
// slice length is exactly `self.len()`.
460 fn into_key_slice(self) -> &'a [K] {
461 unsafe { slice::from_raw_parts(MaybeUninit::first_ptr(&self.as_leaf().keys), self.len()) }
464 fn into_val_slice(self) -> &'a [V] {
465 unsafe { slice::from_raw_parts(MaybeUninit::first_ptr(&self.as_leaf().vals), self.len()) }
469 impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
470 /// Gets a mutable reference to the root itself. This is useful primarily when the
471 /// height of the tree needs to be adjusted. Never call this on a reborrowed pointer.
472 pub fn into_root_mut(self) -> &'a mut Root<K, V> {
473 unsafe { &mut *(self.root as *mut Root<K, V>) }
476 fn into_key_slice_mut(mut self) -> &'a mut [K] {
477 // SAFETY: The keys of a node must always be initialized up to length.
479 slice::from_raw_parts_mut(
480 MaybeUninit::first_ptr_mut(&mut (*self.as_leaf_mut()).keys),
486 fn into_val_slice_mut(mut self) -> &'a mut [V] {
487 // SAFETY: The values of a node must always be initialized up to length.
489 slice::from_raw_parts_mut(
490 MaybeUninit::first_ptr_mut(&mut (*self.as_leaf_mut()).vals),
496 fn into_slices_mut(mut self) -> (&'a mut [K], &'a mut [V]) {
497 // We cannot use the getters here, because calling the second one
498 // invalidates the reference returned by the first.
499 // More precisely, it is the call to `len` that is the culprit,
500 // because that creates a shared reference to the header, which *can*
501 // overlap with the keys (and even the values, for ZST keys).
502 let len = self.len();
503 let leaf = self.as_leaf_mut();
504 // SAFETY: The keys and values of a node must always be initialized up to length.
506 slice::from_raw_parts_mut(MaybeUninit::first_ptr_mut(&mut (*leaf).keys), len)
509 slice::from_raw_parts_mut(MaybeUninit::first_ptr_mut(&mut (*leaf).vals), len)
// NOTE(review): line-number gaps indicate elided source lines (the `unsafe`
// block openers and closing braces); surviving lines kept verbatim.
515 impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
516 /// Adds a key/value pair to the end of the node.
517 pub fn push(&mut self, key: K, val: V) {
// Caller-visible precondition: the node must have spare capacity.
518 assert!(self.len() < CAPACITY);
520 let idx = self.len();
// `ptr::write` avoids dropping the uninitialized slot being overwritten.
523 ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
524 ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
526 (*self.as_leaf_mut()).len += 1;
530 /// Adds a key/value pair to the beginning of the node.
531 pub fn push_front(&mut self, key: K, val: V) {
532 assert!(self.len() < CAPACITY);
// `slice_insert` (defined elsewhere in this file) shifts existing entries
// right by one before writing at index 0.
535 slice_insert(self.keys_mut(), 0, key);
536 slice_insert(self.vals_mut(), 0, val);
538 (*self.as_leaf_mut()).len += 1;
// NOTE(review): line-number gaps indicate elided source lines (`unsafe` block
// openers, the edge-slice insertion in `push_front`, closing braces);
// surviving lines kept verbatim.
543 impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
544 /// Adds a key/value pair and an edge to go to the right of that pair to
545 /// the end of the node.
546 pub fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
// Children must sit exactly one level below their parent (uniform depth).
547 assert!(edge.height == self.height - 1);
548 assert!(self.len() < CAPACITY);
550 let idx = self.len();
553 ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
554 ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
// The new edge goes to the right of the new pair: slot `idx + 1`.
555 self.as_internal_mut().edges.get_unchecked_mut(idx + 1).write(edge.node);
557 (*self.as_leaf_mut()).len += 1;
// The adopted child must be re-pointed at this node.
559 Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
563 // Unsafe because 'first' and 'after_last' must be in range
564 unsafe fn correct_childrens_parent_links(&mut self, first: usize, after_last: usize) {
565 debug_assert!(first <= self.len());
566 debug_assert!(after_last <= self.len() + 1);
567 for i in first..after_last {
568 unsafe { Handle::new_edge(self.reborrow_mut(), i) }.correct_parent_link();
572 fn correct_all_childrens_parent_links(&mut self) {
573 let len = self.len();
// `len + 1` edges exist for `len` pairs (struct invariant).
574 unsafe { self.correct_childrens_parent_links(0, len + 1) };
577 /// Adds a key/value pair and an edge to go to the left of that pair to
578 /// the beginning of the node.
579 pub fn push_front(&mut self, key: K, val: V, edge: Root<K, V>) {
580 assert!(edge.height == self.height - 1);
581 assert!(self.len() < CAPACITY);
584 slice_insert(self.keys_mut(), 0, key);
585 slice_insert(self.vals_mut(), 0, val);
// Builds a mutable view over the edges array to insert the new edge at the
// front; the `slice_insert` call around these lines appears to be elided.
587 slice::from_raw_parts_mut(
588 MaybeUninit::first_ptr_mut(&mut self.as_internal_mut().edges),
595 (*self.as_leaf_mut()).len += 1;
// Every child shifted right, so all parent links need fixing.
597 self.correct_all_childrens_parent_links();
// NOTE(review): line-number gaps indicate elided source lines (`unsafe` block
// openers, return expressions, closing braces); surviving lines kept verbatim.
602 impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
603 /// Removes a key/value pair from the end of this node and returns the pair.
604 /// If this is an internal node, also removes the edge that was to the right
605 /// of that pair and returns the orphaned node that this edge owned with its
607 pub fn pop(&mut self) -> (K, V, Option<Root<K, V>>) {
608 assert!(self.len() > 0);
610 let idx = self.len() - 1;
// `ptr::read` moves the pair out; `len` is decremented below so the slots
// are treated as uninitialized afterwards.
613 let key = ptr::read(self.keys().get_unchecked(idx));
614 let val = ptr::read(self.vals().get_unchecked(idx));
615 let edge = match self.reborrow_mut().force() {
616 ForceResult::Leaf(_) => None,
617 ForceResult::Internal(internal) => {
619 ptr::read(internal.as_internal().edges.get_unchecked(idx + 1).as_ptr());
// The detached child becomes a root one level shorter, with no parent.
620 let mut new_root = Root { node: edge, height: internal.height - 1 };
621 (*new_root.node_as_mut().as_leaf_mut()).parent = ptr::null();
626 (*self.as_leaf_mut()).len -= 1;
631 /// Removes a key/value pair from the beginning of this node. If this is an internal node,
632 /// also removes the edge that was to the left of that pair.
633 pub fn pop_front(&mut self) -> (K, V, Option<Root<K, V>>) {
634 assert!(self.len() > 0);
636 let old_len = self.len();
// `slice_remove` shifts the remaining entries left by one.
639 let key = slice_remove(self.keys_mut(), 0);
640 let val = slice_remove(self.vals_mut(), 0);
641 let edge = match self.reborrow_mut().force() {
642 ForceResult::Leaf(_) => None,
643 ForceResult::Internal(mut internal) => {
644 let edge = slice_remove(
645 slice::from_raw_parts_mut(
646 MaybeUninit::first_ptr_mut(&mut internal.as_internal_mut().edges),
652 let mut new_root = Root { node: edge, height: internal.height - 1 };
653 (*new_root.node_as_mut().as_leaf_mut()).parent = ptr::null();
// All remaining children shifted left; fix their parent indices.
655 for i in 0..old_len {
656 Handle::new_edge(internal.reborrow_mut(), i).correct_parent_link();
663 (*self.as_leaf_mut()).len -= 1;
669 fn into_kv_pointers_mut(mut self) -> (*mut K, *mut V) {
670 (self.keys_mut().as_mut_ptr(), self.vals_mut().as_mut_ptr())
// NOTE(review): line-number gaps indicate elided source lines (`force`'s
// signature line, the `height`/`node`/`root` field copies, closing braces);
// surviving lines kept verbatim.
674 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
675 /// Checks whether a node is an `Internal` node or a `Leaf` node.
679 NodeRef<BorrowType, K, V, marker::Leaf>,
680 NodeRef<BorrowType, K, V, marker::Internal>,
// Height 0 is by definition a leaf (uniform-depth invariant).
682 if self.height == 0 {
683 ForceResult::Leaf(NodeRef {
687 _marker: PhantomData,
690 ForceResult::Internal(NodeRef {
694 _marker: PhantomData,
// NOTE(review): line-number gaps indicate elided source lines (the `node`/`idx`
// fields, `clone`/`into_node`/`idx` bodies, closing braces); surviving lines
// kept verbatim.
700 /// A reference to a specific key/value pair or edge within a node. The `Node` parameter
701 /// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key/value
702 /// pair) or `Edge` (signifying a handle on an edge).
704 /// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
705 /// a child node, these represent the spaces where child pointers would go between the key/value
706 /// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
707 /// to the left of the node, one between the two pairs, and one at the right of the node.
708 pub struct Handle<Node, Type> {
711 _marker: PhantomData<Type>,
714 impl<Node: Copy, Type> Copy for Handle<Node, Type> {}
715 // We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
716 // `Clone`able is when it is an immutable reference and therefore `Copy`.
717 impl<Node: Copy, Type> Clone for Handle<Node, Type> {
718 fn clone(&self) -> Self {
723 impl<Node, Type> Handle<Node, Type> {
724 /// Retrieves the node that contains the edge of key/value pair this handle points to.
725 pub fn into_node(self) -> Node {
729 /// Returns the position of this handle in the node.
730 pub fn idx(&self) -> usize {
// NOTE(review): line-number gaps indicate elided source lines (closing braces);
// surviving lines kept verbatim.
735 impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
736 /// Creates a new handle to a key/value pair in `node`.
737 /// Unsafe because the caller must ensure that `idx < node.len()`.
738 pub unsafe fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
739 debug_assert!(idx < node.len());
741 Handle { node, idx, _marker: PhantomData }
// Edge `i` sits to the left of pair `i`; edge `i + 1` to its right.
744 pub fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
745 unsafe { Handle::new_edge(self.node, self.idx) }
748 pub fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
749 unsafe { Handle::new_edge(self.node, self.idx + 1) }
753 impl<BorrowType, K, V, NodeType, HandleType> PartialEq
754 for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
// Handles compare equal only when they reference the same node AND position.
756 fn eq(&self, other: &Self) -> bool {
757 self.node.node == other.node.node && self.idx == other.idx
761 impl<BorrowType, K, V, NodeType, HandleType> PartialOrd
762 for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
// Handles in different nodes are unordered (returns `None`).
764 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
765 if self.node.node == other.node.node { Some(self.idx.cmp(&other.idx)) } else { None }
// NOTE(review): line-number gaps indicate elided source lines (`reborrow_mut`'s
// receiver line, closing braces); surviving lines kept verbatim.
769 impl<BorrowType, K, V, NodeType>
770 Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
772 /// Temporarily takes out another, immutable handle on the same location.
773 pub fn reborrow(&self) -> Handle<NodeRef<marker::Immut<'_>, K, V, NodeType>, HandleType> {
774 // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
775 Handle { node: self.node.reborrow(), idx: self.idx, _marker: PhantomData }
779 impl<'a, K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
780 /// Temporarily takes out another, mutable handle on the same location. Beware, as
781 /// this method is very dangerous, doubly so since it may not immediately appear
784 /// Because mutable pointers can roam anywhere around the tree and can even (through
785 /// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut`
786 /// can easily be used to make the original mutable pointer dangling, or, in the case
787 /// of a reborrowed handle, out of bounds.
788 // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
789 // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
790 pub unsafe fn reborrow_mut(
792 ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
793 // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
794 Handle { node: unsafe { self.node.reborrow_mut() }, idx: self.idx, _marker: PhantomData }
// NOTE(review): line-number gaps indicate elided source lines (the `idx > 0`
// guard in `left_kv`, the `Err(self)` arms, closing braces); surviving lines
// kept verbatim.
798 impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
799 /// Creates a new handle to an edge in `node`.
800 /// Unsafe because the caller must ensure that `idx <= node.len()`.
801 pub unsafe fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
// Note `<=`: there are `len + 1` valid edge positions, unlike KV handles.
802 debug_assert!(idx <= node.len());
804 Handle { node, idx, _marker: PhantomData }
807 pub fn left_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
809 Ok(unsafe { Handle::new_kv(self.node, self.idx - 1) })
815 pub fn right_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
816 if self.idx < self.node.len() {
817 Ok(unsafe { Handle::new_kv(self.node, self.idx) })
// NOTE(review): line-number gaps indicate elided source lines (`unsafe` block
// openers, the `else` arms of `insert`, closing braces); surviving lines kept
// verbatim.
824 impl<'a, K, V, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::Edge> {
825 /// Helps implementations of `insert_fit` for a particular `NodeType`,
826 /// by taking care of leaf data.
827 /// Inserts a new key/value pair between the key/value pairs to the right and left of
828 /// this edge. This method assumes that there is enough space in the node for the new
830 fn leafy_insert_fit(&mut self, key: K, val: V) {
831 // Necessary for correctness, but in a private module
832 debug_assert!(self.node.len() < CAPACITY);
835 slice_insert(self.node.keys_mut(), self.idx, key);
836 slice_insert(self.node.vals_mut(), self.idx, val);
838 (*self.node.as_leaf_mut()).len += 1;
843 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
844 /// Inserts a new key/value pair between the key/value pairs to the right and left of
845 /// this edge. This method assumes that there is enough space in the node for the new
848 /// The returned pointer points to the inserted value.
849 fn insert_fit(&mut self, key: K, val: V) -> *mut V {
850 self.leafy_insert_fit(key, val);
851 unsafe { self.node.vals_mut().get_unchecked_mut(self.idx) }
855 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
856 /// Inserts a new key/value pair between the key/value pairs to the right and left of
857 /// this edge. This method splits the node if there isn't enough room.
859 /// The returned pointer points to the inserted value.
860 fn insert(mut self, key: K, val: V) -> (InsertResult<'a, K, V, marker::Leaf>, *mut V) {
861 if self.node.len() < CAPACITY {
862 let ptr = self.insert_fit(key, val);
863 let kv = unsafe { Handle::new_kv(self.node, self.idx) };
864 (InsertResult::Fit(kv), ptr)
// Full node: split around the middle pair (index B), then insert into
// whichever half the target index falls in.
866 let middle = unsafe { Handle::new_kv(self.node, B) };
867 let (mut left, k, v, mut right) = middle.split();
868 let ptr = if self.idx <= B {
869 unsafe { Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val) }
873 right.node_as_mut().cast_unchecked::<marker::Leaf>(),
876 .insert_fit(key, val)
879 (InsertResult::Split(SplitResult { left: left.forget_type(), k, v, right }), ptr)
// NOTE(review): line-number gaps indicate elided source lines (`unsafe` block
// openers, the edge-slice insertion inside `insert_fit`, `insert`'s signature
// line, closing braces); surviving lines kept verbatim.
884 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
885 /// Fixes the parent pointer and index in the child node below this edge. This is useful
886 /// when the ordering of edges has been changed, such as in the various `insert` methods.
887 fn correct_parent_link(mut self) {
888 let idx = self.idx as u16;
889 let ptr = self.node.as_internal_mut() as *mut _;
890 let mut child = self.descend();
// Restore the struct invariant: child.parent/parent_idx point back here.
892 (*child.as_leaf_mut()).parent = ptr;
893 (*child.as_leaf_mut()).parent_idx.write(idx);
897 /// Inserts a new key/value pair and an edge that will go to the right of that new pair
898 /// between this edge and the key/value pair to the right of this edge. This method assumes
899 /// that there is enough space in the node for the new pair to fit.
900 fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
901 // Necessary for correctness, but in an internal module
902 debug_assert!(self.node.len() < CAPACITY);
903 debug_assert!(edge.height == self.node.height - 1);
906 self.leafy_insert_fit(key, val);
// Insert the new edge into the edges array (the `slice_insert` wrapper around
// these lines appears to be elided).
909 slice::from_raw_parts_mut(
910 MaybeUninit::first_ptr_mut(&mut self.node.as_internal_mut().edges),
// Every edge at or after the insertion point shifted right; fix links.
917 for i in (self.idx + 1)..(self.node.len() + 1) {
918 Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
923 /// Inserts a new key/value pair and an edge that will go to the right of that new pair
924 /// between this edge and the key/value pair to the right of this edge. This method splits
925 /// the node if there isn't enough room.
931 ) -> InsertResult<'a, K, V, marker::Internal> {
932 assert!(edge.height == self.node.height - 1);
934 if self.node.len() < CAPACITY {
935 self.insert_fit(key, val, edge);
936 let kv = unsafe { Handle::new_kv(self.node, self.idx) };
937 InsertResult::Fit(kv)
939 let middle = unsafe { Handle::new_kv(self.node, B) };
940 let (mut left, k, v, mut right) = middle.split();
943 Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val, edge);
948 right.node_as_mut().cast_unchecked::<marker::Internal>(),
951 .insert_fit(key, val, edge);
954 InsertResult::Split(SplitResult { left: left.forget_type(), k, v, right })
// NOTE(review): line-number gaps indicate elided source lines (the parameter
// list, the loop header, the `Err(root)` arm binding `root`, closing braces);
// surviving lines kept verbatim.
959 impl<'a, K: 'a, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
960 /// Inserts a new key/value pair between the key/value pairs to the right and left of
961 /// this edge. This method splits the node if there isn't enough room, and tries to
962 /// insert the split off portion into the parent node recursively, until the root is reached.
964 /// If the returned result is a `Fit`, its handle's node can be this edge's node or an ancestor.
965 /// If the returned result is a `Split`, the `left` field will be the root node.
966 /// The returned pointer points to the inserted value.
967 pub fn insert_recursing(
971 ) -> (InsertResult<'a, K, V, marker::LeafOrInternal>, *mut V) {
// First try the leaf itself; only a split needs to propagate upward.
972 let (mut split, val_ptr) = match self.insert(key, value) {
973 (InsertResult::Fit(handle), ptr) => {
974 return (InsertResult::Fit(handle.forget_node_type()), ptr);
976 (InsertResult::Split(split), val_ptr) => (split, val_ptr),
// Walk toward the root, re-inserting each split's separator into the parent.
980 split = match split.left.ascend() {
981 Ok(parent) => match parent.insert(split.k, split.v, split.right) {
982 InsertResult::Fit(handle) => {
983 return (InsertResult::Fit(handle.forget_node_type()), val_ptr);
985 InsertResult::Split(split) => split,
// Reached the root without fitting: hand the split back to the caller.
988 return (InsertResult::Split(SplitResult { left: root, ..split }), val_ptr);
995 impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
996     /// Finds the node pointed to by this edge.
998     /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
999     /// both, upon success, do nothing.
1000     pub fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
// Children sit one level below their parent (uniform-height invariant).
1002             height: self.node.height - 1,
// Read the boxed child pointer out of the `edges[idx]` slot; an internal
// node of length `n` has `n + 1` initialized edges, so `idx` is in bounds.
1004                 (&*self.node.as_internal().edges.get_unchecked(self.idx).as_ptr()).as_ptr()
1006             root: self.node.root,
1007             _marker: PhantomData,
1012 impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
// Converts the handle into shared borrows of its key and value, valid for
// the full lifetime `'a` of the underlying immutable borrow.
1013     pub fn into_kv(self) -> (&'a K, &'a V) {
1014         let keys = self.node.into_key_slice();
1015         let vals = self.node.into_val_slice();
// In bounds: a KV handle's `idx` always addresses an initialized pair.
1016         unsafe { (keys.get_unchecked(self.idx), vals.get_unchecked(self.idx)) }
1020 impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
// Consumes the handle, yielding exclusive borrows of the key and value for
// the whole lifetime `'a` (the handle cannot be used afterwards).
1021     pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
1023             let (keys, vals) = self.node.into_slices_mut();
1024             (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx))
1029 impl<'a, K, V, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
// Like `into_kv_mut`, but only reborrows: the returned borrows last for the
// `&mut self` borrow, so the handle stays usable afterwards.
1030     pub fn kv_mut(&mut self) -> (&mut K, &mut V) {
1032             let (keys, vals) = self.node.reborrow_mut().into_slices_mut();
1033             (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx))
1038 impl<'a, K, V, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
1039     /// Helps implementations of `split` for a particular `NodeType`,
1040     /// by taking care of leaf data.
// Moves the kv at `idx` out (returned), bulk-copies everything to its right
// into `new_node`, truncates `self` to `idx` entries, and returns the new
// node's length so internal-node `split` can move the matching edges.
1041     fn leafy_split(&mut self, new_node: &mut LeafNode<K, V>) -> (K, V, usize) {
// `ptr::read` moves the pair out without dropping; the slot is logically
// vacated by the length updates below, so nothing is dropped twice.
1043             let k = ptr::read(self.node.keys().get_unchecked(self.idx));
1044             let v = ptr::read(self.node.vals().get_unchecked(self.idx));
1046             let new_len = self.node.len() - self.idx - 1;
1048             ptr::copy_nonoverlapping(
1049                 self.node.keys().as_ptr().add(self.idx + 1),
1050                 new_node.keys.as_mut_ptr() as *mut K,
1053             ptr::copy_nonoverlapping(
1054                 self.node.vals().as_ptr().add(self.idx + 1),
1055                 new_node.vals.as_mut_ptr() as *mut V,
// Truncate the source and size the destination; the copied suffix is now
// owned by `new_node`.
1059             (*self.node.as_leaf_mut()).len = self.idx as u16;
1060             new_node.len = new_len as u16;
1066 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
1067     /// Splits the underlying node into three parts:
1069     /// - The node is truncated to only contain the key/value pairs to the right of
1071     /// - The key and value pointed to by this handle and extracted.
1072     /// - All the key/value pairs to the right of this handle are put into a newly
// NOTE(review): `leafy_split` truncates `self` to `idx` entries, i.e. `self`
// keeps the *left* part; the doc wording above appears truncated/garbled in
// this excerpt — confirm against the full original text.
1074     pub fn split(mut self) -> (NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, K, V, Root<K, V>) {
1076             let mut new_node = Box::new(LeafNode::new());
1078             let (k, v, _) = self.leafy_split(&mut new_node);
// Leaves sit at height 0; the split-off suffix becomes a free-standing root.
1080             (self.node, k, v, Root { node: BoxedNode::from_leaf(new_node), height: 0 })
1084     /// Removes the key/value pair pointed to by this handle and returns it, along with the edge
1085     /// between the now adjacent key/value pairs (if any) to the left and right of this handle.
1088     ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
// Shift the suffix left over the removed slot, then shrink the length.
1090             let k = slice_remove(self.node.keys_mut(), self.idx);
1091             let v = slice_remove(self.node.vals_mut(), self.idx);
1092             (*self.node.as_leaf_mut()).len -= 1;
1093             ((k, v), self.left_edge())
1098 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
1099     /// Splits the underlying node into three parts:
1101     /// - The node is truncated to only contain the edges and key/value pairs to the
1102     ///   right of this handle.
1103     /// - The key and value pointed to by this handle and extracted.
1104     /// - All the edges and key/value pairs to the right of this handle are put into
1105     ///   a newly allocated node.
// NOTE(review): as in the leaf version, `leafy_split` keeps the *left* part
// in `self`; the first doc bullet above looks inconsistent with the code —
// verify against the full original.
1106     pub fn split(mut self) -> (NodeRef<marker::Mut<'a>, K, V, marker::Internal>, K, V, Root<K, V>) {
1108             let mut new_node = Box::new(InternalNode::new());
// Keys/values are handled exactly like the leaf case...
1110             let (k, v, new_len) = self.leafy_split(&mut new_node.data);
1111             let height = self.node.height;
// ...but an internal node must also move the suffix's child edges
// (`new_len + 1` of them, per the `n` keys / `n + 1` edges invariant).
1113             ptr::copy_nonoverlapping(
1114                 self.node.as_internal().edges.as_ptr().add(self.idx + 1),
1115                 new_node.edges.as_mut_ptr(),
// The new sibling stays at this node's height (same tree level).
1119             let mut new_root = Root { node: BoxedNode::from_internal(new_node), height };
// Children were bulk-copied, so rewrite their parent/parent_idx back-pointers.
1121             for i in 0..(new_len + 1) {
1122                 Handle::new_edge(new_root.node_as_mut().cast_unchecked(), i).correct_parent_link();
1125             (self.node, k, v, new_root)
1129     /// Returns `true` if it is valid to call `.merge()`, i.e., whether there is enough room in
1130     /// a node to hold the combination of the nodes to the left and right of this handle along
1131     /// with the key/value pair at this handle.
1132     pub fn can_merge(&self) -> bool {
// left.len() + right.len() plus the separating kv must fit in one node;
// the comparison tail (`+ 1 <= CAPACITY`) is on an elided line.
1133         (self.reborrow().left_edge().descend().len()
1134             + self.reborrow().right_edge().descend().len()
1139     /// Combines the node immediately to the left of this handle, the key/value pair pointed
1140     /// to by this handle, and the node immediately to the right of this handle into one new
1141     /// child of the underlying node, returning an edge referencing that new child.
1143     /// Assumes that this edge `.can_merge()`.
1146     ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
// `ptr::read(&self)` duplicates the handle bitwise so two child handles can
// be taken while `self` is retained for the parent-side edits below.
1147         let self1 = unsafe { ptr::read(&self) };
1148         let self2 = unsafe { ptr::read(&self) };
1149         let mut left_node = self1.left_edge().descend();
1150         let left_len = left_node.len();
1151         let mut right_node = self2.right_edge().descend();
1152         let right_len = right_node.len();
1154         // necessary for correctness, but in a private module
1155         assert!(left_len + right_len < CAPACITY);
// Resulting layout in `left`: [left entries][separating kv][right entries].
// The separating kv is pulled out of the parent via `slice_remove`.
1159                 left_node.keys_mut().get_unchecked_mut(left_len),
1160                 slice_remove(self.node.keys_mut(), self.idx),
1162             ptr::copy_nonoverlapping(
1163                 right_node.keys().as_ptr(),
1164                 left_node.keys_mut().as_mut_ptr().add(left_len + 1),
1168                 left_node.vals_mut().get_unchecked_mut(left_len),
1169                 slice_remove(self.node.vals_mut(), self.idx),
1171             ptr::copy_nonoverlapping(
1172                 right_node.vals().as_ptr(),
1173                 left_node.vals_mut().as_mut_ptr().add(left_len + 1),
// Drop the parent's edge to the now-drained right child, then re-link the
// parent's remaining children whose slots shifted left.
1177             slice_remove(&mut self.node.as_internal_mut().edges, self.idx + 1);
1178             for i in self.idx + 1..self.node.len() {
1179                 Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
1181             (*self.node.as_leaf_mut()).len -= 1;
// `left` absorbed `right_len` entries plus the separating kv.
1183             (*left_node.as_leaf_mut()).len += right_len as u16 + 1;
// If the merged children are internal (parent height > 1), also move the
// right child's edges across, fix their parent links, and pick the matching
// deallocation layout for the right node.
1185             let layout = if self.node.height > 1 {
1186                 ptr::copy_nonoverlapping(
1187                     right_node.cast_unchecked().as_internal().edges.as_ptr(),
1197                 for i in left_len + 1..left_len + right_len + 2 {
1198                     Handle::new_edge(left_node.cast_unchecked().reborrow_mut(), i)
1199                         .correct_parent_link();
1202                 Layout::new::<InternalNode<K, V>>()
1204                 Layout::new::<LeafNode<K, V>>()
// All of the right node's contents were moved out; free only its allocation.
1206             Global.dealloc(right_node.node.cast(), layout);
1208             Handle::new_edge(self.node, self.idx)
1212     /// This removes a key/value pair from the left child and places it in the key/value storage
1213     /// pointed to by this handle while pushing the old key/value pair of this handle into the right
1215     pub fn steal_left(&mut self) {
// Pop the last kv (plus trailing edge, when the child is internal) off the
// left sibling.
1217             let (k, v, edge) = self.reborrow_mut().left_edge().descend().pop();
// Rotate: the stolen kv replaces the parent's separating kv...
1219             let k = mem::replace(self.reborrow_mut().into_kv_mut().0, k);
1220             let v = mem::replace(self.reborrow_mut().into_kv_mut().1, v);
// ...and the displaced parent kv (with the popped edge, if internal) is
// prepended to the right child. `edge.unwrap()` is fine: internal children
// always pop an edge alongside the kv.
1222             match self.reborrow_mut().right_edge().descend().force() {
1223                 ForceResult::Leaf(mut leaf) => leaf.push_front(k, v),
1224                 ForceResult::Internal(mut internal) => internal.push_front(k, v, edge.unwrap()),
1229     /// This removes a key/value pair from the right child and places it in the key/value storage
1230     /// pointed to by this handle while pushing the old key/value pair of this handle into the left
// Mirror image of `steal_left`: pop from the front of the right child,
// rotate through the parent slot, append to the left child.
1232     pub fn steal_right(&mut self) {
1234             let (k, v, edge) = self.reborrow_mut().right_edge().descend().pop_front();
1236             let k = mem::replace(self.reborrow_mut().into_kv_mut().0, k);
1237             let v = mem::replace(self.reborrow_mut().into_kv_mut().1, v);
1239             match self.reborrow_mut().left_edge().descend().force() {
1240                 ForceResult::Leaf(mut leaf) => leaf.push(k, v),
1241                 ForceResult::Internal(mut internal) => internal.push(k, v, edge.unwrap()),
1246     /// This does stealing similar to `steal_left` but steals multiple elements at once.
1247     pub fn bulk_steal_left(&mut self, count: usize) {
// Handle duplication via `ptr::read(self)`, as in `merge`, to obtain both
// child nodes while keeping `self` for the parent slot.
1249             let mut left_node = ptr::read(self).left_edge().descend();
1250             let left_len = left_node.len();
1251             let mut right_node = ptr::read(self).right_edge().descend();
1252             let right_len = right_node.len();
1254             // Make sure that we may steal safely.
// The right child gains `count` entries; the left child must have them to give.
1255             assert!(right_len + count <= CAPACITY);
1256             assert!(left_len >= count);
1258             let new_left_len = left_len - count;
// Raw key/value pointer pairs into both children and the parent slot so the
// moves below can be expressed uniformly through `move_kv`.
1262             let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
1263             let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
1265                 let kv = self.reborrow_mut().into_kv_mut();
1266                 (kv.0 as *mut K, kv.1 as *mut V)
1269                 // Make room for stolen elements in the right child.
1270                 ptr::copy(right_kv.0, right_kv.0.add(count), right_len);
1271                 ptr::copy(right_kv.1, right_kv.1.add(count), right_len);
1273                 // Move elements from the left child to the right one.
1274                 move_kv(left_kv, new_left_len + 1, right_kv, 0, count - 1);
1276                 // Move parent's key/value pair to the right child.
1277                 move_kv(parent_kv, 0, right_kv, count - 1, 1);
1279                 // Move the left-most stolen pair to the parent.
1280                 move_kv(left_kv, new_left_len, parent_kv, 0, 1);
1283             (*left_node.reborrow_mut().as_leaf_mut()).len -= count as u16;
1284             (*right_node.reborrow_mut().as_leaf_mut()).len += count as u16;
// Internal children must also move `count` edges; siblings are always at the
// same height, so a leaf/internal mix cannot occur.
1286             match (left_node.force(), right_node.force()) {
1287                 (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
1288                     // Make room for stolen edges.
1289                     let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
1290                     ptr::copy(right_edges, right_edges.add(count), right_len + 1);
1291                     right.correct_childrens_parent_links(count, count + right_len + 1);
1293                     move_edges(left, new_left_len + 1, right, 0, count);
1295                 (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
1303     /// The symmetric clone of `bulk_steal_left`.
1304     pub fn bulk_steal_right(&mut self, count: usize) {
1306             let mut left_node = ptr::read(self).left_edge().descend();
1307             let left_len = left_node.len();
1308             let mut right_node = ptr::read(self).right_edge().descend();
1309             let right_len = right_node.len();
1311             // Make sure that we may steal safely.
// Mirror of `bulk_steal_left`: here the *left* child grows by `count`.
1312             assert!(left_len + count <= CAPACITY);
1313             assert!(right_len >= count);
1315             let new_right_len = right_len - count;
// Raw kv pointers into both children and the parent slot, as in the
// left-stealing variant.
1319             let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
1320             let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
1322                 let kv = self.reborrow_mut().into_kv_mut();
1323                 (kv.0 as *mut K, kv.1 as *mut V)
1326                 // Move parent's key/value pair to the left child.
1327                 move_kv(parent_kv, 0, left_kv, left_len, 1);
1329                 // Move elements from the right child to the left one.
1330                 move_kv(right_kv, 0, left_kv, left_len + 1, count - 1);
1332                 // Move the right-most stolen pair to the parent.
1333                 move_kv(right_kv, count - 1, parent_kv, 0, 1);
1335                 // Fix right indexing
// Close the gap left at the front of the right child.
1336                 ptr::copy(right_kv.0.add(count), right_kv.0, new_right_len);
1337                 ptr::copy(right_kv.1.add(count), right_kv.1, new_right_len);
1340             (*left_node.reborrow_mut().as_leaf_mut()).len += count as u16;
1341             (*right_node.reborrow_mut().as_leaf_mut()).len -= count as u16;
// Internal children also hand over their leading `count` edges and compact
// the remainder; siblings share a height, so no leaf/internal mix.
1343             match (left_node.force(), right_node.force()) {
1344                 (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
1345                     move_edges(right.reborrow_mut(), 0, left, left_len + 1, count);
1347                     // Fix right indexing.
1348                     let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
1349                     ptr::copy(right_edges.add(count), right_edges, new_right_len + 1);
1350                     right.correct_childrens_parent_links(0, new_right_len + 1);
1352                 (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
// Bulk-moves `count` keys and values between raw kv pointer pairs.
// Caller must guarantee the ranges do not overlap and must treat the source
// entries as moved-out afterwards (no double drop).
1361 unsafe fn move_kv<K, V>(
1362     source: (*mut K, *mut V),
1363     source_offset: usize,
1364     dest: (*mut K, *mut V),
1369     ptr::copy_nonoverlapping(source.0.add(source_offset), dest.0.add(dest_offset), count);
1370     ptr::copy_nonoverlapping(source.1.add(source_offset), dest.1.add(dest_offset), count);
1374 // Source and destination must have the same height.
// Bulk-moves `count` child edges between two internal nodes, then rewrites
// each moved child's parent/parent_idx back-pointer to point at `dest`.
1375 unsafe fn move_edges<K, V>(
1376     mut source: NodeRef<marker::Mut<'_>, K, V, marker::Internal>,
1377     source_offset: usize,
1378     mut dest: NodeRef<marker::Mut<'_>, K, V, marker::Internal>,
1382     let source_ptr = source.as_internal_mut().edges.as_mut_ptr();
1383     let dest_ptr = dest.as_internal_mut().edges.as_mut_ptr();
1385     ptr::copy_nonoverlapping(source_ptr.add(source_offset), dest_ptr.add(dest_offset), count);
1386     dest.correct_childrens_parent_links(dest_offset, dest_offset + count);
1390 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Leaf> {
1391     /// Removes any static information asserting that this node is a `Leaf` node.
// Pure type-level erasure: every runtime field is copied through unchanged.
1392     pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
1393         NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData }
1397 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
1398     /// Removes any static information asserting that this node is an `Internal` node.
// Same erasure for internal nodes; only the marker type parameter changes.
1399     pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
1400         NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData }
// The four impls below erase the Leaf/Internal marker from a handle while
// preserving its node and index; `new_edge`/`new_kv` merely re-wrap them, so
// the existing handle invariants (idx in range) carry over.
1404 impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
1405     pub fn forget_node_type(
1407     ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
1408         unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
1412 impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
1413     pub fn forget_node_type(
1415     ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
1416         unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
1420 impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::KV> {
1421     pub fn forget_node_type(
1423     ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
1424         unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
1428 impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::KV> {
1429     pub fn forget_node_type(
1431     ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
1432         unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
1436 impl<BorrowType, K, V, HandleType>
1437     Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, HandleType>
1439     /// Checks whether the underlying node is an `Internal` node or a `Leaf` node.
// Delegates to `NodeRef::force` and re-attaches the handle's index and
// handle-type marker to whichever statically-typed node comes back.
1443         Handle<NodeRef<BorrowType, K, V, marker::Leaf>, HandleType>,
1444         Handle<NodeRef<BorrowType, K, V, marker::Internal>, HandleType>,
1446         match self.node.force() {
1447             ForceResult::Leaf(node) => {
1448                 ForceResult::Leaf(Handle { node, idx: self.idx, _marker: PhantomData })
1450             ForceResult::Internal(node) => {
1451                 ForceResult::Internal(Handle { node, idx: self.idx, _marker: PhantomData })
1457 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
1458     /// Move the suffix after `self` from one node to another one. `right` must be empty.
1459     /// The first edge of `right` remains unchanged.
1462         right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
// Everything at index >= self.idx moves; the prefix stays in the left node.
1465             let left_new_len = self.idx;
1466             let mut left_node = self.reborrow_mut().into_node();
1468             let right_new_len = left_node.len() - left_new_len;
1469             let mut right_node = right.reborrow_mut();
// Preconditions: destination is empty and at the same height as the source.
1471             assert!(right_node.len() == 0);
1472             assert!(left_node.height == right_node.height);
1474             if right_new_len > 0 {
1475                 let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
1476                 let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
1478                 move_kv(left_kv, left_new_len, right_kv, 0, right_new_len);
1480                 (*left_node.reborrow_mut().as_leaf_mut()).len = left_new_len as u16;
1481                 (*right_node.reborrow_mut().as_leaf_mut()).len = right_new_len as u16;
// Internal nodes also carry their suffix edges across, starting at `right`'s
// edge slot 1 — edge 0 is documented above to remain unchanged.
1483                 match (left_node.force(), right_node.force()) {
1484                     (ForceResult::Internal(left), ForceResult::Internal(right)) => {
1485                         move_edges(left, left_new_len + 1, right, 1, right_new_len);
1487                     (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
// Outcome of `force`: the same node, statically re-typed as Leaf or Internal.
// (The variant list is on elided lines in this excerpt.)
1497 pub enum ForceResult<Leaf, Internal> {
1502 /// Result of insertion, when a node needed to expand beyond its capacity.
1503 /// Does not distinguish between `Leaf` and `Internal` because `Root` doesn't.
1504 pub struct SplitResult<'a, K, V> {
1505     // Altered node in existing tree with elements and edges that belong to the left of `k`.
1506     pub left: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
1507     // Some key and value split off, to be inserted elsewhere.
1510     // Owned, unattached, new node with elements and edges that belong to the right of `k`.
1511     pub right: Root<K, V>,
// Either the pair fit (handle to where it landed) or the node had to split.
1514 pub enum InsertResult<'a, K, V, Type> {
1515     Fit(Handle<NodeRef<marker::Mut<'a>, K, V, Type>, marker::KV>),
1516     Split(SplitResult<'a, K, V>),
// Zero-sized marker types used as type parameters on `NodeRef`/`Handle`:
// node kind (Internal / LeafOrInternal; the `Leaf` variant and the
// `mod marker` header are on elided lines) and borrow kind, where `Immut`/
// `Mut` mirror `&`/`&mut` variance through their `PhantomData` fields.
1520     use core::marker::PhantomData;
1523     pub enum Internal {}
1524     pub enum LeafOrInternal {}
1527     pub struct Immut<'a>(PhantomData<&'a ()>);
1528     pub struct Mut<'a>(PhantomData<&'a mut ()>);
// Inserts `val` at `idx`, shifting the tail right by one. The shift writes
// one element past `slice.len()`, so the caller must guarantee the slice is
// a view into a buffer with at least one slot of spare capacity.
1534 unsafe fn slice_insert<T>(slice: &mut [T], idx: usize, val: T) {
// `ptr::copy` (memmove) handles the overlapping shift; `ptr::write` stores
// `val` without dropping whatever bits occupied the vacated slot.
1536         ptr::copy(slice.as_ptr().add(idx), slice.as_mut_ptr().add(idx + 1), slice.len() - idx);
1537         ptr::write(slice.get_unchecked_mut(idx), val);
1541 unsafe fn slice_remove<T>(slice: &mut [T], idx: usize) -> T {
1543 let ret = ptr::read(slice.get_unchecked(idx));
1544 ptr::copy(slice.as_ptr().add(idx + 1), slice.as_mut_ptr().add(idx), slice.len() - idx - 1);