1 // This is an attempt at an implementation following the ideal
4 // struct BTreeMap<K, V> {
6 // root: Option<Box<Node<K, V, height>>>
9 // struct Node<K, V, height: usize> {
10 // keys: [K; 2 * B - 1],
11 // vals: [V; 2 * B - 1],
12 // edges: if height > 0 {
13 // [Box<Node<K, V, height - 1>>; 2 * B]
15 // parent: *const Node<K, V, height + 1>,
21 // Since Rust doesn't actually have dependent types and polymorphic recursion,
22 // we make do with lots of unsafety.
24 // A major goal of this module is to avoid complexity by treating the tree as a generic (if
25 // weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
26 // this module doesn't care whether the entries are sorted, which nodes can be underfull, or
27 // even what underfull means. However, we do rely on a few invariants:
29 // - Trees must have uniform depth/height. This means that every path down to a leaf from a
30 // given node has exactly the same length.
31 // - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges.
32 // This implies that even an empty internal node has at least one edge.
34 use core::marker::PhantomData;
35 use core::mem::{self, MaybeUninit};
36 use core::ptr::{self, Unique, NonNull};
39 use crate::alloc::{Global, Alloc, Layout};
40 use crate::boxed::Box;
// Node size parameters, both derived from the branching factor `B`
// (declared elsewhere in this file; its value is not visible in this excerpt).
// `MIN_LEN` is the fewest entries a non-root node may hold; `CAPACITY` is the
// most entries any node may hold.
43 pub const MIN_LEN: usize = B - 1;
44 pub const CAPACITY: usize = 2 * B - 1;
46 /// The underlying representation of leaf nodes. Note that it is often unsafe to actually store
47 /// these, since only the first `len` keys and values are assumed to be initialized. As such,
48 /// these should always be put behind pointers, and specifically behind `BoxedNode` in the owned
51 /// We have a separate type for the header and rely on it matching the prefix of `LeafNode`, in
52 /// order to statically allocate a single dummy node to avoid allocations. This struct is
53 /// `repr(C)` to prevent them from being reordered. `LeafNode` does not just contain a
54 /// `NodeHeader` because we do not want unnecessary padding between `len` and the keys.
55 /// Crucially, `NodeHeader` can be safely transmuted to different K and V. (This is exploited
57 /// See `into_key_slice` for an explanation of K2. K2 cannot be safely transmuted around
58 /// because the size of `NodeHeader` depends on its alignment!
// NOTE(review): this excerpt is an elided extraction (the embedded original
// line numbers have gaps). The declarations described by the trailing doc
// comments below — presumably a `len` field and a zero-sized `K2`-typed
// marker field used by `into_key_slice` — and the closing brace are not
// visible here; confirm against the full file.
60 struct NodeHeader<K, V, K2 = ()> {
61 /// We use `*const` as opposed to `*mut` so as to be covariant in `K` and `V`.
62 /// This either points to an actual node or is null.
63 parent: *const InternalNode<K, V>,
65 /// This node's index into the parent node's `edges` array.
66 /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
67 /// This is only guaranteed to be initialized when `parent` is non-null.
68 parent_idx: MaybeUninit<u16>,
70 /// The number of keys and values this node stores.
72 /// This is next to `parent_idx` to encourage the compiler to join `len` and
73 /// `parent_idx` into the same 32-bit word, reducing space overhead.
76 /// See `into_key_slice`.
// NOTE(review): the `len` field described by the doc comment below and the
// struct's closing brace are elided from this excerpt. Per the comment on
// `NodeHeader`, this struct's leading fields must remain a byte-for-byte
// prefix match of `NodeHeader`.
80 struct LeafNode<K, V> {
81 /// We use `*const` as opposed to `*mut` so as to be covariant in `K` and `V`.
82 /// This either points to an actual node or is null.
83 parent: *const InternalNode<K, V>,
85 /// This node's index into the parent node's `edges` array.
86 /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
87 /// This is only guaranteed to be initialized when `parent` is non-null.
88 parent_idx: MaybeUninit<u16>,
90 /// The number of keys and values this node stores.
92 /// This is next to `parent_idx` to encourage the compiler to join `len` and
93 /// `parent_idx` into the same 32-bit word, reducing space overhead.
96 /// The arrays storing the actual data of the node. Only the first `len` elements of each
97 /// array are initialized and valid.
98 keys: [MaybeUninit<K>; CAPACITY],
99 vals: [MaybeUninit<V>; CAPACITY],
102 impl<K, V> LeafNode<K, V> {
103 /// Creates a new `LeafNode`. Unsafe because all nodes should really be hidden behind
104 /// `BoxedNode`, preventing accidental dropping of uninitialized keys and values.
105 unsafe fn new() -> Self {
// NOTE(review): the struct-literal opener, the `parent` and `len`
// initializers, and the closing braces are elided from this excerpt.
107 // As a general policy, we leave fields uninitialized if they can be, as this should
108 // be both slightly faster and easier to track in Valgrind.
109 keys: uninitialized_array![_; CAPACITY],
110 vals: uninitialized_array![_; CAPACITY],
112 parent_idx: MaybeUninit::uninit(),
118 impl<K, V> NodeHeader<K, V> {
// Pointer-identity comparison against the static placeholder below is what
// defines "shared root": no field inspection is needed.
119 fn is_shared_root(&self) -> bool {
120 ptr::eq(self, &EMPTY_ROOT_NODE as *const _ as *const _)
124 // We need to implement Sync here in order to make a static instance.
125 unsafe impl Sync for NodeHeader<(), ()> {}
127 // An empty node used as a placeholder for the root node, to avoid allocations.
128 // We use just a header in order to save space, since no operation on an empty tree will
129 // ever take a pointer past the first key.
130 static EMPTY_ROOT_NODE: NodeHeader<(), ()> = NodeHeader {
// NOTE(review): the remaining initializers of this static (`parent`, `len`,
// ...) and its closing are elided from this excerpt.
132 parent_idx: MaybeUninit::uninit(),
137 /// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
138 /// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
139 /// `InternalNode` can be directly casted to a pointer to the underlying `LeafNode` portion of the
140 /// node, allowing code to act on leaf and internal nodes generically without having to even check
141 /// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`.
// NOTE(review): the `#[repr(C)]` attribute the doc comment relies on is not
// visible in this excerpt; confirm it is present in the full file.
143 struct InternalNode<K, V> {
144 data: LeafNode<K, V>,
146 /// The pointers to the children of this node. `len + 1` of these are considered
147 /// initialized and valid.
148 edges: [MaybeUninit<BoxedNode<K, V>>; 2 * B],
151 impl<K, V> InternalNode<K, V> {
152 /// Creates a new `InternalNode`.
154 /// This is unsafe for two reasons. First, it returns an `InternalNode` by value, risking
155 /// dropping of uninitialized fields. Second, an invariant of internal nodes is that `len + 1`
156 /// edges are initialized and valid, meaning that even when the node is empty (having a
157 /// `len` of 0), there must be one initialized and valid edge. This function does not set up
159 unsafe fn new() -> Self {
// NOTE(review): the struct-literal opener and the closing braces are elided
// from this excerpt.
161 data: LeafNode::new(),
162 edges: uninitialized_array![_; 2*B],
167 /// An owned pointer to a node. This basically is either `Box<LeafNode<K, V>>` or
168 /// `Box<InternalNode<K, V>>`. However, it contains no information as to which of the two types
169 /// of nodes is actually behind the box, and, partially due to this lack of information, has no
171 struct BoxedNode<K, V> {
172 ptr: Unique<LeafNode<K, V>>
175 impl<K, V> BoxedNode<K, V> {
176 fn from_leaf(node: Box<LeafNode<K, V>>) -> Self {
177 BoxedNode { ptr: Box::into_unique(node) }
// An internal node is stored through a pointer to its leaf portion; per the
// `InternalNode` doc comment above, the `repr(C)` prefix layout makes this
// cast valid.
180 fn from_internal(node: Box<InternalNode<K, V>>) -> Self {
182 BoxedNode { ptr: Unique::new_unchecked(Box::into_raw(node) as *mut LeafNode<K, V>) }
// Unsafe: presumably the caller must guarantee `ptr` refers to a node this
// `BoxedNode` may take ownership of — TODO confirm against callers.
186 unsafe fn from_ptr(ptr: NonNull<LeafNode<K, V>>) -> Self {
187 BoxedNode { ptr: Unique::from(ptr) }
190 fn as_ptr(&self) -> NonNull<LeafNode<K, V>> {
191 NonNull::from(self.ptr)
195 /// An owned tree. Note that despite being owned, this does not have a destructor,
196 /// and must be cleaned up manually.
197 pub struct Root<K, V> {
198 node: BoxedNode<K, V>,
// NOTE(review): a `height` field is referenced throughout (`self.height`,
// struct literals `Root { node, height }`) but its declaration is elided
// from this excerpt.
202 unsafe impl<K: Sync, V: Sync> Sync for Root<K, V> { }
203 unsafe impl<K: Send, V: Send> Send for Root<K, V> { }
205 impl<K, V> Root<K, V> {
206 pub fn is_shared_root(&self) -> bool {
207 self.as_ref().is_shared_root()
// Builds a `Root` pointing at the static `EMPTY_ROOT_NODE`, avoiding any
// allocation for an empty tree.
210 pub fn shared_empty_root() -> Self {
213 BoxedNode::from_ptr(NonNull::new_unchecked(
214 &EMPTY_ROOT_NODE as *const _ as *const LeafNode<K, V> as *mut _
221 pub fn new_leaf() -> Self {
223 node: BoxedNode::from_leaf(Box::new(unsafe { LeafNode::new() })),
229 -> NodeRef<marker::Immut<'_>, K, V, marker::LeafOrInternal> {
232 node: self.node.as_ptr(),
233 root: self as *const _ as *mut _,
234 _marker: PhantomData,
238 pub fn as_mut(&mut self)
239 -> NodeRef<marker::Mut<'_>, K, V, marker::LeafOrInternal> {
242 node: self.node.as_ptr(),
243 root: self as *mut _,
244 _marker: PhantomData,
248 pub fn into_ref(self)
249 -> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
252 node: self.node.as_ptr(),
253 root: ptr::null_mut(), // FIXME: Is there anything better to do here?
254 _marker: PhantomData,
258 /// Adds a new internal node with a single edge, pointing to the previous root, and make that
259 /// new node the root. This increases the height by 1 and is the opposite of `pop_level`.
260 pub fn push_level(&mut self)
261 -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
262 debug_assert!(!self.is_shared_root());
263 let mut new_node = Box::new(unsafe { InternalNode::new() });
// The old root becomes edge 0 of the new root.
264 new_node.edges[0].write(unsafe { BoxedNode::from_ptr(self.node.as_ptr()) });
266 self.node = BoxedNode::from_internal(new_node);
269 let mut ret = NodeRef {
271 node: self.node.as_ptr(),
272 root: self as *mut _,
// The old root's parent pointer/index must now point into the new root.
277 ret.reborrow_mut().first_edge().correct_parent_link();
283 /// Removes the root node, using its first child as the new root. This cannot be called when
284 /// the tree consists only of a leaf node. As it is intended only to be called when the root
285 /// has only one edge, no cleanup is done on any of the other children or elements of the root.
286 /// This decreases the height by 1 and is the opposite of `push_level`.
287 pub fn pop_level(&mut self) {
288 debug_assert!(self.height > 0);
290 let top = self.node.ptr;
293 BoxedNode::from_ptr(self.as_mut()
294 .cast_unchecked::<marker::Internal>()
// The promoted child is now the root and has no parent.
300 unsafe { (*self.as_mut().as_leaf_mut()).parent = ptr::null(); }
// Free only the discarded root's own allocation (its keys/values/edges are
// not dropped here — see the doc comment above).
303 Global.dealloc(NonNull::from(top).cast(), Layout::new::<InternalNode<K, V>>());
308 // N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
309 // is `Mut`. This is technically wrong, but cannot result in any unsafety due to
310 // internal use of `NodeRef` because we stay completely generic over `K` and `V`.
311 // However, whenever a public type wraps `NodeRef`, make sure that it has the
313 /// A reference to a node.
315 /// This type has a number of parameters that controls how it acts:
316 /// - `BorrowType`: This can be `Immut<'a>` or `Mut<'a>` for some `'a` or `Owned`.
317 /// When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`,
318 /// when this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
319 /// and when this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`.
320 /// - `K` and `V`: These control what types of things are stored in the nodes.
321 /// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
322 /// `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
323 /// `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
324 /// `NodeRef` could be pointing to either type of node.
325 /// Note that in case of a leaf node, this might still be the shared root! Only turn
326 /// this into a `LeafNode` reference if you know it is not a root! Shared references
327 /// must be dereferencable *for the entire size of their pointee*, so `&InternalNode`
328 /// pointing to the shared root is UB.
329 /// Turning this into a `NodeHeader` is always safe.
330 pub struct NodeRef<BorrowType, K, V, Type> {
332 node: NonNull<LeafNode<K, V>>,
333 // This is null unless the borrow type is `Mut`
// NOTE(review): `Root::as_ref` above also stores a non-null root pointer for
// `Immut` borrows, so the comment on the previous line may be stale —
// confirm against the full file.
334 root: *const Root<K, V>,
335 _marker: PhantomData<(BorrowType, Type)>
// Immutable `NodeRef`s are freely copyable; mutable/owned ones are not.
338 impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> { }
339 impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
340 fn clone(&self) -> Self {
345 unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync
346 for NodeRef<BorrowType, K, V, Type> { }
348 unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send
349 for NodeRef<marker::Immut<'a>, K, V, Type> { }
350 unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send
351 for NodeRef<marker::Mut<'a>, K, V, Type> { }
352 unsafe impl<K: Send, V: Send, Type> Send
353 for NodeRef<marker::Owned, K, V, Type> { }
355 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
// Reinterprets the stored leaf pointer as an internal node. The `Internal`
// type parameter promises the node really is internal, and per the
// `InternalNode` doc comment its `LeafNode` data is a layout prefix.
356 fn as_internal(&self) -> &InternalNode<K, V> {
358 &*(self.node.as_ptr() as *mut InternalNode<K, V>)
363 impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
// Mutable counterpart of `as_internal`.
364 fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
366 &mut *(self.node.as_ptr() as *mut InternalNode<K, V>)
372 impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
373 /// Finds the length of the node. This is the number of keys or values. In an
374 /// internal node, the number of edges is `len() + 1`.
375 pub fn len(&self) -> usize {
// Reading through the header is safe even for the shared root (see the
// type-level comment on `NodeRef`).
376 self.as_header().len as usize
379 /// Returns the height of this node in the whole tree. Zero height denotes the
381 pub fn height(&self) -> usize {
385 /// Removes any static information about whether this node is a `Leaf` or an
387 pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
396 /// Temporarily takes out another, immutable reference to the same node.
397 fn reborrow(&self) -> NodeRef<marker::Immut<'_>, K, V, Type> {
406 /// Assert that this is indeed a proper leaf node, and not the shared root.
407 unsafe fn as_leaf(&self) -> &LeafNode<K, V> {
// Always safe, even for the shared root: `NodeHeader` is the prefix that is
// guaranteed to be in-bounds of every node allocation.
411 fn as_header(&self) -> &NodeHeader<K, V> {
413 &*(self.node.as_ptr() as *const NodeHeader<K, V>)
417 pub fn is_shared_root(&self) -> bool {
418 self.as_header().is_shared_root()
421 pub fn keys(&self) -> &[K] {
422 self.reborrow().into_key_slice()
425 fn vals(&self) -> &[V] {
426 self.reborrow().into_val_slice()
429 /// Finds the parent of the current node. Returns `Ok(handle)` if the current
430 /// node actually has a parent, where `handle` points to the edge of the parent
431 /// that points to the current node. Returns `Err(self)` if the current node has
432 /// no parent, giving back the original `NodeRef`.
434 /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
435 /// both, upon success, do nothing.
436 pub fn ascend(self) -> Result<
// A null parent pointer means this node is the root (see the `parent` field
// doc comment on `LeafNode`).
447 let parent_as_leaf = self.as_header().parent as *const LeafNode<K, V>;
448 if let Some(non_zero) = NonNull::new(parent_as_leaf as *mut _) {
451 height: self.height + 1,
// `parent_idx` is initialized whenever `parent` is non-null, which the
// `NonNull::new` check above has just established.
456 idx: unsafe { usize::from(*self.as_header().parent_idx.as_ptr()) },
464 pub fn first_edge(self) -> Handle<Self, marker::Edge> {
465 Handle::new_edge(self, 0)
468 pub fn last_edge(self) -> Handle<Self, marker::Edge> {
469 let len = self.len();
470 Handle::new_edge(self, len)
473 /// Note that `self` must be nonempty.
474 pub fn first_kv(self) -> Handle<Self, marker::KV> {
475 debug_assert!(self.len() > 0);
476 Handle::new_kv(self, 0)
479 /// Note that `self` must be nonempty.
480 pub fn last_kv(self) -> Handle<Self, marker::KV> {
481 let len = self.len();
482 debug_assert!(len > 0);
483 Handle::new_kv(self, len - 1)
487 impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
488 /// Similar to `ascend`, gets a reference to a node's parent node, but also
489 /// deallocate the current node in the process. This is unsafe because the
490 /// current node will still be accessible despite being deallocated.
491 pub unsafe fn deallocate_and_ascend(self) -> Option<
// The shared root is statically allocated and must never be deallocated.
501 debug_assert!(!self.is_shared_root());
502 let node = self.node;
// Order matters: `ascend` reads this node's header (parent pointer/index),
// so it must run before the allocation is freed.
503 let ret = self.ascend().ok();
504 Global.dealloc(node.cast(), Layout::new::<LeafNode<K, V>>());
509 impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
510 /// Similar to `ascend`, gets a reference to a node's parent node, but also
511 /// deallocate the current node in the process. This is unsafe because the
512 /// current node will still be accessible despite being deallocated.
513 pub unsafe fn deallocate_and_ascend(self) -> Option<
523 let node = self.node;
// Same ascend-before-dealloc ordering as the leaf version, but freeing the
// larger `InternalNode` layout.
524 let ret = self.ascend().ok();
525 Global.dealloc(node.cast(), Layout::new::<InternalNode<K, V>>());
530 impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
531 /// Unsafely asserts to the compiler some static information about whether this
532 /// node is a `Leaf`.
533 unsafe fn cast_unchecked<NewType>(&mut self)
534 -> NodeRef<marker::Mut<'_>, K, V, NewType> {
544 /// Temporarily takes out another, mutable reference to the same node. Beware, as
545 /// this method is very dangerous, doubly so since it may not immediately appear
548 /// Because mutable pointers can roam anywhere around the tree and can even (through
549 /// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut`
550 /// can easily be used to make the original mutable pointer dangling, or, in the case
551 /// of a reborrowed handle, out of bounds.
552 // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
553 // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
554 unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
563 /// Returns a raw ptr to avoid asserting exclusive access to the entire node.
564 fn as_leaf_mut(&mut self) -> *mut LeafNode<K, V> {
565 // We are mutable, so we cannot be the root, so accessing this as a leaf is okay.
// Convenience wrappers over the slice conversions; the `reborrow_mut` keeps
// `self` usable afterwards.
569 fn keys_mut(&mut self) -> &mut [K] {
570 unsafe { self.reborrow_mut().into_key_slice_mut() }
573 fn vals_mut(&mut self) -> &mut [V] {
574 unsafe { self.reborrow_mut().into_val_slice_mut() }
578 impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
579 fn into_key_slice(self) -> &'a [K] {
580 // We have to be careful here because we might be pointing to the shared root.
581 // In that case, we must not create an `&LeafNode`. We could just return
582 // an empty slice whenever the length is 0 (this includes the shared root),
583 // but we want to avoid that run-time check.
584 // Instead, we create a slice pointing into the node whenever possible.
585 // We can sometimes do this even for the shared root, as the slice will be
586 // empty. We cannot *always* do this because if the type is too highly
587 // aligned, the offset of `keys` in a "full node" might be outside the bounds
588 // of the header! So we do an alignment check first, that will be
589 // evaluated at compile-time, and only do any run-time check in the rare case
590 // that the alignment is very big.
591 if mem::align_of::<K>() > mem::align_of::<LeafNode<(), ()>>() && self.is_shared_root() {
594 // Thanks to the alignment check above, we know that `keys` will be
595 // in-bounds of some allocation even if this is the shared root!
596 // (We might be one-past-the-end, but that is allowed by LLVM.)
597 // Getting the pointer is tricky though. `NodeHeader` does not have a `keys`
598 // field because we want its size to not depend on the alignment of `K`
599 // (needed because `as_header` should be safe). We cannot call `as_leaf`
600 // because we might be the shared root.
601 // For this reason, `NodeHeader` has this `K2` parameter (that's usually `()`
602 // and hence just adds a size-0-align-1 field, not affecting layout).
603 // We know that we can transmute `NodeHeader<K, V, ()>` to `NodeHeader<K, V, K>`
604 // because we did the alignment check above, and hence `NodeHeader<K, V, K>`
605 // is not bigger than `NodeHeader<K, V, ()>`! Then we can use `NodeHeader<K, V, K>`
606 // to compute the pointer where the keys start.
607 // This entire hack will become unnecessary once
608 // <https://github.com/rust-lang/rfcs/pull/2582> lands, then we can just take a raw
609 // pointer to the `keys` field of `*const InternalNode<K, V>`.
611 // This is a non-debug-assert because it can be completely compile-time evaluated.
612 assert!(mem::size_of::<NodeHeader<K, V>>() == mem::size_of::<NodeHeader<K, V, K>>());
613 let header = self.as_header() as *const _ as *const NodeHeader<K, V, K>;
614 let keys = unsafe { &(*header).keys_start as *const _ as *const K };
616 slice::from_raw_parts(keys, self.len())
621 fn into_val_slice(self) -> &'a [V] {
622 debug_assert!(!self.is_shared_root());
623 // We cannot be the root, so `as_leaf` is okay
625 slice::from_raw_parts(
626 MaybeUninit::first_ptr(&self.as_leaf().vals),
632 fn into_slices(self) -> (&'a [K], &'a [V]) {
// Duplicate `self` bitwise so that each consuming conversion below gets its
// own copy of the (plain-pointer) `NodeRef`.
633 let k = unsafe { ptr::read(&self) };
634 (k.into_key_slice(), self.into_val_slice())
638 impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
639 /// Gets a mutable reference to the root itself. This is useful primarily when the
640 /// height of the tree needs to be adjusted. Never call this on a reborrowed pointer.
641 pub fn into_root_mut(self) -> &'a mut Root<K, V> {
643 &mut *(self.root as *mut Root<K, V>)
647 fn into_key_slice_mut(mut self) -> &'a mut [K] {
648 // Same as for `into_key_slice` above, we try to avoid a run-time check
649 // (the alignment comparison will usually be performed at compile-time).
650 if mem::align_of::<K>() > mem::align_of::<LeafNode<(), ()>>() && self.is_shared_root() {
654 slice::from_raw_parts_mut(
655 MaybeUninit::first_ptr_mut(&mut (*self.as_leaf_mut()).keys),
662 fn into_val_slice_mut(mut self) -> &'a mut [V] {
663 debug_assert!(!self.is_shared_root());
665 slice::from_raw_parts_mut(
666 MaybeUninit::first_ptr_mut(&mut (*self.as_leaf_mut()).vals),
672 fn into_slices_mut(mut self) -> (&'a mut [K], &'a mut [V]) {
673 debug_assert!(!self.is_shared_root());
674 // We cannot use the getters here, because calling the second one
675 // invalidates the reference returned by the first.
676 // More precisely, it is the call to `len` that is the culprit,
677 // because that creates a shared reference to the header, which *can*
678 // overlap with the keys (and even the values, for ZST keys).
// Hence: read `len` first, take the raw leaf pointer once, and derive both
// slices from that single pointer.
680 let len = self.len();
681 let leaf = self.as_leaf_mut();
682 let keys = slice::from_raw_parts_mut(
683 MaybeUninit::first_ptr_mut(&mut (*leaf).keys),
686 let vals = slice::from_raw_parts_mut(
687 MaybeUninit::first_ptr_mut(&mut (*leaf).vals),
695 impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
696 /// Adds a key/value pair to the end of the node.
697 pub fn push(&mut self, key: K, val: V) {
698 // Necessary for correctness, but this is an internal module
699 debug_assert!(self.len() < CAPACITY);
700 debug_assert!(!self.is_shared_root());
702 let idx = self.len();
// `ptr::write` avoids dropping the (uninitialized) slot being overwritten.
705 ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
706 ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
// Bump `len` only after the slots are initialized.
708 (*self.as_leaf_mut()).len += 1;
712 /// Adds a key/value pair to the beginning of the node.
713 pub fn push_front(&mut self, key: K, val: V) {
714 // Necessary for correctness, but this is an internal module
715 debug_assert!(self.len() < CAPACITY);
716 debug_assert!(!self.is_shared_root());
// `slice_insert` shifts the existing entries right by one before writing.
719 slice_insert(self.keys_mut(), 0, key);
720 slice_insert(self.vals_mut(), 0, val);
722 (*self.as_leaf_mut()).len += 1;
727 impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
728 /// Adds a key/value pair and an edge to go to the right of that pair to
729 /// the end of the node.
730 pub fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
731 // Necessary for correctness, but this is an internal module
// The pushed subtree must sit exactly one level below this node to keep the
// uniform-depth invariant (see the module header).
732 debug_assert!(edge.height == self.height - 1);
733 debug_assert!(self.len() < CAPACITY);
735 let idx = self.len();
738 ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
739 ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
740 self.as_internal_mut().edges.get_unchecked_mut(idx + 1).write(edge.node);
742 (*self.as_leaf_mut()).len += 1;
// The newly adopted child must be retargeted at this node.
744 Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
// Rewrites the parent pointer/index of the children in `first..after_last`.
748 fn correct_childrens_parent_links(&mut self, first: usize, after_last: usize) {
749 for i in first..after_last {
750 Handle::new_edge(unsafe { self.reborrow_mut() }, i).correct_parent_link();
754 fn correct_all_childrens_parent_links(&mut self) {
755 let len = self.len();
// `len + 1` edges: an internal node always has one more edge than entries.
756 self.correct_childrens_parent_links(0, len + 1);
759 /// Adds a key/value pair and an edge to go to the left of that pair to
760 /// the beginning of the node.
761 pub fn push_front(&mut self, key: K, val: V, edge: Root<K, V>) {
762 // Necessary for correctness, but this is an internal module
763 debug_assert!(edge.height == self.height - 1);
764 debug_assert!(self.len() < CAPACITY);
767 slice_insert(self.keys_mut(), 0, key);
768 slice_insert(self.vals_mut(), 0, val);
770 slice::from_raw_parts_mut(
771 MaybeUninit::first_ptr_mut(&mut self.as_internal_mut().edges),
778 (*self.as_leaf_mut()).len += 1;
// Every edge shifted right, so every child's parent_idx must be fixed up.
780 self.correct_all_childrens_parent_links();
785 impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
786 /// Removes a key/value pair from the end of this node. If this is an internal node,
787 /// also removes the edge that was to the right of that pair.
788 pub fn pop(&mut self) -> (K, V, Option<Root<K, V>>) {
789 // Necessary for correctness, but this is an internal module
790 debug_assert!(self.len() > 0);
792 let idx = self.len() - 1;
// `ptr::read` moves the values out without running their destructors in
// place; the subsequent `len -= 1` marks the slots uninitialized.
795 let key = ptr::read(self.keys().get_unchecked(idx));
796 let val = ptr::read(self.vals().get_unchecked(idx));
797 let edge = match self.reborrow_mut().force() {
798 ForceResult::Leaf(_) => None,
799 ForceResult::Internal(internal) => {
800 let edge = ptr::read(
801 internal.as_internal().edges.get_unchecked(idx + 1).as_ptr()
// The detached subtree becomes a standalone root one level down.
803 let mut new_root = Root { node: edge, height: internal.height - 1 };
804 (*new_root.as_mut().as_leaf_mut()).parent = ptr::null();
809 (*self.as_leaf_mut()).len -= 1;
814 /// Removes a key/value pair from the beginning of this node. If this is an internal node,
815 /// also removes the edge that was to the left of that pair.
816 pub fn pop_front(&mut self) -> (K, V, Option<Root<K, V>>) {
817 // Necessary for correctness, but this is an internal module
818 debug_assert!(self.len() > 0);
820 let old_len = self.len();
823 let key = slice_remove(self.keys_mut(), 0);
824 let val = slice_remove(self.vals_mut(), 0);
825 let edge = match self.reborrow_mut().force() {
826 ForceResult::Leaf(_) => None,
827 ForceResult::Internal(mut internal) => {
828 let edge = slice_remove(
829 slice::from_raw_parts_mut(
830 MaybeUninit::first_ptr_mut(&mut internal.as_internal_mut().edges),
836 let mut new_root = Root { node: edge, height: internal.height - 1 };
837 (*new_root.as_mut().as_leaf_mut()).parent = ptr::null();
// Remaining edges all shifted left by one, so their parent_idx values must
// be rewritten.
839 for i in 0..old_len {
840 Handle::new_edge(internal.reborrow_mut(), i).correct_parent_link();
847 (*self.as_leaf_mut()).len -= 1;
// Raw pointers to the key and value storage, for bulk-move operations.
853 fn into_kv_pointers_mut(mut self) -> (*mut K, *mut V) {
855 self.keys_mut().as_mut_ptr(),
856 self.vals_mut().as_mut_ptr()
861 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
862 /// Checks whether a node is an `Internal` node or a `Leaf` node.
863 pub fn force(self) -> ForceResult<
864 NodeRef<BorrowType, K, V, marker::Leaf>,
865 NodeRef<BorrowType, K, V, marker::Internal>
// Height zero means leaf level (see `height`'s doc comment above); anything
// greater is an internal node.
867 if self.height == 0 {
868 ForceResult::Leaf(NodeRef {
875 ForceResult::Internal(NodeRef {
885 /// A reference to a specific key/value pair or edge within a node. The `Node` parameter
886 /// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key/value
887 /// pair) or `Edge` (signifying a handle on an edge).
889 /// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
890 /// a child node, these represent the spaces where child pointers would go between the key/value
891 /// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
892 /// to the left of the node, one between the two pairs, and one at the right of the node.
893 pub struct Handle<Node, Type> {
// NOTE(review): the `node` and `idx` field declarations referenced elsewhere
// (`self.node`, `self.idx`) are elided from this excerpt.
896 _marker: PhantomData<Type>
899 impl<Node: Copy, Type> Copy for Handle<Node, Type> { }
900 // We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
901 // `Clone`able is when it is an immutable reference and therefore `Copy`.
902 impl<Node: Copy, Type> Clone for Handle<Node, Type> {
903 fn clone(&self) -> Self {
908 impl<Node, Type> Handle<Node, Type> {
909 /// Retrieves the node that contains the edge or key/value pair this handle points to.
910 pub fn into_node(self) -> Node {
915 impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
916 /// Creates a new handle to a key/value pair in `node`. `idx` must be less than `node.len()`.
917 pub fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
918 // Necessary for correctness, but in a private module
919 debug_assert!(idx < node.len());
// The edge immediately left/right of this pair: edge i sits before pair i.
928 pub fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
929 Handle::new_edge(self.node, self.idx)
932 pub fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
933 Handle::new_edge(self.node, self.idx + 1)
937 impl<BorrowType, K, V, NodeType, HandleType> PartialEq
938 for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType> {
940 fn eq(&self, other: &Self) -> bool {
// Two handles are equal when they address the same slot of the same node.
941 self.node.node == other.node.node && self.idx == other.idx
945 impl<BorrowType, K, V, NodeType, HandleType>
946 Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType> {
948 /// Temporarily takes out another, immutable handle on the same location.
949 pub fn reborrow(&self)
950 -> Handle<NodeRef<marker::Immut<'_>, K, V, NodeType>, HandleType> {
952 // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
954 node: self.node.reborrow(),
961 impl<'a, K, V, NodeType, HandleType>
962 Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
964 /// Temporarily takes out another, mutable handle on the same location. Beware, as
965 /// this method is very dangerous, doubly so since it may not immediately appear
968 /// Because mutable pointers can roam anywhere around the tree and can even (through
969 /// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut`
970 /// can easily be used to make the original mutable pointer dangling, or, in the case
971 /// of a reborrowed handle, out of bounds.
972 // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
973 // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
974 pub unsafe fn reborrow_mut(&mut self)
975 -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
977 // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
979 node: self.node.reborrow_mut(),
986 impl<BorrowType, K, V, NodeType>
987 Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
989 /// Creates a new handle to an edge in `node`. `idx` must be less than or equal to
991 pub fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
992 // Necessary for correctness, but in a private module
// `<=` (not `<`): there is one more edge than there are key/value pairs.
993 debug_assert!(idx <= node.len());
1002 pub fn left_kv(self)
1003 -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
1006 Ok(Handle::new_kv(self.node, self.idx - 1))
1012 pub fn right_kv(self)
1013 -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
// The rightmost edge has no pair to its right; the elided else-branch
// presumably returns `Err(self)` — confirm against the full file.
1015 if self.idx < self.node.len() {
1016 Ok(Handle::new_kv(self.node, self.idx))
1023 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
1024 /// Inserts a new key/value pair between the key/value pairs to the right and left of
1025 /// this edge. This method assumes that there is enough space in the node for the new
1028 /// The returned pointer points to the inserted value.
1029 fn insert_fit(&mut self, key: K, val: V) -> *mut V {
1030 // Necessary for correctness, but in a private module
1031 debug_assert!(self.node.len() < CAPACITY);
1032 debug_assert!(!self.node.is_shared_root());
// Shift entries at `idx..` right by one and write the new pair at `idx`.
1035 slice_insert(self.node.keys_mut(), self.idx, key);
1036 slice_insert(self.node.vals_mut(), self.idx, val);
1038 (*self.node.as_leaf_mut()).len += 1;
1040 self.node.vals_mut().get_unchecked_mut(self.idx)
1044 /// Inserts a new key/value pair between the key/value pairs to the right and left of
1045 /// this edge. This method splits the node if there isn't enough room.
1047 /// The returned pointer points to the inserted value.
1048 pub fn insert(mut self, key: K, val: V)
1049 -> (InsertResult<'a, K, V, marker::Leaf>, *mut V) {
// Fast path: room available, insert in place.
1051 if self.node.len() < CAPACITY {
1052 let ptr = self.insert_fit(key, val);
1053 (InsertResult::Fit(Handle::new_kv(self.node, self.idx)), ptr)
// Slow path: split around the middle pair, then insert into whichever half
// the original edge index falls in.
1055 let middle = Handle::new_kv(self.node, B);
1056 let (mut left, k, v, mut right) = middle.split();
1057 let ptr = if self.idx <= B {
1059 Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val)
1064 right.as_mut().cast_unchecked::<marker::Leaf>(),
1066 ).insert_fit(key, val)
1069 (InsertResult::Split(left, k, v, right), ptr)
// Insertion through a mutable edge handle on an *internal* node; insertions here
// carry an extra child edge alongside the key/value pair.
1074 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
1075 /// Fixes the parent pointer and index in the child node below this edge. This is useful
1076 /// when the ordering of edges has been changed, such as in the various `insert` methods.
1077 fn correct_parent_link(mut self) {
1078 let idx = self.idx as u16;
1079 let ptr = self.node.as_internal_mut() as *mut _;
1080 let mut child = self.descend();
// Writes raw parent pointer/index into the child's header.
1082 (*child.as_leaf_mut()).parent = ptr;
1083 (*child.as_leaf_mut()).parent_idx.write(idx);
1087 /// Unsafely asserts to the compiler some static information about whether the underlying
1088 /// node of this handle is a `Leaf`.
1089 unsafe fn cast_unchecked<NewType>(&mut self)
1090 -> Handle<NodeRef<marker::Mut<'_>, K, V, NewType>, marker::Edge> {
1092 Handle::new_edge(self.node.cast_unchecked(), self.idx)
1095 /// Inserts a new key/value pair and an edge that will go to the right of that new pair
1096 /// between this edge and the key/value pair to the right of this edge. This method assumes
1097 /// that there is enough space in the node for the new pair to fit.
1098 fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
1099 // Necessary for correctness, but in an internal module
1100 debug_assert!(self.node.len() < CAPACITY);
1101 debug_assert!(edge.height == self.node.height - 1);
1104 // This cast is a lie, but it allows us to reuse the key/value insertion logic.
1105 self.cast_unchecked::<marker::Leaf>().insert_fit(key, val);
// Build a slice view over the (possibly uninitialized) edges array so the new
// edge can be spliced in; the slice length and the actual insert are elided here.
1108 slice::from_raw_parts_mut(
1109 MaybeUninit::first_ptr_mut(&mut self.node.as_internal_mut().edges),
// Every edge to the right of the insertion point moved by one slot, so the
// children's cached parent indices must be refreshed.
1116 for i in (self.idx+1)..(self.node.len()+1) {
1117 Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
1122 /// Inserts a new key/value pair and an edge that will go to the right of that new pair
1123 /// between this edge and the key/value pair to the right of this edge. This method splits
1124 /// the node if there isn't enough room.
1125 pub fn insert(mut self, key: K, val: V, edge: Root<K, V>)
1126 -> InsertResult<'a, K, V, marker::Internal> {
1128 // Necessary for correctness, but this is an internal module
1129 debug_assert!(edge.height == self.node.height - 1);
// Fast path: fits without splitting.
1131 if self.node.len() < CAPACITY {
1132 self.insert_fit(key, val, edge);
1133 InsertResult::Fit(Handle::new_kv(self.node, self.idx))
// Full node: split at the middle pair, then insert into the proper half.
// The branch condition selecting left vs. right is elided from this view.
1135 let middle = Handle::new_kv(self.node, B);
1136 let (mut left, k, v, mut right) = middle.split();
1139 Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val, edge);
1144 right.as_mut().cast_unchecked::<marker::Internal>(),
1146 ).insert_fit(key, val, edge);
1149 InsertResult::Split(left, k, v, right)
1154 impl<BorrowType, K, V>
1155 Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
1157 /// Finds the node pointed to by this edge.
1159 /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
1160 /// both, upon success, do nothing.
1161 pub fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
// Child is one level lower; reads the BoxedNode out of the edges array.
1163 height: self.node.height - 1,
1165 (&*self.node.as_internal().edges.get_unchecked(self.idx).as_ptr()).as_ptr()
1167 root: self.node.root,
1168 _marker: PhantomData
// Immutable KV access: converts the handle into shared references to its pair.
1173 impl<'a, K: 'a, V: 'a, NodeType>
1174 Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
1176 pub fn into_kv(self) -> (&'a K, &'a V) {
1177 let (keys, vals) = self.node.into_slices();
// get_unchecked is sound because a KV handle's idx is always < len.
1179 (keys.get_unchecked(self.idx), vals.get_unchecked(self.idx))
// Mutable KV access: consumes the handle, yielding exclusive references for 'a.
1184 impl<'a, K: 'a, V: 'a, NodeType>
1185 Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
1187 pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
1188 let (keys, vals) = self.node.into_slices_mut();
// get_unchecked_mut is sound because a KV handle's idx is always < len.
1190 (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx))
// Like into_kv_mut, but borrows the handle instead of consuming it, so the
// returned references live only as long as `&mut self`.
1195 impl<'a, K, V, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
1196 pub fn kv_mut(&mut self) -> (&mut K, &mut V) {
1198 let (keys, vals) = self.node.reborrow_mut().into_slices_mut();
1199 (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx))
1204 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
1205 /// Splits the underlying node into three parts:
1207 /// - The node is truncated to only contain the key/value pairs to the right of
1209 /// - The key and value pointed to by this handle and extracted.
1210 /// - All the key/value pairs to the right of this handle are put into a newly
1212 pub fn split(mut self)
1213 -> (NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, K, V, Root<K, V>) {
1214 debug_assert!(!self.node.is_shared_root());
1216 let mut new_node = Box::new(LeafNode::new());
// Move the middle pair out by raw read; its slots are logically vacated by
// the length truncation below, so no double-drop occurs.
1218 let k = ptr::read(self.node.keys().get_unchecked(self.idx));
1219 let v = ptr::read(self.node.vals().get_unchecked(self.idx));
1221 let new_len = self.node.len() - self.idx - 1;
// Bulk-move everything right of the split point into the new node.
1223 ptr::copy_nonoverlapping(
1224 self.node.keys().as_ptr().add(self.idx + 1),
1225 new_node.keys.as_mut_ptr() as *mut K,
1228 ptr::copy_nonoverlapping(
1229 self.node.vals().as_ptr().add(self.idx + 1),
1230 new_node.vals.as_mut_ptr() as *mut V,
// Truncate the left node; the moved-out slots are now past its len.
1234 (*self.node.as_leaf_mut()).len = self.idx as u16;
1235 new_node.len = new_len as u16;
// The new right sibling becomes its own root (height/parent fields elided here).
1241 node: BoxedNode::from_leaf(new_node),
1248 /// Removes the key/value pair pointed to by this handle, returning the edge between the
1249 /// now adjacent key/value pairs to the left and right of this handle.
1250 pub fn remove(mut self)
1251 -> (Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>, K, V) {
1252 debug_assert!(!self.node.is_shared_root());
1254 let k = slice_remove(self.node.keys_mut(), self.idx);
1255 let v = slice_remove(self.node.vals_mut(), self.idx);
1256 (*self.node.as_leaf_mut()).len -= 1;
1257 (self.left_edge(), k, v)
1262 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
1263 /// Splits the underlying node into three parts:
1265 /// - The node is truncated to only contain the edges and key/value pairs to the
1266 /// right of this handle.
1267 /// - The key and value pointed to by this handle and extracted.
1268 /// - All the edges and key/value pairs to the right of this handle are put into
1269 /// a newly allocated node.
1270 pub fn split(mut self)
1271 -> (NodeRef<marker::Mut<'a>, K, V, marker::Internal>, K, V, Root<K, V>) {
1273 let mut new_node = Box::new(InternalNode::new());
// Move the middle pair out by raw read; its slots are vacated by the
// length truncation below.
1275 let k = ptr::read(self.node.keys().get_unchecked(self.idx));
1276 let v = ptr::read(self.node.vals().get_unchecked(self.idx));
1278 let height = self.node.height;
1279 let new_len = self.node.len() - self.idx - 1;
// Bulk-move keys, values, and (unlike the leaf version) child edges.
1281 ptr::copy_nonoverlapping(
1282 self.node.keys().as_ptr().add(self.idx + 1),
1283 new_node.data.keys.as_mut_ptr() as *mut K,
1286 ptr::copy_nonoverlapping(
1287 self.node.vals().as_ptr().add(self.idx + 1),
1288 new_node.data.vals.as_mut_ptr() as *mut V,
// An internal node with n pairs owns n + 1 edges; the elided count here is
// presumably new_len + 1 — TODO confirm against the full source.
1291 ptr::copy_nonoverlapping(
1292 self.node.as_internal().edges.as_ptr().add(self.idx + 1),
1293 new_node.edges.as_mut_ptr(),
1297 (*self.node.as_leaf_mut()).len = self.idx as u16;
1298 new_node.data.len = new_len as u16;
1300 let mut new_root = Root {
1301 node: BoxedNode::from_internal(new_node),
// The moved children still point at the old parent; rewire every one of them.
1305 for i in 0..(new_len+1) {
1306 Handle::new_edge(new_root.as_mut().cast_unchecked(), i).correct_parent_link();
1317 /// Returns `true` if it is valid to call `.merge()`, i.e., whether there is enough room in
1318 /// a node to hold the combination of the nodes to the left and right of this handle along
1319 /// with the key/value pair at this handle.
1320 pub fn can_merge(&self) -> bool {
1334 /// Combines the node immediately to the left of this handle, the key/value pair pointed
1335 /// to by this handle, and the node immediately to the right of this handle into one new
1336 /// child of the underlying node, returning an edge referencing that new child.
1338 /// Assumes that this edge `.can_merge()`.
1339 pub fn merge(mut self)
1340 -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
// ptr::read duplicates the handle so both neighbouring edges can be
// descended while `self` is still used for the parent below.
1341 let self1 = unsafe { ptr::read(&self) };
1342 let self2 = unsafe { ptr::read(&self) };
1343 let mut left_node = self1.left_edge().descend();
1344 let left_len = left_node.len();
1345 let mut right_node = self2.right_edge().descend();
1346 let right_len = right_node.len();
1348 // necessary for correctness, but in a private module
1349 debug_assert!(left_len + right_len + 1 <= CAPACITY);
// Pull the separating key/value down from the parent into the left node,
// then append the right node's pairs after it.
1352 ptr::write(left_node.keys_mut().get_unchecked_mut(left_len),
1353 slice_remove(self.node.keys_mut(), self.idx));
1354 ptr::copy_nonoverlapping(
1355 right_node.keys().as_ptr(),
1356 left_node.keys_mut().as_mut_ptr().add(left_len + 1),
1359 ptr::write(left_node.vals_mut().get_unchecked_mut(left_len),
1360 slice_remove(self.node.vals_mut(), self.idx));
1361 ptr::copy_nonoverlapping(
1362 right_node.vals().as_ptr(),
1363 left_node.vals_mut().as_mut_ptr().add(left_len + 1),
// Drop the parent's edge to the (now empty) right node and rewire the
// parent indices of the children that shifted left.
1367 slice_remove(&mut self.node.as_internal_mut().edges, self.idx + 1);
1368 for i in self.idx+1..self.node.len() {
1369 Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
1371 (*self.node.as_leaf_mut()).len -= 1;
1373 (*left_node.as_leaf_mut()).len += right_len as u16 + 1;
// If the merged children are themselves internal, also move the right
// node's edges across and fix those grandchildren's parent links.
1375 if self.node.height > 1 {
1376 ptr::copy_nonoverlapping(
1377 right_node.cast_unchecked().as_internal().edges.as_ptr(),
1378 left_node.cast_unchecked()
1386 for i in left_len+1..left_len+right_len+2 {
1388 left_node.cast_unchecked().reborrow_mut(),
1390 ).correct_parent_link();
// Finally free the right node's allocation; the layout depends on whether
// it was an internal or a leaf node (the dealloc calls are elided here).
1394 right_node.node.cast(),
1395 Layout::new::<InternalNode<K, V>>(),
1399 right_node.node.cast(),
1400 Layout::new::<LeafNode<K, V>>(),
1404 Handle::new_edge(self.node, self.idx)
1408 /// This removes a key/value pair from the left child and replaces it with the key/value pair
1409 /// pointed to by this handle while pushing the old key/value pair of this handle into the right
1411 pub fn steal_left(&mut self) {
// Pop the last pair (and trailing edge, if internal) off the left child.
1413 let (k, v, edge) = self.reborrow_mut().left_edge().descend().pop();
// Rotate: stolen pair replaces the parent's separator...
1415 let k = mem::replace(self.reborrow_mut().into_kv_mut().0, k);
1416 let v = mem::replace(self.reborrow_mut().into_kv_mut().1, v);
// ...and the old separator is pushed onto the front of the right child.
1418 match self.reborrow_mut().right_edge().descend().force() {
1419 ForceResult::Leaf(mut leaf) => leaf.push_front(k, v),
1420 ForceResult::Internal(mut internal) => internal.push_front(k, v, edge.unwrap())
1425 /// This removes a key/value pair from the right child and replaces it with the key/value pair
1426 /// pointed to by this handle while pushing the old key/value pair of this handle into the left
1428 pub fn steal_right(&mut self) {
// Mirror image of steal_left: pop from the front of the right child,
// rotate through the parent separator, push onto the back of the left child.
1430 let (k, v, edge) = self.reborrow_mut().right_edge().descend().pop_front();
1432 let k = mem::replace(self.reborrow_mut().into_kv_mut().0, k);
1433 let v = mem::replace(self.reborrow_mut().into_kv_mut().1, v);
1435 match self.reborrow_mut().left_edge().descend().force() {
1436 ForceResult::Leaf(mut leaf) => leaf.push(k, v),
1437 ForceResult::Internal(mut internal) => internal.push(k, v, edge.unwrap())
1442 /// This does stealing similar to `steal_left` but steals multiple elements at once.
1443 pub fn bulk_steal_left(&mut self, count: usize) {
// ptr::read duplicates the handle so both child edges can be descended.
1445 let mut left_node = ptr::read(self).left_edge().descend();
1446 let left_len = left_node.len();
1447 let mut right_node = ptr::read(self).right_edge().descend();
1448 let right_len = right_node.len();
1450 // Make sure that we may steal safely.
1451 debug_assert!(right_len + count <= CAPACITY);
1452 debug_assert!(left_len >= count);
1454 let new_left_len = left_len - count;
// Raw key/value pointers for the two children and the parent separator.
1458 let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
1459 let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
1461 let kv = self.reborrow_mut().into_kv_mut();
1462 (kv.0 as *mut K, kv.1 as *mut V)
1465 // Make room for stolen elements in the right child.
// Overlapping shift, hence ptr::copy rather than copy_nonoverlapping.
1466 ptr::copy(right_kv.0,
1467 right_kv.0.add(count),
1469 ptr::copy(right_kv.1,
1470 right_kv.1.add(count),
1473 // Move elements from the left child to the right one.
1474 move_kv(left_kv, new_left_len + 1, right_kv, 0, count - 1);
1476 // Move parent's key/value pair to the right child.
1477 move_kv(parent_kv, 0, right_kv, count - 1, 1);
1479 // Move the left-most stolen pair to the parent.
1480 move_kv(left_kv, new_left_len, parent_kv, 0, 1);
// Fix both children's lengths to reflect the rotation.
1483 (*left_node.reborrow_mut().as_leaf_mut()).len -= count as u16;
1484 (*right_node.reborrow_mut().as_leaf_mut()).len += count as u16;
// Internal children additionally carry edges that must move with the pairs.
1486 match (left_node.force(), right_node.force()) {
1487 (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
1488 // Make room for stolen edges.
1489 let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
1490 ptr::copy(right_edges,
1491 right_edges.add(count),
1493 right.correct_childrens_parent_links(count, count + right_len + 1);
1495 move_edges(left, new_left_len + 1, right, 0, count);
1497 (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
// Mixed heights would violate the uniform-depth invariant.
1498 _ => { unreachable!(); }
1503 /// The symmetric clone of `bulk_steal_left`.
1504 pub fn bulk_steal_right(&mut self, count: usize) {
1506 let mut left_node = ptr::read(self).left_edge().descend();
1507 let left_len = left_node.len();
1508 let mut right_node = ptr::read(self).right_edge().descend();
1509 let right_len = right_node.len();
1511 // Make sure that we may steal safely.
1512 debug_assert!(left_len + count <= CAPACITY);
1513 debug_assert!(right_len >= count);
1515 let new_right_len = right_len - count;
// Raw key/value pointers for the two children and the parent separator.
1519 let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
1520 let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
1522 let kv = self.reborrow_mut().into_kv_mut();
1523 (kv.0 as *mut K, kv.1 as *mut V)
1526 // Move parent's key/value pair to the left child.
1527 move_kv(parent_kv, 0, left_kv, left_len, 1);
1529 // Move elements from the right child to the left one.
1530 move_kv(right_kv, 0, left_kv, left_len + 1, count - 1);
1532 // Move the right-most stolen pair to the parent.
1533 move_kv(right_kv, count - 1, parent_kv, 0, 1);
1535 // Fix right indexing
// Overlapping left-shift of the right child's remaining pairs (ptr::copy).
1536 ptr::copy(right_kv.0.add(count),
1539 ptr::copy(right_kv.1.add(count),
// Fix both children's lengths to reflect the rotation.
1544 (*left_node.reborrow_mut().as_leaf_mut()).len += count as u16;
1545 (*right_node.reborrow_mut().as_leaf_mut()).len -= count as u16;
// Internal children additionally carry edges that must move with the pairs.
1547 match (left_node.force(), right_node.force()) {
1548 (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
1549 move_edges(right.reborrow_mut(), 0, left, left_len + 1, count);
1551 // Fix right indexing.
1552 let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
1553 ptr::copy(right_edges.add(count),
1556 right.correct_childrens_parent_links(0, new_right_len + 1);
1558 (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
// Mixed heights would violate the uniform-depth invariant.
1559 _ => { unreachable!(); }
// Moves `count` keys and values from `source[source_offset..]` to
// `dest[dest_offset..]` by raw non-overlapping copy. Unsafe: caller must
// guarantee the ranges are in-bounds, disjoint, and that the moved-from
// slots are treated as logically uninitialized afterwards.
1565 unsafe fn move_kv<K, V>(
1566 source: (*mut K, *mut V), source_offset: usize,
1567 dest: (*mut K, *mut V), dest_offset: usize,
1570 ptr::copy_nonoverlapping(source.0.add(source_offset),
1571 dest.0.add(dest_offset),
1573 ptr::copy_nonoverlapping(source.1.add(source_offset),
1574 dest.1.add(dest_offset),
1578 // Source and destination must have the same height.
// Moves `count` child edges between two internal nodes, then rewires the
// moved children's parent pointers to `dest`. Unsafe: caller must guarantee
// both ranges are in-bounds and non-overlapping.
1579 unsafe fn move_edges<K, V>(
1580 mut source: NodeRef<marker::Mut<'_>, K, V, marker::Internal>, source_offset: usize,
1581 mut dest: NodeRef<marker::Mut<'_>, K, V, marker::Internal>, dest_offset: usize,
1584 let source_ptr = source.as_internal_mut().edges.as_mut_ptr();
1585 let dest_ptr = dest.as_internal_mut().edges.as_mut_ptr();
1586 ptr::copy_nonoverlapping(source_ptr.add(source_offset),
1587 dest_ptr.add(dest_offset),
// Moved children still point at `source`; fix them up.
1589 dest.correct_childrens_parent_links(dest_offset, dest_offset + count);
1592 impl<BorrowType, K, V, HandleType>
1593 Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, HandleType> {
1595 /// Checks whether the underlying node is an `Internal` node or a `Leaf` node.
1596 pub fn force(self) -> ForceResult<
1597 Handle<NodeRef<BorrowType, K, V, marker::Leaf>, HandleType>,
1598 Handle<NodeRef<BorrowType, K, V, marker::Internal>, HandleType>
// Delegates to NodeRef::force and rewraps the node in a Handle with the
// narrowed type; idx fields of the rebuilt handles are elided in this view.
1600 match self.node.force() {
1601 ForceResult::Leaf(node) => ForceResult::Leaf(Handle {
1604 _marker: PhantomData
1606 ForceResult::Internal(node) => ForceResult::Internal(Handle {
1609 _marker: PhantomData
1615 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
1616 /// Move the suffix after `self` from one node to another one. `right` must be empty.
1617 /// The first edge of `right` remains unchanged.
1618 pub fn move_suffix(&mut self,
1619 right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>) {
// Everything at index >= self.idx moves to `right`.
1621 let left_new_len = self.idx;
1622 let mut left_node = self.reborrow_mut().into_node();
1624 let right_new_len = left_node.len() - left_new_len;
1625 let mut right_node = right.reborrow_mut();
1627 debug_assert!(right_node.len() == 0);
1628 debug_assert!(left_node.height == right_node.height);
1630 let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
1631 let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
1634 move_kv(left_kv, left_new_len, right_kv, 0, right_new_len);
// Truncate left, grow right.
1636 (*left_node.reborrow_mut().as_leaf_mut()).len = left_new_len as u16;
1637 (*right_node.reborrow_mut().as_leaf_mut()).len = right_new_len as u16;
// Internal nodes also move the edges after the split point; the first edge
// of `right` is left in place (edges go to dest offset 1).
1639 match (left_node.force(), right_node.force()) {
1640 (ForceResult::Internal(left), ForceResult::Internal(right)) => {
1641 move_edges(left, left_new_len + 1, right, 1, right_new_len);
1643 (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
// Mixed heights would violate the uniform-depth invariant.
1644 _ => { unreachable!(); }
// Outcome of a runtime leaf-vs-internal check (variant lines elided in this view).
1650 pub enum ForceResult<Leaf, Internal> {
// Outcome of an insertion: either the pair fit in place, or the node split
// into (left, separator key, separator value, new right root).
1655 pub enum InsertResult<'a, K, V, Type> {
1656 Fit(Handle<NodeRef<marker::Mut<'a>, K, V, Type>, marker::KV>),
1657 Split(NodeRef<marker::Mut<'a>, K, V, Type>, K, V, Root<K, V>)
// Zero-sized marker types used as type-level tags on NodeRef/Handle:
// node category (Internal / LeafOrInternal; the Leaf marker is elided in this
// view) and borrow flavor (Immut<'a> shared, Mut<'a> exclusive).
1661 use core::marker::PhantomData;
1664 pub enum Internal { }
1665 pub enum LeafOrInternal { }
1668 pub struct Immut<'a>(PhantomData<&'a ()>);
1669 pub struct Mut<'a>(PhantomData<&'a mut ()>);
// Inserts `val` at `idx`, shifting the tail right by one raw copy. Unsafe:
// the caller must guarantee there is an extra slot past `slice.len()` to
// shift into (the slice is a prefix view of a larger buffer).
1675 unsafe fn slice_insert<T>(slice: &mut [T], idx: usize, val: T) {
1677 slice.as_ptr().add(idx),
1678 slice.as_mut_ptr().add(idx + 1),
// The slot at idx is now logically vacant; write without dropping.
1681 ptr::write(slice.get_unchecked_mut(idx), val);
1684 unsafe fn slice_remove<T>(slice: &mut [T], idx: usize) -> T {
1685 let ret = ptr::read(slice.get_unchecked(idx));
1687 slice.as_ptr().add(idx + 1),
1688 slice.as_mut_ptr().add(idx),
1689 slice.len() - idx - 1