1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 // This is an attempt at an implementation following the ideal
14 // struct BTreeMap<K, V> {
16 // root: Option<Box<Node<K, V, height>>>
19 // struct Node<K, V, height: usize> {
20 // keys: [K; 2 * B - 1],
21 // vals: [V; 2 * B - 1],
22 // edges: if height > 0 {
23 // [Box<Node<K, V, height - 1>>; 2 * B]
25 // parent: *const Node<K, V, height + 1>,
31 // Since Rust doesn't actually have dependent types and polymorphic recursion,
32 // we make do with lots of unsafety.
34 // A major goal of this module is to avoid complexity by treating the tree as a generic (if
35 // weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
36 // this module doesn't care whether the entries are sorted, which nodes can be underfull, or
37 // even what underfull means. However, we do rely on a few invariants:
39 // - Trees must have uniform depth/height. This means that every path down to a leaf from a
40 // given node has exactly the same length.
41 // - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges.
42 // This implies that even an empty internal node has at least one edge.
44 use core::marker::PhantomData;
45 use core::mem::{self, MaybeUninit};
46 use core::ptr::{self, Unique, NonNull};
49 use alloc::{Global, Alloc, Layout};
// `B` is the tree's branching factor; a non-root node holds between `MIN_LEN`
// and `CAPACITY` key/value pairs (see the module header's ideal representation).
// NOTE(review): the `const B` definition itself is elided from this excerpt — confirm.
pub const MIN_LEN: usize = B - 1;
pub const CAPACITY: usize = 2 * B - 1;
56 /// The underlying representation of leaf nodes. Note that it is often unsafe to actually store
57 /// these, since only the first `len` keys and values are assumed to be initialized. As such,
58 /// these should always be put behind pointers, and specifically behind `BoxedNode` in the owned
61 /// We put the metadata first so that its position is the same for every `K` and `V`, in order
62 /// to statically allocate a single dummy node to avoid allocations. This struct is `repr(C)` to
63 /// prevent them from being reordered.
struct LeafNode<K, V> {
/// We use `*const` as opposed to `*mut` so as to be covariant in `K` and `V`.
/// This either points to an actual node or is null.
parent: *const InternalNode<K, V>,
/// This node's index into the parent node's `edges` array.
/// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
/// This is only guaranteed to be initialized when `parent` is non-null.
parent_idx: MaybeUninit<u16>,
/// The number of keys and values this node stores.
/// This is next to `parent_idx` to encourage the compiler to join `len` and
/// `parent_idx` into the same 32-bit word, reducing space overhead.
// NOTE(review): the `len: u16` field itself (referenced throughout as
// `as_leaf().len`) is elided from this excerpt — confirm it is declared here.
/// The arrays storing the actual data of the node. Only the first `len` elements of each
/// array are initialized and valid.
keys: MaybeUninit<[K; CAPACITY]>,
vals: MaybeUninit<[V; CAPACITY]>,
impl<K, V> LeafNode<K, V> {
/// Creates a new `LeafNode`. Unsafe because all nodes should really be hidden behind
/// `BoxedNode`, preventing accidental dropping of uninitialized keys and values.
unsafe fn new() -> Self {
// As a general policy, we leave fields uninitialized if they can be, as this should
// be both slightly faster and easier to track in Valgrind.
keys: MaybeUninit::uninitialized(),
vals: MaybeUninit::uninitialized(),
parent_idx: MaybeUninit::uninitialized(),
/// Returns whether `self` is the statically allocated `EMPTY_ROOT_NODE`
/// placeholder, decided purely by pointer identity.
fn is_shared_root(&self) -> bool {
ptr::eq(self, &EMPTY_ROOT_NODE as *const _ as *const _)
// We need to implement Sync here in order to make a static instance.
unsafe impl Sync for LeafNode<(), ()> {}
// An empty node used as a placeholder for the root node, to avoid allocations.
// We use () in order to save space, since no operation on an empty tree will
// ever take a pointer past the first key.
static EMPTY_ROOT_NODE: LeafNode<(), ()> = LeafNode {
// The data fields may stay uninitialized: the shared root is never read
// past its metadata (see the comment above).
parent_idx: MaybeUninit::uninitialized(),
keys: MaybeUninit::uninitialized(),
vals: MaybeUninit::uninitialized(),
/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
/// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
/// `InternalNode` can be directly casted to a pointer to the underlying `LeafNode` portion of the
/// node, allowing code to act on leaf and internal nodes generically without having to even check
/// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`.
struct InternalNode<K, V> {
// Must be the first field so the leaf-pointer cast described above is sound.
data: LeafNode<K, V>,
/// The pointers to the children of this node. `len + 1` of these are considered
/// initialized and valid.
edges: [BoxedNode<K, V>; 2 * B],
impl<K, V> InternalNode<K, V> {
/// Creates a new `InternalNode`.
/// This is unsafe for two reasons. First, it returns an `InternalNode` by value, risking
/// dropping of uninitialized fields. Second, an invariant of internal nodes is that `len + 1`
/// edges are initialized and valid, meaning that even when the node is empty (having a
/// `len` of 0), there must be one initialized and valid edge. This function does not set up
/// such an edge — the caller must do so before the node is used.
unsafe fn new() -> Self {
data: LeafNode::new(),
// Edges are left entirely uninitialized; see the safety note above.
edges: mem::uninitialized()
/// An owned pointer to a node. This basically is either `Box<LeafNode<K, V>>` or
/// `Box<InternalNode<K, V>>`. However, it contains no information as to which of the two types
/// of nodes is actually behind the box, and, partially due to this lack of information, has no
/// destructor — the tree must be cleaned up manually.
struct BoxedNode<K, V> {
ptr: Unique<LeafNode<K, V>>
impl<K, V> BoxedNode<K, V> {
/// Wraps an owned leaf node.
fn from_leaf(node: Box<LeafNode<K, V>>) -> Self {
BoxedNode { ptr: Box::into_unique(node) }
/// Wraps an owned internal node, erasing its type: the pointer is stored as a
/// pointer to the leading `LeafNode` portion (sound because `InternalNode` is
/// `repr(C)` with `data` first — see the `InternalNode` docs).
fn from_internal(node: Box<InternalNode<K, V>>) -> Self {
BoxedNode { ptr: Unique::new_unchecked(Box::into_raw(node) as *mut LeafNode<K, V>) }
/// Claims ownership of the node behind a raw pointer; the caller must guarantee
/// exclusive ownership.
unsafe fn from_ptr(ptr: NonNull<LeafNode<K, V>>) -> Self {
BoxedNode { ptr: Unique::from(ptr) }
/// Returns the underlying node pointer without giving up ownership.
fn as_ptr(&self) -> NonNull<LeafNode<K, V>> {
NonNull::from(self.ptr)
/// An owned tree. Note that despite being owned, this does not have a destructor,
/// and must be cleaned up manually.
pub struct Root<K, V> {
node: BoxedNode<K, V>,
// NOTE(review): a `height` field is referenced elsewhere (e.g. `self.height` in
// `pop_level`) but is elided from this excerpt — confirm it is declared here.
// `Root` uniquely owns its nodes, so it is `Sync`/`Send` exactly when `K`/`V` are.
unsafe impl<K: Sync, V: Sync> Sync for Root<K, V> { }
unsafe impl<K: Send, V: Send> Send for Root<K, V> { }
impl<K, V> Root<K, V> {
/// Whether the root currently points at the statically allocated shared empty node.
pub fn is_shared_root(&self) -> bool {
self.as_ref().is_shared_root()
/// Returns a root pointing at the static `EMPTY_ROOT_NODE`, avoiding an allocation
/// for an empty tree.
pub fn shared_empty_root() -> Self {
BoxedNode::from_ptr(NonNull::new_unchecked(
&EMPTY_ROOT_NODE as *const _ as *const LeafNode<K, V> as *mut _
/// Creates a tree consisting of a single freshly allocated empty leaf node.
pub fn new_leaf() -> Self {
node: BoxedNode::from_leaf(Box::new(unsafe { LeafNode::new() })),
// NOTE(review): the `pub fn as_ref(&self)` header for the following body is
// elided from this excerpt.
-> NodeRef<marker::Immut, K, V, marker::LeafOrInternal> {
node: self.node.as_ptr(),
root: self as *const _ as *mut _,
_marker: PhantomData,
pub fn as_mut(&mut self)
-> NodeRef<marker::Mut, K, V, marker::LeafOrInternal> {
node: self.node.as_ptr(),
root: self as *mut _,
_marker: PhantomData,
pub fn into_ref(self)
-> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
node: self.node.as_ptr(),
root: ptr::null_mut(), // FIXME: Is there anything better to do here?
_marker: PhantomData,
/// Adds a new internal node with a single edge, pointing to the previous root, and makes that
/// new node the root. This increases the height by 1 and is the opposite of `pop_level`.
pub fn push_level(&mut self)
-> NodeRef<marker::Mut, K, V, marker::Internal> {
debug_assert!(!self.is_shared_root());
let mut new_node = Box::new(unsafe { InternalNode::new() });
// The old root becomes the sole child of the new root.
new_node.edges[0] = unsafe { BoxedNode::from_ptr(self.node.as_ptr()) };
self.node = BoxedNode::from_internal(new_node);
let mut ret = NodeRef {
node: self.node.as_ptr(),
root: self as *mut _,
// Fix the child's parent link, which still refers to the pre-push state.
ret.reborrow_mut().first_edge().correct_parent_link();
/// Removes the root node, using its first child as the new root. This cannot be called when
/// the tree consists only of a leaf node. As it is intended only to be called when the root
/// has only one edge, no cleanup is done on any of the other children or elements of the root.
/// This decreases the height by 1 and is the opposite of `push_level`.
pub fn pop_level(&mut self) {
debug_assert!(self.height > 0);
let top = self.node.ptr;
BoxedNode::from_ptr(self.as_mut()
.cast_unchecked::<marker::Internal>()
// The promoted child is now the root and has no parent.
self.as_mut().as_leaf_mut().parent = ptr::null();
// Free the old root's allocation, which was an `InternalNode`.
Global.dealloc(NonNull::from(top).cast(), Layout::new::<InternalNode<K, V>>());
// N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
// is `Mut`. This is technically wrong, but cannot result in any unsafety due to
// internal use of `NodeRef` because we stay completely generic over `K` and `V`.
// However, whenever a public type wraps `NodeRef`, make sure that it has the
// correct variance.
/// A reference to a node.
/// This type has a number of parameters that controls how it acts:
/// - `BorrowType`: This can be `Immut<'a>` or `Mut<'a>` for some `'a` or `Owned`.
/// When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`,
/// when this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
/// and when this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`.
/// - `K` and `V`: These control what types of things are stored in the nodes.
/// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
/// `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
/// `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
/// `NodeRef` could be pointing to either type of node.
pub struct NodeRef<BorrowType, K, V, Type> {
// NOTE(review): a `height: usize` field is referenced elsewhere (e.g. in
// `ascend` and `force`) but is elided from this excerpt — confirm.
node: NonNull<LeafNode<K, V>>,
// This is null unless the borrow type is `Mut`
root: *const Root<K, V>,
_marker: PhantomData<(BorrowType, Type)>
// Immutable `NodeRef`s are freely copyable; the `Mut` variant deliberately is not.
impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> { }
impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
fn clone(&self) -> Self {
// Send/Sync follow the usual reference rules: shared references need `Sync`
// contents; sending a mutable or owned reference needs `Send` contents.
unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync
for NodeRef<BorrowType, K, V, Type> { }
unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send
for NodeRef<marker::Immut<'a>, K, V, Type> { }
unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send
for NodeRef<marker::Mut<'a>, K, V, Type> { }
unsafe impl<K: Send, V: Send, Type> Send
for NodeRef<marker::Owned, K, V, Type> { }
impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
/// Exposes the full `InternalNode` behind this reference (the cast is sound
/// because internal nodes are `repr(C)` with the `LeafNode` portion first).
fn as_internal(&self) -> &InternalNode<K, V> {
&*(self.node.as_ptr() as *mut InternalNode<K, V>)
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
/// Mutable counterpart of `as_internal`.
fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
&mut *(self.node.as_ptr() as *mut InternalNode<K, V>)
impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
/// Finds the length of the node. This is the number of keys or values. In an
/// internal node, the number of edges is `len() + 1`.
pub fn len(&self) -> usize {
self.as_leaf().len as usize
/// Returns the height of this node in the whole tree. Zero height denotes the
/// leaf level.
pub fn height(&self) -> usize {
/// Removes any static information about whether this node is a `Leaf` or an
/// `Internal` node.
pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
/// Temporarily takes out another, immutable reference to the same node.
fn reborrow<'a>(&'a self) -> NodeRef<marker::Immut<'a>, K, V, Type> {
/// Views this node as its `LeafNode` portion (valid for internal nodes too,
/// since `InternalNode` starts with a `LeafNode`).
fn as_leaf(&self) -> &LeafNode<K, V> {
pub fn is_shared_root(&self) -> bool {
self.as_leaf().is_shared_root()
/// Borrows the initialized part of the key array as a slice.
pub fn keys(&self) -> &[K] {
self.reborrow().into_key_slice()
/// Borrows the initialized part of the value array as a slice.
fn vals(&self) -> &[V] {
self.reborrow().into_val_slice()
/// Finds the parent of the current node. Returns `Ok(handle)` if the current
/// node actually has a parent, where `handle` points to the edge of the parent
/// that points to the current node. Returns `Err(self)` if the current node has
/// no parent, giving back the original `NodeRef`.
/// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
/// both, upon success, do nothing.
pub fn ascend(self) -> Result<
// A null parent pointer means this node is the root.
let parent_as_leaf = self.as_leaf().parent as *const LeafNode<K, V>;
if let Some(non_zero) = NonNull::new(parent_as_leaf as *mut _) {
height: self.height + 1,
idx: unsafe { usize::from(*self.as_leaf().parent_idx.get_ref()) },
/// Returns a handle to the leftmost edge (index 0) of this node.
pub fn first_edge(self) -> Handle<Self, marker::Edge> {
Handle::new_edge(self, 0)
/// Returns a handle to the rightmost edge (index `len`) of this node.
pub fn last_edge(self) -> Handle<Self, marker::Edge> {
let len = self.len();
Handle::new_edge(self, len)
/// Note that `self` must be nonempty.
pub fn first_kv(self) -> Handle<Self, marker::KV> {
debug_assert!(self.len() > 0);
Handle::new_kv(self, 0)
/// Note that `self` must be nonempty.
pub fn last_kv(self) -> Handle<Self, marker::KV> {
let len = self.len();
debug_assert!(len > 0);
Handle::new_kv(self, len - 1)
impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
/// Similar to `ascend`, gets a reference to a node's parent node, but also
/// deallocates the current node in the process. This is unsafe because the
/// current node will still be accessible despite being deallocated.
pub unsafe fn deallocate_and_ascend(self) -> Option<
// The shared static root must never be deallocated.
debug_assert!(!self.is_shared_root());
let node = self.node;
let ret = self.ascend().ok();
Global.dealloc(node.cast(), Layout::new::<LeafNode<K, V>>());
impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
/// Similar to `ascend`, gets a reference to a node's parent node, but also
/// deallocates the current node in the process. This is unsafe because the
/// current node will still be accessible despite being deallocated.
pub unsafe fn deallocate_and_ascend(self) -> Option<
let node = self.node;
let ret = self.ascend().ok();
// Internal nodes use the larger `InternalNode` layout.
Global.dealloc(node.cast(), Layout::new::<InternalNode<K, V>>());
impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
/// Unsafely asserts to the compiler some static information about whether this
/// node is a `Leaf`.
unsafe fn cast_unchecked<NewType>(&mut self)
-> NodeRef<marker::Mut, K, V, NewType> {
/// Temporarily takes out another, mutable reference to the same node. Beware, as
/// this method is very dangerous, doubly so since it may not immediately appear
/// dangerous.
/// Because mutable pointers can roam anywhere around the tree and can even (through
/// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut`
/// can easily be used to make the original mutable pointer dangling, or, in the case
/// of a reborrowed handle, out of bounds.
// FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
// the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut, K, V, Type> {
/// Mutable view of this node's `LeafNode` portion.
fn as_leaf_mut(&mut self) -> &mut LeafNode<K, V> {
/// Mutable slice over the initialized keys.
fn keys_mut(&mut self) -> &mut [K] {
unsafe { self.reborrow_mut().into_key_slice_mut() }
/// Mutable slice over the initialized values.
fn vals_mut(&mut self) -> &mut [V] {
unsafe { self.reborrow_mut().into_val_slice_mut() }
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
fn into_key_slice(self) -> &'a [K] {
// When taking a pointer to the keys, if our key has a stricter
// alignment requirement than the shared root does, then the pointer
// would be out of bounds, which LLVM assumes will not happen. If the
// alignment is more strict, we need to make an empty slice that doesn't
// use an out of bounds pointer.
if mem::align_of::<K>() > mem::align_of::<LeafNode<(), ()>>() && self.is_shared_root() {
// Here either it's not the root, or the alignment is less strict,
// in which case the keys pointer will point "one-past-the-end" of
// the node, which is allowed by LLVM.
slice::from_raw_parts(
self.as_leaf().keys.as_ptr() as *const K,
fn into_val_slice(self) -> &'a [V] {
debug_assert!(!self.is_shared_root());
slice::from_raw_parts(
self.as_leaf().vals.as_ptr() as *const V,
fn into_slices(self) -> (&'a [K], &'a [V]) {
// `ptr::read` duplicates `self` (it is `Copy`-like here) so that both
// consuming slice conversions can each take a copy.
let k = unsafe { ptr::read(&self) };
(k.into_key_slice(), self.into_val_slice())
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
/// Gets a mutable reference to the root itself. This is useful primarily when the
/// height of the tree needs to be adjusted. Never call this on a reborrowed pointer.
pub fn into_root_mut(self) -> &'a mut Root<K, V> {
&mut *(self.root as *mut Root<K, V>)
fn into_key_slice_mut(mut self) -> &'a mut [K] {
// Same shared-root alignment caveat as in `into_key_slice` above.
if mem::align_of::<K>() > mem::align_of::<LeafNode<(), ()>>() && self.is_shared_root() {
slice::from_raw_parts_mut(
self.as_leaf_mut().keys.as_mut_ptr() as *mut K,
fn into_val_slice_mut(mut self) -> &'a mut [V] {
debug_assert!(!self.is_shared_root());
slice::from_raw_parts_mut(
self.as_leaf_mut().vals.as_mut_ptr() as *mut V,
fn into_slices_mut(self) -> (&'a mut [K], &'a mut [V]) {
// Duplicate `self` so both consuming conversions can run; the resulting
// slices alias different arrays, so this is sound.
let k = unsafe { ptr::read(&self) };
(k.into_key_slice_mut(), self.into_val_slice_mut())
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
/// Adds a key/value pair to the end of the node.
pub fn push(&mut self, key: K, val: V) {
// Necessary for correctness, but this is an internal module
debug_assert!(self.len() < CAPACITY);
debug_assert!(!self.is_shared_root());
let idx = self.len();
// Write directly into the first uninitialized slots.
ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
self.as_leaf_mut().len += 1;
/// Adds a key/value pair to the beginning of the node.
pub fn push_front(&mut self, key: K, val: V) {
// Necessary for correctness, but this is an internal module
debug_assert!(self.len() < CAPACITY);
debug_assert!(!self.is_shared_root());
// Shift existing pairs right by one and insert at index 0.
slice_insert(self.keys_mut(), 0, key);
slice_insert(self.vals_mut(), 0, val);
self.as_leaf_mut().len += 1;
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
/// Adds a key/value pair and an edge to go to the right of that pair to
/// the end of the node.
pub fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
// Necessary for correctness, but this is an internal module
debug_assert!(edge.height == self.height - 1);
debug_assert!(self.len() < CAPACITY);
let idx = self.len();
ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
ptr::write(self.as_internal_mut().edges.get_unchecked_mut(idx + 1), edge.node);
self.as_leaf_mut().len += 1;
// The newly adopted child must point back at this node.
Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
/// Fixes the parent links of the children at edge indices `first..after_last`.
fn correct_childrens_parent_links(&mut self, first: usize, after_last: usize) {
for i in first..after_last {
Handle::new_edge(unsafe { self.reborrow_mut() }, i).correct_parent_link();
/// Fixes the parent links of all `len + 1` children of this node.
fn correct_all_childrens_parent_links(&mut self) {
let len = self.len();
self.correct_childrens_parent_links(0, len + 1);
/// Adds a key/value pair and an edge to go to the left of that pair to
/// the beginning of the node.
pub fn push_front(&mut self, key: K, val: V, edge: Root<K, V>) {
// Necessary for correctness, but this is an internal module
debug_assert!(edge.height == self.height - 1);
debug_assert!(self.len() < CAPACITY);
slice_insert(self.keys_mut(), 0, key);
slice_insert(self.vals_mut(), 0, val);
slice::from_raw_parts_mut(
self.as_internal_mut().edges.as_mut_ptr(),
self.as_leaf_mut().len += 1;
// Every child shifted right by one, so all parent_idx values are stale.
self.correct_all_childrens_parent_links();
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
/// Removes a key/value pair from the end of this node. If this is an internal node,
/// also removes the edge that was to the right of that pair.
pub fn pop(&mut self) -> (K, V, Option<Root<K, V>>) {
// Necessary for correctness, but this is an internal module
debug_assert!(self.len() > 0);
let idx = self.len() - 1;
let key = ptr::read(self.keys().get_unchecked(idx));
let val = ptr::read(self.vals().get_unchecked(idx));
let edge = match self.reborrow_mut().force() {
ForceResult::Leaf(_) => None,
ForceResult::Internal(internal) => {
let edge = ptr::read(internal.as_internal().edges.get_unchecked(idx + 1));
let mut new_root = Root { node: edge, height: internal.height - 1 };
// The detached subtree becomes its own root; clear its parent link.
new_root.as_mut().as_leaf_mut().parent = ptr::null();
self.as_leaf_mut().len -= 1;
/// Removes a key/value pair from the beginning of this node. If this is an internal node,
/// also removes the edge that was to the left of that pair.
pub fn pop_front(&mut self) -> (K, V, Option<Root<K, V>>) {
// Necessary for correctness, but this is an internal module
debug_assert!(self.len() > 0);
let old_len = self.len();
let key = slice_remove(self.keys_mut(), 0);
let val = slice_remove(self.vals_mut(), 0);
let edge = match self.reborrow_mut().force() {
ForceResult::Leaf(_) => None,
ForceResult::Internal(mut internal) => {
let edge = slice_remove(
slice::from_raw_parts_mut(
internal.as_internal_mut().edges.as_mut_ptr(),
let mut new_root = Root { node: edge, height: internal.height - 1 };
new_root.as_mut().as_leaf_mut().parent = ptr::null();
// Remaining children shifted left by one; refresh their parent links.
for i in 0..old_len {
Handle::new_edge(internal.reborrow_mut(), i).correct_parent_link();
self.as_leaf_mut().len -= 1;
/// Returns raw pointers to the starts of the key and value arrays.
fn into_kv_pointers_mut(mut self) -> (*mut K, *mut V) {
self.keys_mut().as_mut_ptr(),
self.vals_mut().as_mut_ptr()
impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
/// Checks whether a node is an `Internal` node or a `Leaf` node.
pub fn force(self) -> ForceResult<
NodeRef<BorrowType, K, V, marker::Leaf>,
NodeRef<BorrowType, K, V, marker::Internal>
// Height zero means leaf level (trees have uniform depth, per the module header).
if self.height == 0 {
ForceResult::Leaf(NodeRef {
ForceResult::Internal(NodeRef {
/// A reference to a specific key/value pair or edge within a node. The `Node` parameter
/// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key/value
/// pair) or `Edge` (signifying a handle on an edge).
/// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
/// a child node, these represent the spaces where child pointers would go between the key/value
/// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
/// to the left of the node, one between the two pairs, and one at the right of the node.
pub struct Handle<Node, Type> {
// NOTE(review): `node: Node` and `idx: usize` fields (referenced throughout
// as `self.node` / `self.idx`) are elided from this excerpt — confirm.
_marker: PhantomData<Type>
impl<Node: Copy, Type> Copy for Handle<Node, Type> { }
// We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
// `Clone`able is when it is an immutable reference and therefore `Copy`.
impl<Node: Copy, Type> Clone for Handle<Node, Type> {
fn clone(&self) -> Self {
impl<Node, Type> Handle<Node, Type> {
/// Retrieves the node that contains the edge or key/value pair this handle points to.
pub fn into_node(self) -> Node {
impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
/// Creates a new handle to a key/value pair in `node`. `idx` must be less than `node.len()`.
pub fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
// Necessary for correctness, but in a private module
debug_assert!(idx < node.len());
/// The edge immediately to the left of this pair (same index).
pub fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
Handle::new_edge(self.node, self.idx)
/// The edge immediately to the right of this pair (index + 1).
pub fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
Handle::new_edge(self.node, self.idx + 1)
// Two handles are equal when they point at the same position of the same node.
impl<BorrowType, K, V, NodeType, HandleType> PartialEq
for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType> {
fn eq(&self, other: &Self) -> bool {
self.node.node == other.node.node && self.idx == other.idx
impl<BorrowType, K, V, NodeType, HandleType>
Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType> {
/// Temporarily takes out another, immutable handle on the same location.
pub fn reborrow(&self)
-> Handle<NodeRef<marker::Immut, K, V, NodeType>, HandleType> {
// We can't use Handle::new_kv or Handle::new_edge because we don't know our type
node: self.node.reborrow(),
impl<'a, K, V, NodeType, HandleType>
Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
/// Temporarily takes out another, mutable handle on the same location. Beware, as
/// this method is very dangerous, doubly so since it may not immediately appear
/// dangerous.
/// Because mutable pointers can roam anywhere around the tree and can even (through
/// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut`
/// can easily be used to make the original mutable pointer dangling, or, in the case
/// of a reborrowed handle, out of bounds.
// FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
// the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
pub unsafe fn reborrow_mut(&mut self)
-> Handle<NodeRef<marker::Mut, K, V, NodeType>, HandleType> {
// We can't use Handle::new_kv or Handle::new_edge because we don't know our type
node: self.node.reborrow_mut(),
impl<BorrowType, K, V, NodeType>
Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
/// Creates a new handle to an edge in `node`. `idx` must be less than or equal to
/// `node.len()`.
pub fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
// Necessary for correctness, but in a private module
debug_assert!(idx <= node.len());
// NOTE(review): the `pub fn left_kv(self)` header for the following body is
// elided from this excerpt; it returns the KV to the left when `idx > 0`.
-> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
Ok(Handle::new_kv(self.node, self.idx - 1))
pub fn right_kv(self)
-> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
if self.idx < self.node.len() {
Ok(Handle::new_kv(self.node, self.idx))
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
/// Inserts a new key/value pair between the key/value pairs to the right and left of
/// this edge. This method assumes that there is enough space in the node for the new
/// pair to fit.
/// The returned pointer points to the inserted value.
fn insert_fit(&mut self, key: K, val: V) -> *mut V {
// Necessary for correctness, but in a private module
debug_assert!(self.node.len() < CAPACITY);
debug_assert!(!self.node.is_shared_root());
slice_insert(self.node.keys_mut(), self.idx, key);
slice_insert(self.node.vals_mut(), self.idx, val);
self.node.as_leaf_mut().len += 1;
self.node.vals_mut().get_unchecked_mut(self.idx)
/// Inserts a new key/value pair between the key/value pairs to the right and left of
/// this edge. This method splits the node if there isn't enough room.
/// The returned pointer points to the inserted value.
pub fn insert(mut self, key: K, val: V)
-> (InsertResult<'a, K, V, marker::Leaf>, *mut V) {
if self.node.len() < CAPACITY {
let ptr = self.insert_fit(key, val);
(InsertResult::Fit(Handle::new_kv(self.node, self.idx)), ptr)
// Full node: split around the middle pair, then insert into whichever
// half the edge index falls in.
let middle = Handle::new_kv(self.node, B);
let (mut left, k, v, mut right) = middle.split();
let ptr = if self.idx <= B {
Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val)
right.as_mut().cast_unchecked::<marker::Leaf>(),
).insert_fit(key, val)
(InsertResult::Split(left, k, v, right), ptr)
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
/// Fixes the parent pointer and index in the child node below this edge. This is useful
/// when the ordering of edges has been changed, such as in the various `insert` methods.
fn correct_parent_link(mut self) {
let idx = self.idx as u16;
let ptr = self.node.as_internal_mut() as *mut _;
let mut child = self.descend();
child.as_leaf_mut().parent = ptr;
child.as_leaf_mut().parent_idx.set(idx);
/// Unsafely asserts to the compiler some static information about whether the underlying
/// node of this handle is a `Leaf`.
unsafe fn cast_unchecked<NewType>(&mut self)
-> Handle<NodeRef<marker::Mut, K, V, NewType>, marker::Edge> {
Handle::new_edge(self.node.cast_unchecked(), self.idx)
/// Inserts a new key/value pair and an edge that will go to the right of that new pair
/// between this edge and the key/value pair to the right of this edge. This method assumes
/// that there is enough space in the node for the new pair to fit.
fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
// Necessary for correctness, but in an internal module
debug_assert!(self.node.len() < CAPACITY);
debug_assert!(edge.height == self.node.height - 1);
// This cast is a lie, but it allows us to reuse the key/value insertion logic.
self.cast_unchecked::<marker::Leaf>().insert_fit(key, val);
slice::from_raw_parts_mut(
self.node.as_internal_mut().edges.as_mut_ptr(),
// Children at and after the inserted edge have shifted; fix their links.
for i in (self.idx+1)..(self.node.len()+1) {
Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
/// Inserts a new key/value pair and an edge that will go to the right of that new pair
/// between this edge and the key/value pair to the right of this edge. This method splits
/// the node if there isn't enough room.
pub fn insert(mut self, key: K, val: V, edge: Root<K, V>)
-> InsertResult<'a, K, V, marker::Internal> {
// Necessary for correctness, but this is an internal module
debug_assert!(edge.height == self.node.height - 1);
if self.node.len() < CAPACITY {
self.insert_fit(key, val, edge);
InsertResult::Fit(Handle::new_kv(self.node, self.idx))
// Full node: split around the middle pair, as in the leaf version.
let middle = Handle::new_kv(self.node, B);
let (mut left, k, v, mut right) = middle.split();
Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val, edge);
right.as_mut().cast_unchecked::<marker::Internal>(),
).insert_fit(key, val, edge);
InsertResult::Split(left, k, v, right)
impl<BorrowType, K, V>
Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
/// Finds the node pointed to by this edge.
/// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
/// both, upon success, do nothing.
pub fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
// The child sits one level below this node.
height: self.node.height - 1,
node: unsafe { self.node.as_internal().edges.get_unchecked(self.idx).as_ptr() },
root: self.node.root,
_marker: PhantomData
1100 impl<'a, K: 'a, V: 'a, NodeType>
1101 Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
1103 pub fn into_kv(self) -> (&'a K, &'a V) {
1104 let (keys, vals) = self.node.into_slices();
1106 (keys.get_unchecked(self.idx), vals.get_unchecked(self.idx))
1111 impl<'a, K: 'a, V: 'a, NodeType>
1112 Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
// Consumes this handle and yields exclusive references to the key and value,
// with the full lifetime `'a` of the mutable tree borrow.
1114 pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
1115 let (keys, vals) = self.node.into_slices_mut();
// In bounds for the same reason as `into_kv`: KV handles have idx < len.
1117 (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx))
1122 impl<'a, K, V, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
// Like `into_kv_mut`, but only reborrows: the returned references live for
// the `&mut self` borrow, so the handle remains usable afterwards.
1123 pub fn kv_mut(&mut self) -> (&mut K, &mut V) {
1125 let (keys, vals) = self.node.reborrow_mut().into_slices_mut();
1126 (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx))
1131 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
1132 /// Splits the underlying node into three parts:
// Doc fix: the code below keeps indices 0..idx, i.e. the pairs to the LEFT
// of this handle (see `len = self.idx`), not the right.
1134 /// - The node is truncated to only contain the key/value pairs to the left of
1136 /// - The key and value pointed to by this handle are extracted.
1137 /// - All the key/value pairs to the right of this handle are put into a newly
1139 pub fn split(mut self)
1140 -> (NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, K, V, Root<K, V>) {
// The shared static empty-root sentinel must never be mutated.
1141 debug_assert!(!self.node.is_shared_root());
1143 let mut new_node = Box::new(LeafNode::new());
// Move the separating pair out by value; its slots are logically vacated by
// the length truncation below, so no double-drop can occur.
1145 let k = ptr::read(self.node.keys().get_unchecked(self.idx));
1146 let v = ptr::read(self.node.vals().get_unchecked(self.idx));
// Everything strictly to the right of the handle migrates to the new node.
1148 let new_len = self.node.len() - self.idx - 1;
1150 ptr::copy_nonoverlapping(
1151 self.node.keys().as_ptr().add(self.idx + 1),
1152 new_node.keys.as_mut_ptr() as *mut K,
1155 ptr::copy_nonoverlapping(
1156 self.node.vals().as_ptr().add(self.idx + 1),
1157 new_node.vals.as_mut_ptr() as *mut V,
// Adjusting the two lengths is what actually transfers ownership of the
// copied entries; the stale bytes left behind are never read again.
1161 self.node.as_leaf_mut().len = self.idx as u16;
1162 new_node.len = new_len as u16;
1168 node: BoxedNode::from_leaf(new_node),
1175 /// Removes the key/value pair pointed to by this handle, returning the edge between the
1176 /// now adjacent key/value pairs to the left and right of this handle.
1177 pub fn remove(mut self)
1178 -> (Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>, K, V) {
// The shared static empty-root sentinel must never be mutated.
1179 debug_assert!(!self.node.is_shared_root());
// Shift the tails of both parallel arrays left over the removed slot.
1181 let k = slice_remove(self.node.keys_mut(), self.idx);
1182 let v = slice_remove(self.node.vals_mut(), self.idx);
1183 self.node.as_leaf_mut().len -= 1;
// The edge at `self.idx` now sits between the former neighbours.
1184 (self.left_edge(), k, v)
1189 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
1190 /// Splits the underlying node into three parts:
// Doc fix: the code below keeps indices 0..idx, i.e. the entries to the LEFT
// of this handle (see `len = self.idx`), not the right.
1192 /// - The node is truncated to only contain the edges and key/value pairs to the
1193 /// left of this handle.
1194 /// - The key and value pointed to by this handle are extracted.
1195 /// - All the edges and key/value pairs to the right of this handle are put into
1196 /// a newly allocated node.
1197 pub fn split(mut self)
1198 -> (NodeRef<marker::Mut<'a>, K, V, marker::Internal>, K, V, Root<K, V>) {
1200 let mut new_node = Box::new(InternalNode::new());
// Move the separating pair out by value; its slots are logically vacated by
// the length truncation below.
1202 let k = ptr::read(self.node.keys().get_unchecked(self.idx));
1203 let v = ptr::read(self.node.vals().get_unchecked(self.idx));
// The new sibling sits at the same height as this node.
1205 let height = self.node.height;
1206 let new_len = self.node.len() - self.idx - 1;
// Move the keys, values and — unlike the leaf version — the child edges to
// the right of the handle into the new node.
1208 ptr::copy_nonoverlapping(
1209 self.node.keys().as_ptr().add(self.idx + 1),
1210 new_node.data.keys.as_mut_ptr() as *mut K,
1213 ptr::copy_nonoverlapping(
1214 self.node.vals().as_ptr().add(self.idx + 1),
1215 new_node.data.vals.as_mut_ptr() as *mut V,
1218 ptr::copy_nonoverlapping(
1219 self.node.as_internal().edges.as_ptr().add(self.idx + 1),
1220 new_node.edges.as_mut_ptr(),
// An internal node's length lives in its embedded leaf data, hence
// `as_leaf_mut` here even though this is an internal node.
1224 self.node.as_leaf_mut().len = self.idx as u16;
1225 new_node.data.len = new_len as u16;
1227 let mut new_root = Root {
1228 node: BoxedNode::from_internal(new_node),
// The moved children still point at the old parent; rewire all new_len + 1
// edges to the freshly allocated node.
1232 for i in 0..(new_len+1) {
1233 Handle::new_edge(new_root.as_mut().cast_unchecked(), i).correct_parent_link();
1244 /// Returns whether it is valid to call `.merge()`, i.e., whether there is enough room in
1245 /// a node to hold the combination of the nodes to the left and right of this handle along
1246 /// with the key/value pair at this handle.
// I.e. left.len() + right.len() + 1 must not exceed CAPACITY — the same
// condition `merge` re-checks with a debug assertion.
1247 pub fn can_merge(&self) -> bool {
1261 /// Combines the node immediately to the left of this handle, the key/value pair pointed
1262 /// to by this handle, and the node immediately to the right of this handle into one new
1263 /// child of the underlying node, returning an edge referencing that new child.
1265 /// Assumes that this edge `.can_merge()`.
1266 pub fn merge(mut self)
1267 -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
// Duplicate the handle bitwise so we can hold the left and right children at
// once; sound here because Handle has no Drop and the copies are only used
// to descend to two *different* children.
1268 let self1 = unsafe { ptr::read(&self) };
1269 let self2 = unsafe { ptr::read(&self) };
1270 let mut left_node = self1.left_edge().descend();
1271 let left_len = left_node.len();
1272 let mut right_node = self2.right_edge().descend();
1273 let right_len = right_node.len();
1275 // necessary for correctness, but in a private module
// This is exactly the `can_merge` condition the caller promised.
1276 debug_assert!(left_len + right_len + 1 <= CAPACITY);
// Append the separating key from the parent, then the right child's keys.
1279 ptr::write(left_node.keys_mut().get_unchecked_mut(left_len),
1280 slice_remove(self.node.keys_mut(), self.idx));
1281 ptr::copy_nonoverlapping(
1282 right_node.keys().as_ptr(),
1283 left_node.keys_mut().as_mut_ptr().add(left_len + 1),
// Same for the values.
1286 ptr::write(left_node.vals_mut().get_unchecked_mut(left_len),
1287 slice_remove(self.node.vals_mut(), self.idx));
1288 ptr::copy_nonoverlapping(
1289 right_node.vals().as_ptr(),
1290 left_node.vals_mut().as_mut_ptr().add(left_len + 1),
// Drop the parent's edge to the (now emptied) right child and fix the
// parent links of the edges that shifted left.
1294 slice_remove(&mut self.node.as_internal_mut().edges, self.idx + 1);
1295 for i in self.idx+1..self.node.len() {
1296 Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
1298 self.node.as_leaf_mut().len -= 1;
// left now holds left_len + 1 (separator) + right_len entries.
1300 left_node.as_leaf_mut().len += right_len as u16 + 1;
// If the merged children are internal nodes, their own child edges must be
// moved across and re-parented too.
1302 if self.node.height > 1 {
1303 ptr::copy_nonoverlapping(
1304 right_node.cast_unchecked().as_internal().edges.as_ptr(),
1305 left_node.cast_unchecked()
1313 for i in left_len+1..left_len+right_len+2 {
1315 left_node.cast_unchecked().reborrow_mut(),
1317 ).correct_parent_link();
// Free the right child's allocation with the layout matching its real type:
// InternalNode when it had children, LeafNode otherwise.
1321 right_node.node.cast(),
1322 Layout::new::<InternalNode<K, V>>(),
1326 right_node.node.cast(),
1327 Layout::new::<LeafNode<K, V>>(),
// The edge at `self.idx` now references the merged child.
1331 Handle::new_edge(self.node, self.idx)
1335 /// This removes a key/value pair from the left child and replaces it with the key/value pair
1336 /// pointed to by this handle while pushing the old key/value pair of this handle into the right
// Classic single-element rotation from the left sibling through the parent.
1338 pub fn steal_left(&mut self) {
// Pop the last pair (and, for internal children, its trailing edge) off the
// left child.
1340 let (k, v, edge) = self.reborrow_mut().left_edge().descend().pop();
// Swap it with the separator stored at this handle in the parent.
1342 let k = mem::replace(self.reborrow_mut().into_kv_mut().0, k);
1343 let v = mem::replace(self.reborrow_mut().into_kv_mut().1, v);
// Push the old separator onto the front of the right child; the stolen edge
// exists exactly when the children are internal nodes.
1345 match self.reborrow_mut().right_edge().descend().force() {
1346 ForceResult::Leaf(mut leaf) => leaf.push_front(k, v),
1347 ForceResult::Internal(mut internal) => internal.push_front(k, v, edge.unwrap())
1352 /// This removes a key/value pair from the right child and replaces it with the key/value pair
1353 /// pointed to by this handle while pushing the old key/value pair of this handle into the left
// Mirror image of `steal_left`: rotate one element from the right sibling
// through the parent into the left child.
1355 pub fn steal_right(&mut self) {
// Pop the first pair (and, for internal children, its leading edge) off the
// right child.
1357 let (k, v, edge) = self.reborrow_mut().right_edge().descend().pop_front();
// Swap it with the separator stored at this handle in the parent.
1359 let k = mem::replace(self.reborrow_mut().into_kv_mut().0, k);
1360 let v = mem::replace(self.reborrow_mut().into_kv_mut().1, v);
// Append the old separator to the left child; the stolen edge exists exactly
// when the children are internal nodes.
1362 match self.reborrow_mut().left_edge().descend().force() {
1363 ForceResult::Leaf(mut leaf) => leaf.push(k, v),
1364 ForceResult::Internal(mut internal) => internal.push(k, v, edge.unwrap())
1369 /// This does stealing similar to `steal_left` but steals multiple elements at once.
// Moves `count` entries from the left child, through the parent separator,
// into the front of the right child, in O(count) bulk copies.
1370 pub fn bulk_steal_left(&mut self, count: usize) {
// Bitwise-duplicate the handle to reach both children simultaneously; sound
// because Handle has no Drop and the copies address different children.
1372 let mut left_node = ptr::read(self).left_edge().descend();
1373 let left_len = left_node.len();
1374 let mut right_node = ptr::read(self).right_edge().descend();
1375 let right_len = right_node.len();
1377 // Make sure that we may steal safely.
1378 debug_assert!(right_len + count <= CAPACITY);
1379 debug_assert!(left_len >= count);
1381 let new_left_len = left_len - count;
// Raw key/value pointers into both children and into the parent's separator
// slot, used by the `move_kv` bulk copies below.
1385 let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
1386 let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
1388 let kv = self.reborrow_mut().into_kv_mut();
1389 (kv.0 as *mut K, kv.1 as *mut V)
1392 // Make room for stolen elements in the right child.
// `ptr::copy` (not the nonoverlapping variant) because source and
// destination ranges overlap within the same node.
1393 ptr::copy(right_kv.0,
1394 right_kv.0.add(count),
1396 ptr::copy(right_kv.1,
1397 right_kv.1.add(count),
1400 // Move elements from the left child to the right one.
1401 move_kv(left_kv, new_left_len + 1, right_kv, 0, count - 1);
1403 // Move parent's key/value pair to the right child.
1404 move_kv(parent_kv, 0, right_kv, count - 1, 1);
1406 // Move the left-most stolen pair to the parent.
1407 move_kv(left_kv, new_left_len, parent_kv, 0, 1);
// Only now commit the new lengths on both children.
1410 left_node.reborrow_mut().as_leaf_mut().len -= count as u16;
1411 right_node.reborrow_mut().as_leaf_mut().len += count as u16;
// Internal children also carry `count` edges per stolen entry, which must be
// shifted, moved across, and re-parented.
1413 match (left_node.force(), right_node.force()) {
1414 (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
1415 // Make room for stolen edges.
1416 let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
1417 ptr::copy(right_edges,
1418 right_edges.add(count),
1420 right.correct_childrens_parent_links(count, count + right_len + 1);
1422 move_edges(left, new_left_len + 1, right, 0, count);
1424 (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
// Mixed leaf/internal children would violate the uniform-height invariant.
1425 _ => { unreachable!(); }
1430 /// The symmetric clone of `bulk_steal_left`.
// Moves `count` entries from the right child, through the parent separator,
// onto the end of the left child.
1431 pub fn bulk_steal_right(&mut self, count: usize) {
// Bitwise-duplicate the handle to reach both children simultaneously; sound
// because Handle has no Drop and the copies address different children.
1433 let mut left_node = ptr::read(self).left_edge().descend();
1434 let left_len = left_node.len();
1435 let mut right_node = ptr::read(self).right_edge().descend();
1436 let right_len = right_node.len();
1438 // Make sure that we may steal safely.
1439 debug_assert!(left_len + count <= CAPACITY);
1440 debug_assert!(right_len >= count);
1442 let new_right_len = right_len - count;
// Raw key/value pointers into both children and into the parent's separator
// slot, used by the `move_kv` bulk copies below.
1446 let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
1447 let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
1449 let kv = self.reborrow_mut().into_kv_mut();
1450 (kv.0 as *mut K, kv.1 as *mut V)
1453 // Move parent's key/value pair to the left child.
1454 move_kv(parent_kv, 0, left_kv, left_len, 1);
1456 // Move elements from the right child to the left one.
1457 move_kv(right_kv, 0, left_kv, left_len + 1, count - 1);
1459 // Move the right-most stolen pair to the parent.
1460 move_kv(right_kv, count - 1, parent_kv, 0, 1);
1462 // Fix right indexing
// Close the gap at the front of the right child; overlapping ranges, hence
// `ptr::copy` rather than the nonoverlapping variant.
1463 ptr::copy(right_kv.0.add(count),
1466 ptr::copy(right_kv.1.add(count),
// Only now commit the new lengths on both children.
1471 left_node.reborrow_mut().as_leaf_mut().len += count as u16;
1472 right_node.reborrow_mut().as_leaf_mut().len -= count as u16;
// Internal children: move the corresponding edges left and compact the
// right child's remaining edges, fixing parent links on both sides.
1474 match (left_node.force(), right_node.force()) {
1475 (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
1476 move_edges(right.reborrow_mut(), 0, left, left_len + 1, count);
1478 // Fix right indexing.
1479 let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
1480 ptr::copy(right_edges.add(count),
1483 right.correct_childrens_parent_links(0, new_right_len + 1);
1485 (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
// Mixed leaf/internal children would violate the uniform-height invariant.
1486 _ => { unreachable!(); }
// Bulk-moves key/value pairs between two (keys, vals) raw-pointer pairs.
// Caller must guarantee the ranges are in bounds, non-overlapping, and that
// ownership of the moved entries is transferred (sources are not re-read).
1492 unsafe fn move_kv<K, V>(
1493 source: (*mut K, *mut V), source_offset: usize,
1494 dest: (*mut K, *mut V), dest_offset: usize,
1497 ptr::copy_nonoverlapping(source.0.add(source_offset),
1498 dest.0.add(dest_offset),
1500 ptr::copy_nonoverlapping(source.1.add(source_offset),
1501 dest.1.add(dest_offset),
1505 // Source and destination must have the same height.
// Bulk-moves child edges between two internal nodes and re-parents the moved
// children so their `parent` back-pointers name `dest`. Caller guarantees
// bounds and ownership transfer, as with `move_kv`.
1506 unsafe fn move_edges<K, V>(
1507 mut source: NodeRef<marker::Mut, K, V, marker::Internal>, source_offset: usize,
1508 mut dest: NodeRef<marker::Mut, K, V, marker::Internal>, dest_offset: usize,
1511 let source_ptr = source.as_internal_mut().edges.as_mut_ptr();
1512 let dest_ptr = dest.as_internal_mut().edges.as_mut_ptr();
1513 ptr::copy_nonoverlapping(source_ptr.add(source_offset),
1514 dest_ptr.add(dest_offset),
// The moved children still point at `source`; fix them up.
1516 dest.correct_childrens_parent_links(dest_offset, dest_offset + count);
1519 impl<BorrowType, K, V, HandleType>
1520 Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, HandleType> {
1522 /// Check whether the underlying node is an `Internal` node or a `Leaf` node.
// Delegates to `NodeRef::force` and rewraps the handle so the node's
// concrete type becomes visible in the handle's type parameter.
1523 pub fn force(self) -> ForceResult<
1524 Handle<NodeRef<BorrowType, K, V, marker::Leaf>, HandleType>,
1525 Handle<NodeRef<BorrowType, K, V, marker::Internal>, HandleType>
1527 match self.node.force() {
1528 ForceResult::Leaf(node) => ForceResult::Leaf(Handle {
1531 _marker: PhantomData
1533 ForceResult::Internal(node) => ForceResult::Internal(Handle {
1536 _marker: PhantomData
1542 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
1543 /// Move the suffix after `self` from one node to another one. `right` must be empty.
1544 /// The first edge of `right` remains unchanged.
1545 pub fn move_suffix(&mut self,
1546 right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>) {
// Entries at indices >= self.idx move; the left node keeps exactly idx.
1548 let left_new_len = self.idx;
1549 let mut left_node = self.reborrow_mut().into_node();
1551 let right_new_len = left_node.len() - left_new_len;
1552 let mut right_node = right.reborrow_mut();
// Preconditions: the destination is empty and at the same height, so edges
// can be transplanted without breaking the uniform-depth invariant.
1554 debug_assert!(right_node.len() == 0);
1555 debug_assert!(left_node.height == right_node.height);
1557 let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
1558 let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
1561 move_kv(left_kv, left_new_len, right_kv, 0, right_new_len);
// Adjusting the lengths completes the ownership transfer of the moved pairs.
1563 left_node.reborrow_mut().as_leaf_mut().len = left_new_len as u16;
1564 right_node.reborrow_mut().as_leaf_mut().len = right_new_len as u16;
// Internal nodes also move the edges after this one; the destination's first
// edge (index 0) is deliberately left untouched, per the doc comment above.
1566 match (left_node.force(), right_node.force()) {
1567 (ForceResult::Internal(left), ForceResult::Internal(right)) => {
1568 move_edges(left, left_new_len + 1, right, 1, right_new_len);
1570 (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
// Mixed leaf/internal pairs are impossible at equal height.
1571 _ => { unreachable!(); }
// Outcome of resolving a `LeafOrInternal` node/handle to its concrete kind.
1577 pub enum ForceResult<Leaf, Internal> {
// Outcome of an insertion: either the pair fit in place, or the node split
// and the separator plus new right subtree must be inserted into the parent.
1582 pub enum InsertResult<'a, K, V, Type> {
1583 Fit(Handle<NodeRef<marker::Mut<'a>, K, V, Type>, marker::KV>),
1584 Split(NodeRef<marker::Mut<'a>, K, V, Type>, K, V, Root<K, V>)
1588 use core::marker::PhantomData;
// Uninhabited marker types that encode a node's kind in NodeRef/Handle type
// parameters; they are never instantiated.
1591 pub enum Internal { }
1592 pub enum LeafOrInternal { }
// Zero-sized borrow markers: Immut/Mut make a NodeRef behave like `&`/`&mut`
// with lifetime 'a, via the variance of the wrapped PhantomData.
1595 pub struct Immut<'a>(PhantomData<&'a ()>);
1596 pub struct Mut<'a>(PhantomData<&'a mut ()>);
// Inserts `val` at `idx`, shifting `slice[idx..]` one slot to the right.
// Caller must guarantee there is room past the slice's initialized length
// (note the copy targets idx + 1, one past what the slice itself covers).
1602 unsafe fn slice_insert<T>(slice: &mut [T], idx: usize, val: T) {
1604 slice.as_ptr().add(idx),
1605 slice.as_mut_ptr().add(idx + 1),
// The slot at `idx` was vacated by the shift; write without dropping.
1608 ptr::write(slice.get_unchecked_mut(idx), val);
1611 unsafe fn slice_remove<T>(slice: &mut [T], idx: usize) -> T {
1612 let ret = ptr::read(slice.get_unchecked(idx));
1614 slice.as_ptr().add(idx + 1),
1615 slice.as_mut_ptr().add(idx),
1616 slice.len() - idx - 1