1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 // This is an attempt at an implementation following the ideal
14 // struct BTreeMap<K, V> {
16 // root: Option<Box<Node<K, V, height>>>
19 // struct Node<K, V, height: usize> {
20 // keys: [K; 2 * B - 1],
21 // vals: [V; 2 * B - 1],
22 // edges: if height > 0 {
23 // [Box<Node<K, V, height - 1>>; 2 * B]
25 // parent: *const Node<K, V, height + 1>,
31 // Since Rust doesn't actually have dependent types and polymorphic recursion,
32 // we make do with lots of unsafety.
34 // A major goal of this module is to avoid complexity by treating the tree as a generic (if
35 // weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
36 // this module doesn't care whether the entries are sorted, which nodes can be underfull, or
37 // even what underfull means. However, we do rely on a few invariants:
39 // - Trees must have uniform depth/height. This means that every path down to a leaf from a
40 // given node has exactly the same length.
41 // - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges.
42 // This implies that even an empty internal node has at least one edge.
44 use core::marker::PhantomData;
46 use core::nonzero::NonZero;
47 use core::ptr::{self, Unique};
// Minimum number of keys a non-root node may hold. `B` (the branching
// factor) is declared elsewhere in this file — not visible in this extract.
54 pub const MIN_LEN: usize = B - 1;
// Maximum number of keys (and values) any single node can store.
55 pub const CAPACITY: usize = 2 * B - 1;
57 /// The underlying representation of leaf nodes. Note that it is often unsafe to actually store
58 /// these, since only the first `len` keys and values are assumed to be initialized. As such,
59 /// these should always be put behind pointers, and specifically behind `BoxedNode` in the owned
62 /// See also rust-lang/rfcs#197, which would make this structure significantly more safe by
63 /// avoiding accidentally dropping unused and uninitialized keys and values.
64 struct LeafNode<K, V> {
65 /// The arrays storing the actual data of the node. Only the first `len` elements of each
66 /// array are initialized and valid.
// NOTE(review): the `keys`/`vals` array declarations themselves are elided in
// this extract; only their doc comment survives here.
70 /// We use `*const` as opposed to `*mut` so as to be covariant in `K` and `V`.
71 /// This either points to an actual node or is null.
72 parent: *const InternalNode<K, V>,
74 /// This node's index into the parent node's `edges` array.
75 /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
76 /// This is only guaranteed to be initialized when `parent` is nonnull.
79 /// The number of keys and values this node stores.
81 /// This is at the end of the node's representation and next to `parent_idx` to encourage
82 /// the compiler to join `len` and `parent_idx` into the same 32-bit word, reducing space
87 impl<K, V> LeafNode<K, V> {
88 /// Creates a new `LeafNode`. Unsafe because all nodes should really be hidden behind
89 /// `BoxedNode`, preventing accidental dropping of uninitialized keys and values.
90 unsafe fn new() -> Self {
92 // As a general policy, we leave fields uninitialized if they can be, as this should
93 // be both slightly faster and easier to track in Valgrind.
// NOTE(review): `mem::uninitialized` is deprecated (and instant UB for most
// types) in modern Rust; `MaybeUninit` would be the replacement in a migration.
94 keys: mem::uninitialized(),
95 vals: mem::uninitialized(),
// `parent_idx` is only meaningful while `parent` is nonnull (see field docs),
// so leaving it uninitialized here is consistent with that invariant.
97 parent_idx: mem::uninitialized(),
103 /// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
104 /// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
105 /// `InternalNode` can be directly casted to a pointer to the underlying `LeafNode` portion of the
106 /// node, allowing code to act on leaf and internal nodes generically without having to even check
107 /// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`.
109 struct InternalNode<K, V> {
// `repr(C)` (attribute elided in this extract, per the doc above) keeps `data`
// at offset 0, which is what makes the internal-to-leaf pointer cast sound.
110 data: LeafNode<K, V>,
112 /// The pointers to the children of this node. `len + 1` of these are considered
113 /// initialized and valid.
114 edges: [BoxedNode<K, V>; 2 * B],
117 impl<K, V> InternalNode<K, V> {
118 /// Creates a new `InternalNode`.
120 /// This is unsafe for two reasons. First, it returns an `InternalNode` by value, risking
121 /// dropping of uninitialized fields. Second, an invariant of internal nodes is that `len + 1`
122 /// edges are initialized and valid, meaning that even when the node is empty (having a
123 /// `len` of 0), there must be one initialized and valid edge. This function does not set up
125 unsafe fn new() -> Self {
127 data: LeafNode::new(),
// Callers must initialize `edges[0]` before the node is used (see doc above).
128 edges: mem::uninitialized()
133 /// An owned pointer to a node. This basically is either `Box<LeafNode<K, V>>` or
134 /// `Box<InternalNode<K, V>>`. However, it contains no information as to which of the two types
135 /// of nodes is actually behind the box, and, partially due to this lack of information, has no
137 struct BoxedNode<K, V> {
138 ptr: Unique<LeafNode<K, V>>
141 impl<K, V> BoxedNode<K, V> {
// Takes ownership of a heap-allocated leaf without running its destructor.
142 fn from_leaf(node: Box<LeafNode<K, V>>) -> Self {
144 BoxedNode { ptr: Unique::new(Box::into_raw(node)) }
// The internal-to-leaf cast is sound because `InternalNode` begins with its
// `LeafNode` data (see the `InternalNode` docs regarding `repr(C)`).
148 fn from_internal(node: Box<InternalNode<K, V>>) -> Self {
150 BoxedNode { ptr: Unique::new(Box::into_raw(node) as *mut LeafNode<K, V>) }
// Unsafe: asserts ownership of a raw node pointer obtained via `as_ptr`.
154 unsafe fn from_ptr(ptr: NonZero<*const LeafNode<K, V>>) -> Self {
155 BoxedNode { ptr: Unique::new(ptr.get() as *mut LeafNode<K, V>) }
// Borrows the raw (never-null) node pointer without giving up ownership.
158 fn as_ptr(&self) -> NonZero<*const LeafNode<K, V>> {
160 NonZero::new(self.ptr.as_ptr())
165 /// An owned tree. Note that despite being owned, this does not have a destructor,
166 /// and must be cleaned up manually.
167 pub struct Root<K, V> {
168 node: BoxedNode<K, V>,
// An owned tree may be shared/sent across threads exactly when its keys and
// values may be; the raw pointers inside would otherwise block auto-derive.
172 unsafe impl<K: Sync, V: Sync> Sync for Root<K, V> { }
173 unsafe impl<K: Send, V: Send> Send for Root<K, V> { }
175 impl<K, V> Root<K, V> {
// Creates a tree consisting of a single empty leaf node.
176 pub fn new_leaf() -> Self {
178 node: BoxedNode::from_leaf(Box::new(unsafe { LeafNode::new() })),
// Borrows the root immutably.
// NOTE(review): the const-to-mut cast below looks safe only because an
// `Immut` NodeRef never writes through `root` — confirm against full source.
184 -> NodeRef<marker::Immut, K, V, marker::LeafOrInternal> {
187 node: self.node.as_ptr(),
188 root: self as *const _ as *mut _,
189 _marker: PhantomData,
// Borrows the root mutably.
193 pub fn as_mut(&mut self)
194 -> NodeRef<marker::Mut, K, V, marker::LeafOrInternal> {
197 node: self.node.as_ptr(),
198 root: self as *mut _,
199 _marker: PhantomData,
// Consumes the `Root`, yielding an owned reference to the root node.
203 pub fn into_ref(self)
204 -> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
207 node: self.node.as_ptr(),
208 root: ptr::null_mut(), // FIXME: Is there anything better to do here?
209 _marker: PhantomData,
213 /// Adds a new internal node with a single edge, pointing to the previous root, and makes that
214 /// new node the root. This increases the height by 1 and is the opposite of `pop_level`.
215 pub fn push_level(&mut self)
216 -> NodeRef<marker::Mut, K, V, marker::Internal> {
217 let mut new_node = Box::new(unsafe { InternalNode::new() });
// Satisfy the internal-node invariant: even a len-0 node needs edges[0] valid.
218 new_node.edges[0] = unsafe { BoxedNode::from_ptr(self.node.as_ptr()) };
220 self.node = BoxedNode::from_internal(new_node);
223 let mut ret = NodeRef {
225 node: self.node.as_ptr(),
226 root: self as *mut _,
// The old root is now a child; repair its parent pointer/index.
231 ret.reborrow_mut().first_edge().correct_parent_link();
237 /// Removes the root node, using its first child as the new root. This cannot be called when
238 /// the tree consists only of a leaf node. As it is intended only to be called when the root
239 /// has only one edge, no cleanup is done on any of the other children or elements of the root.
240 /// This decreases the height by 1 and is the opposite of `push_level`.
241 pub fn pop_level(&mut self) {
242 debug_assert!(self.height > 0);
// Remember the old root's allocation so it can be freed below.
244 let top = self.node.ptr.as_ptr() as *mut u8;
247 BoxedNode::from_ptr(self.as_mut()
248 .cast_unchecked::<marker::Internal>()
// The promoted child is now the root and therefore has no parent.
254 self.as_mut().as_leaf_mut().parent = ptr::null();
259 mem::size_of::<InternalNode<K, V>>(),
260 mem::align_of::<InternalNode<K, V>>()
266 // N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
267 // is `Mut`. This is technically wrong, but cannot result in any unsafety due to
268 // internal use of `NodeRef` because we stay completely generic over `K` and `V`.
269 // However, whenever a public type wraps `NodeRef`, make sure that it has the
271 /// A reference to a node.
273 /// This type has a number of parameters that control how it acts:
274 /// - `BorrowType`: This can be `Immut<'a>` or `Mut<'a>` for some `'a` or `Owned`.
275 /// When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`,
276 /// when this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
277 /// and when this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`.
278 /// - `K` and `V`: These control what types of things are stored in the nodes.
279 /// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
280 /// `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
281 /// `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
282 /// `NodeRef` could be pointing to either type of node.
283 pub struct NodeRef<BorrowType, K, V, Type> {
// Always points at the `LeafNode` prefix, regardless of the actual node kind.
285 node: NonZero<*const LeafNode<K, V>>,
286 // This is null unless the borrow type is `Mut`
287 root: *const Root<K, V>,
288 _marker: PhantomData<(BorrowType, Type)>
// Immutable node references are freely copyable, like `&T`.
291 impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> { }
292 impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
293 fn clone(&self) -> Self {
// Sharing any node reference across threads requires the data to be `Sync`.
298 unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync
299 for NodeRef<BorrowType, K, V, Type> { }
// An `Immut` reference behaves like `&Node`, so it is `Send` when K/V are `Sync`;
// `Mut` and `Owned` behave like `&mut Node`/`Box<Node>` and need `Send` instead.
301 unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send
302 for NodeRef<marker::Immut<'a>, K, V, Type> { }
303 unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send
304 for NodeRef<marker::Mut<'a>, K, V, Type> { }
305 unsafe impl<K: Send, V: Send, Type> Send
306 for NodeRef<marker::Owned, K, V, Type> { }
308 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
// The cast is valid because `Type = Internal` statically guarantees the
// pointee really is an `InternalNode` (whose `LeafNode` prefix we point at).
309 fn as_internal(&self) -> &InternalNode<K, V> {
311 &*(self.node.get() as *const InternalNode<K, V>)
316 impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
// Mutable counterpart of `as_internal`; same cast justification.
317 fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
319 &mut *(self.node.get() as *mut InternalNode<K, V>)
325 impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
326 /// Finds the length of the node. This is the number of keys or values. In an
327 /// internal node, the number of edges is `len() + 1`.
328 pub fn len(&self) -> usize {
329 self.as_leaf().len as usize
332 /// Returns the height of this node in the whole tree. Zero height denotes the
334 pub fn height(&self) -> usize {
338 /// Removes any static information about whether this node is a `Leaf` or an
340 pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
349 /// Temporarily takes out another, immutable reference to the same node.
350 fn reborrow<'a>(&'a self) -> NodeRef<marker::Immut<'a>, K, V, Type> {
// Views the node through its `LeafNode` prefix (valid for both node kinds).
359 fn as_leaf(&self) -> &LeafNode<K, V> {
// Borrows the initialized portion of the key array.
365 pub fn keys(&self) -> &[K] {
366 self.reborrow().into_slices().0
// Borrows the initialized portion of the value array.
369 pub fn vals(&self) -> &[V] {
370 self.reborrow().into_slices().1
373 /// Finds the parent of the current node. Returns `Ok(handle)` if the current
374 /// node actually has a parent, where `handle` points to the edge of the parent
375 /// that points to the current node. Returns `Err(self)` if the current node has
376 /// no parent, giving back the original `NodeRef`.
378 /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
379 /// both, upon success, do nothing.
380 pub fn ascend(self) -> Result<
// A null parent pointer means this node is the root.
391 if self.as_leaf().parent.is_null() {
// The parent is one level closer to the root.
396 height: self.height + 1,
398 NonZero::new(self.as_leaf().parent as *mut LeafNode<K, V>)
403 idx: self.as_leaf().parent_idx as usize,
// Handle to the leftmost edge (before the first key/value pair).
409 pub fn first_edge(self) -> Handle<Self, marker::Edge> {
410 Handle::new_edge(self, 0)
// Handle to the rightmost edge (after the last key/value pair).
413 pub fn last_edge(self) -> Handle<Self, marker::Edge> {
414 let len = self.len();
415 Handle::new_edge(self, len)
418 /// Note that `self` must be nonempty.
419 pub fn first_kv(self) -> Handle<Self, marker::KV> {
420 debug_assert!(self.len() > 0);
421 Handle::new_kv(self, 0)
424 /// Note that `self` must be nonempty.
425 pub fn last_kv(self) -> Handle<Self, marker::KV> {
426 let len = self.len();
427 debug_assert!(len > 0);
428 Handle::new_kv(self, len - 1)
432 impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
433 /// Similar to `ascend`, gets a reference to a node's parent node, but also
434 /// deallocates the current node in the process. This is unsafe because the
435 /// current node will still be accessible despite being deallocated.
436 pub unsafe fn deallocate_and_ascend(self) -> Option<
446 let ptr = self.as_leaf() as *const LeafNode<K, V> as *const u8 as *mut u8;
// Capture the parent handle before freeing the node's own allocation.
447 let ret = self.ascend().ok();
448 heap::deallocate(ptr, mem::size_of::<LeafNode<K, V>>(), mem::align_of::<LeafNode<K, V>>());
453 impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
454 /// Similar to `ascend`, gets a reference to a node's parent node, but also
455 /// deallocates the current node in the process. This is unsafe because the
456 /// current node will still be accessible despite being deallocated.
457 pub unsafe fn deallocate_and_ascend(self) -> Option<
// Internal nodes were allocated as `InternalNode`, so free with that layout.
467 let ptr = self.as_internal() as *const InternalNode<K, V> as *const u8 as *mut u8;
468 let ret = self.ascend().ok();
471 mem::size_of::<InternalNode<K, V>>(),
472 mem::align_of::<InternalNode<K, V>>()
478 impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
479 /// Unsafely asserts to the compiler some static information about whether this
480 /// node is a `Leaf`.
481 unsafe fn cast_unchecked<NewType>(&mut self)
482 -> NodeRef<marker::Mut, K, V, NewType> {
492 /// Temporarily takes out another, mutable reference to the same node. Beware, as
493 /// this method is very dangerous, doubly so since it may not immediately appear
496 /// Because mutable pointers can roam anywhere around the tree and can even (through
497 /// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut`
498 /// can easily be used to make the original mutable pointer dangling, or, in the case
499 /// of a reborrowed handle, out of bounds.
500 // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
501 // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
502 unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut, K, V, Type> {
// Mutable view of the node through its `LeafNode` prefix.
511 fn as_leaf_mut(&mut self) -> &mut LeafNode<K, V> {
513 &mut *(self.node.get() as *mut LeafNode<K, V>)
// Mutably borrows the initialized portion of the key array.
517 pub fn keys_mut(&mut self) -> &mut [K] {
518 unsafe { self.reborrow_mut().into_slices_mut().0 }
// Mutably borrows the initialized portion of the value array.
521 pub fn vals_mut(&mut self) -> &mut [V] {
522 unsafe { self.reborrow_mut().into_slices_mut().1 }
526 impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
// Exposes the initialized prefixes of the key and value arrays as slices
// borrowed for the full lifetime `'a` of the underlying tree borrow.
527 pub fn into_slices(self) -> (&'a [K], &'a [V]) {
530 slice::from_raw_parts(
531 self.as_leaf().keys.as_ptr(),
534 slice::from_raw_parts(
535 self.as_leaf().vals.as_ptr(),
543 impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
544 /// Gets a mutable reference to the root itself. This is useful primarily when the
545 /// height of the tree needs to be adjusted. Never call this on a reborrowed pointer.
546 pub fn into_root_mut(self) -> &'a mut Root<K, V> {
// Valid because `root` is nonnull for `Mut` borrows (see `NodeRef.root` docs).
548 &mut *(self.root as *mut Root<K, V>)
// Mutable counterpart of `into_slices`; the casts take a raw pointer to the
// first element of each fixed-size array.
552 pub fn into_slices_mut(mut self) -> (&'a mut [K], &'a mut [V]) {
555 slice::from_raw_parts_mut(
556 &mut self.as_leaf_mut().keys as *mut [K] as *mut K,
559 slice::from_raw_parts_mut(
560 &mut self.as_leaf_mut().vals as *mut [V] as *mut V,
568 impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
569 /// Adds a key/value pair to the end of the node.
570 pub fn push(&mut self, key: K, val: V) {
571 // Necessary for correctness, but this is an internal module
572 debug_assert!(self.len() < CAPACITY);
574 let idx = self.len();
// `ptr::write` avoids dropping the uninitialized slot being overwritten.
577 ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
578 ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
581 self.as_leaf_mut().len += 1;
584 /// Adds a key/value pair to the beginning of the node.
585 pub fn push_front(&mut self, key: K, val: V) {
586 // Necessary for correctness, but this is an internal module
587 debug_assert!(self.len() < CAPACITY);
// `slice_insert` shifts existing entries right to make room at index 0.
590 slice_insert(self.keys_mut(), 0, key);
591 slice_insert(self.vals_mut(), 0, val);
594 self.as_leaf_mut().len += 1;
598 impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
599 /// Adds a key/value pair and an edge to go to the right of that pair to
600 /// the end of the node.
601 pub fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
602 // Necessary for correctness, but this is an internal module
// The pushed subtree must hang exactly one level below this node.
603 debug_assert!(edge.height == self.height - 1);
604 debug_assert!(self.len() < CAPACITY);
606 let idx = self.len();
609 ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
610 ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
611 ptr::write(self.as_internal_mut().edges.get_unchecked_mut(idx + 1), edge.node);
613 self.as_leaf_mut().len += 1;
// The newly adopted child needs its parent pointer/index fixed up.
615 Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
// Repairs parent links for the children in `first..after_last`.
619 fn correct_childrens_parent_links(&mut self, first: usize, after_last: usize) {
620 for i in first..after_last {
621 Handle::new_edge(unsafe { self.reborrow_mut() }, i).correct_parent_link();
// Repairs parent links for all `len + 1` children of this node.
625 fn correct_all_childrens_parent_links(&mut self) {
626 let len = self.len();
627 self.correct_childrens_parent_links(0, len + 1);
630 /// Adds a key/value pair and an edge to go to the left of that pair to
631 /// the beginning of the node.
632 pub fn push_front(&mut self, key: K, val: V, edge: Root<K, V>) {
633 // Necessary for correctness, but this is an internal module
634 debug_assert!(edge.height == self.height - 1);
635 debug_assert!(self.len() < CAPACITY);
638 slice_insert(self.keys_mut(), 0, key);
639 slice_insert(self.vals_mut(), 0, val);
// Inserting at edge 0 shifts every existing edge right by one...
641 slice::from_raw_parts_mut(
642 self.as_internal_mut().edges.as_mut_ptr(),
649 self.as_leaf_mut().len += 1;
// ...so every child's `parent_idx` is now stale and must be repaired.
651 self.correct_all_childrens_parent_links();
656 impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
657 /// Removes a key/value pair from the end of this node. If this is an internal node,
658 /// also removes the edge that was to the right of that pair.
659 pub fn pop(&mut self) -> (K, V, Option<Root<K, V>>) {
660 // Necessary for correctness, but this is an internal module
661 debug_assert!(self.len() > 0);
663 let idx = self.len() - 1;
// Move the last pair out by value; the slots become logically uninitialized
// once `len` is decremented below.
666 let key = ptr::read(self.keys().get_unchecked(idx));
667 let val = ptr::read(self.vals().get_unchecked(idx));
668 let edge = match self.reborrow_mut().force() {
669 ForceResult::Leaf(_) => None,
670 ForceResult::Internal(internal) => {
671 let edge = ptr::read(internal.as_internal().edges.get_unchecked(idx + 1));
// The detached subtree becomes a standalone tree one level shorter...
672 let mut new_root = Root { node: edge, height: internal.height - 1 };
// ...and as a root it must not point back at its former parent.
673 new_root.as_mut().as_leaf_mut().parent = ptr::null();
678 self.as_leaf_mut().len -= 1;
683 /// Removes a key/value pair from the beginning of this node. If this is an internal node,
684 /// also removes the edge that was to the left of that pair.
685 pub fn pop_front(&mut self) -> (K, V, Option<Root<K, V>>) {
686 // Necessary for correctness, but this is an internal module
687 debug_assert!(self.len() > 0);
689 let old_len = self.len();
692 let key = slice_remove(self.keys_mut(), 0);
693 let val = slice_remove(self.vals_mut(), 0);
694 let edge = match self.reborrow_mut().force() {
695 ForceResult::Leaf(_) => None,
696 ForceResult::Internal(mut internal) => {
697 let edge = slice_remove(
698 slice::from_raw_parts_mut(
699 internal.as_internal_mut().edges.as_mut_ptr(),
705 let mut new_root = Root { node: edge, height: internal.height - 1 };
706 new_root.as_mut().as_leaf_mut().parent = ptr::null();
// Removing edge 0 shifted the remaining children left: fix their indices.
708 for i in 0..old_len {
709 Handle::new_edge(internal.reborrow_mut(), i).correct_parent_link();
716 self.as_leaf_mut().len -= 1;
// Raw pointers to the key/value storage, used by bulk-move operations.
722 fn into_kv_pointers_mut(mut self) -> (*mut K, *mut V) {
724 self.keys_mut().as_mut_ptr(),
725 self.vals_mut().as_mut_ptr()
730 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
731 /// Checks whether a node is an `Internal` node or a `Leaf` node.
732 pub fn force(self) -> ForceResult<
733 NodeRef<BorrowType, K, V, marker::Leaf>,
734 NodeRef<BorrowType, K, V, marker::Internal>
// Height zero is the definition of a leaf (see the uniform-depth invariant).
736 if self.height == 0 {
737 ForceResult::Leaf(NodeRef {
744 ForceResult::Internal(NodeRef {
754 /// A reference to a specific key/value pair or edge within a node. The `Node` parameter
755 /// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key/value
756 /// pair) or `Edge` (signifying a handle on an edge).
758 /// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
759 /// a child node, these represent the spaces where child pointers would go between the key/value
760 /// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
761 /// to the left of the node, one between the two pairs, and one at the right of the node.
762 pub struct Handle<Node, Type> {
// `Type` is purely compile-time information: no runtime tag is stored.
765 _marker: PhantomData<Type>
768 impl<Node: Copy, Type> Copy for Handle<Node, Type> { }
769 // We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
770 // `Clone`able is when it is an immutable reference and therefore `Copy`.
771 impl<Node: Copy, Type> Clone for Handle<Node, Type> {
772 fn clone(&self) -> Self {
777 impl<Node, Type> Handle<Node, Type> {
778 /// Retrieves the node that contains the edge or key/value pair this handle points to.
779 pub fn into_node(self) -> Node {
784 impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
785 /// Creates a new handle to a key/value pair in `node`. `idx` must be less than `node.len()`.
786 pub fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
787 // Necessary for correctness, but in a private module
788 debug_assert!(idx < node.len());
// The edge immediately to the left of this pair (same index).
797 pub fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
798 Handle::new_edge(self.node, self.idx)
// The edge immediately to the right of this pair (index + 1).
801 pub fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
802 Handle::new_edge(self.node, self.idx + 1)
806 impl<BorrowType, K, V, NodeType, HandleType> PartialEq
807 for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType> {
// Two handles are equal when they name the same slot of the same node
// (compared by raw node pointer, not by key/value contents).
809 fn eq(&self, other: &Self) -> bool {
810 self.node.node == other.node.node && self.idx == other.idx
814 impl<BorrowType, K, V, NodeType, HandleType>
815 Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType> {
817 /// Temporarily takes out another, immutable handle on the same location.
818 pub fn reborrow(&self)
819 -> Handle<NodeRef<marker::Immut, K, V, NodeType>, HandleType> {
821 // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
823 node: self.node.reborrow(),
830 impl<'a, K, V, NodeType, HandleType>
831 Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
833 /// Temporarily takes out another, mutable handle on the same location. Beware, as
834 /// this method is very dangerous, doubly so since it may not immediately appear
837 /// Because mutable pointers can roam anywhere around the tree and can even (through
838 /// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut`
839 /// can easily be used to make the original mutable pointer dangling, or, in the case
840 /// of a reborrowed handle, out of bounds.
841 // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
842 // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
843 pub unsafe fn reborrow_mut(&mut self)
844 -> Handle<NodeRef<marker::Mut, K, V, NodeType>, HandleType> {
846 // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
848 node: self.node.reborrow_mut(),
855 impl<BorrowType, K, V, NodeType>
856 Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
858 /// Creates a new handle to an edge in `node`. `idx` must be less than or equal to
860 pub fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
861 // Necessary for correctness, but in a private module
// Note `<=`: there are `len + 1` edge positions but only `len` KV positions.
862 debug_assert!(idx <= node.len());
// The KV pair to this edge's left, or `Err(self)` at the leftmost edge.
872 -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
875 Ok(Handle::new_kv(self.node, self.idx - 1))
// The KV pair to this edge's right, or `Err(self)` at the rightmost edge.
881 pub fn right_kv(self)
882 -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
884 if self.idx < self.node.len() {
885 Ok(Handle::new_kv(self.node, self.idx))
892 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
893 /// Inserts a new key/value pair between the key/value pairs to the right and left of
894 /// this edge. This method assumes that there is enough space in the node for the new
897 /// The returned pointer points to the inserted value.
898 fn insert_fit(&mut self, key: K, val: V) -> *mut V {
899 // Necessary for correctness, but in a private module
900 debug_assert!(self.node.len() < CAPACITY);
903 slice_insert(self.node.keys_mut(), self.idx, key);
904 slice_insert(self.node.vals_mut(), self.idx, val);
906 self.node.as_leaf_mut().len += 1;
908 self.node.vals_mut().get_unchecked_mut(self.idx)
912 /// Inserts a new key/value pair between the key/value pairs to the right and left of
913 /// this edge. This method splits the node if there isn't enough room.
915 /// The returned pointer points to the inserted value.
916 pub fn insert(mut self, key: K, val: V)
917 -> (InsertResult<'a, K, V, marker::Leaf>, *mut V) {
919 if self.node.len() < CAPACITY {
920 let ptr = self.insert_fit(key, val);
921 (InsertResult::Fit(Handle::new_kv(self.node, self.idx)), ptr)
// Full node: split around the middle pair (index B), then insert into
// whichever half the original edge index falls in.
923 let middle = Handle::new_kv(self.node, B);
924 let (mut left, k, v, mut right) = middle.split();
925 let ptr = if self.idx <= B {
927 Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val)
932 right.as_mut().cast_unchecked::<marker::Leaf>(),
934 ).insert_fit(key, val)
937 (InsertResult::Split(left, k, v, right), ptr)
942 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
943 /// Fixes the parent pointer and index in the child node below this edge. This is useful
944 /// when the ordering of edges has been changed, such as in the various `insert` methods.
945 fn correct_parent_link(mut self) {
946 let idx = self.idx as u16;
947 let ptr = self.node.as_internal_mut() as *mut _;
948 let mut child = self.descend();
949 child.as_leaf_mut().parent = ptr;
950 child.as_leaf_mut().parent_idx = idx;
953 /// Unsafely asserts to the compiler some static information about whether the underlying
954 /// node of this handle is a `Leaf`.
955 unsafe fn cast_unchecked<NewType>(&mut self)
956 -> Handle<NodeRef<marker::Mut, K, V, NewType>, marker::Edge> {
958 Handle::new_edge(self.node.cast_unchecked(), self.idx)
961 /// Inserts a new key/value pair and an edge that will go to the right of that new pair
962 /// between this edge and the key/value pair to the right of this edge. This method assumes
963 /// that there is enough space in the node for the new pair to fit.
964 fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
965 // Necessary for correctness, but in an internal module
966 debug_assert!(self.node.len() < CAPACITY);
967 debug_assert!(edge.height == self.node.height - 1);
970 // This cast is a lie, but it allows us to reuse the key/value insertion logic.
971 self.cast_unchecked::<marker::Leaf>().insert_fit(key, val);
// Shift the edges after the insertion point right to make room for the new one.
974 slice::from_raw_parts_mut(
975 self.node.as_internal_mut().edges.as_mut_ptr(),
// Every shifted child (and the new one) has a stale `parent_idx`: repair them.
982 for i in (self.idx+1)..(self.node.len()+1) {
983 Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
988 /// Inserts a new key/value pair and an edge that will go to the right of that new pair
989 /// between this edge and the key/value pair to the right of this edge. This method splits
990 /// the node if there isn't enough room.
991 pub fn insert(mut self, key: K, val: V, edge: Root<K, V>)
992 -> InsertResult<'a, K, V, marker::Internal> {
994 // Necessary for correctness, but this is an internal module
995 debug_assert!(edge.height == self.node.height - 1);
997 if self.node.len() < CAPACITY {
998 self.insert_fit(key, val, edge);
999 InsertResult::Fit(Handle::new_kv(self.node, self.idx))
// Full node: split around the middle pair (index B), mirroring the leaf case.
1001 let middle = Handle::new_kv(self.node, B);
1002 let (mut left, k, v, mut right) = middle.split();
1005 Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val, edge);
1010 right.as_mut().cast_unchecked::<marker::Internal>(),
1012 ).insert_fit(key, val, edge);
1015 InsertResult::Split(left, k, v, right)
1020 impl<BorrowType, K, V>
1021 Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
1023 /// Finds the node pointed to by this edge.
1025 /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
1026 /// both, upon success, do nothing.
1027 pub fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
// The child lives one level further from the root.
1029 height: self.node.height - 1,
1030 node: unsafe { self.node.as_internal().edges.get_unchecked(self.idx).as_ptr() },
1031 root: self.node.root,
1032 _marker: PhantomData
1037 impl<'a, K: 'a, V: 'a, NodeType>
1038 Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
// Shared references to the key and value this handle points at; the index is
// in bounds by `new_kv`'s `idx < len` contract.
1040 pub fn into_kv(self) -> (&'a K, &'a V) {
1041 let (keys, vals) = self.node.into_slices();
1043 (keys.get_unchecked(self.idx), vals.get_unchecked(self.idx))
1048 impl<'a, K: 'a, V: 'a, NodeType>
1049 Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
// Consuming variant: mutable references that live for the full borrow `'a`.
1051 pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
1052 let (mut keys, mut vals) = self.node.into_slices_mut();
1054 (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx))
1059 impl<'a, K, V, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
// Non-consuming variant: the borrow lasts only as long as `&mut self`.
1060 pub fn kv_mut(&mut self) -> (&mut K, &mut V) {
1062 let (mut keys, mut vals) = self.node.reborrow_mut().into_slices_mut();
1063 (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx))
1068 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
1069 /// Splits the underlying node into three parts:
// (Doc fix: the node keeps the LEFT part — the code below truncates `len` to
// `self.idx` and moves everything after the handle into the new node.)
1071 /// - The node is truncated to only contain the key/value pairs to the left of
1073 /// - The key and value pointed to by this handle are extracted.
1074 /// - All the key/value pairs to the right of this handle are put into a newly
1076 pub fn split(mut self)
1077 -> (NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, K, V, Root<K, V>) {
1079 let mut new_node = Box::new(LeafNode::new());
// Extract the middle pair by value.
1081 let k = ptr::read(self.node.keys().get_unchecked(self.idx));
1082 let v = ptr::read(self.node.vals().get_unchecked(self.idx));
// Number of pairs that move to the new (right-hand) node.
1084 let new_len = self.node.len() - self.idx - 1;
1086 ptr::copy_nonoverlapping(
1087 self.node.keys().as_ptr().offset(self.idx as isize + 1),
1088 new_node.keys.as_mut_ptr(),
1091 ptr::copy_nonoverlapping(
1092 self.node.vals().as_ptr().offset(self.idx as isize + 1),
1093 new_node.vals.as_mut_ptr(),
// Truncating `len` logically removes the moved-out (and extracted) entries.
1097 self.node.as_leaf_mut().len = self.idx as u16;
1098 new_node.len = new_len as u16;
1104 node: BoxedNode::from_leaf(new_node),
1111 /// Removes the key/value pair pointed to by this handle, returning the edge between the
1112 /// now adjacent key/value pairs to the left and right of this handle.
1113 pub fn remove(mut self)
1114 -> (Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>, K, V) {
1116 let k = slice_remove(self.node.keys_mut(), self.idx);
1117 let v = slice_remove(self.node.vals_mut(), self.idx);
1118 self.node.as_leaf_mut().len -= 1;
1119 (self.left_edge(), k, v)
1124 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
1125 /// Splits the underlying node into three parts:
// (Doc fix: as in the leaf version, the node keeps the LEFT part — the code
// truncates `len` to `self.idx` and moves the right part into the new node.)
1127 /// - The node is truncated to only contain the edges and key/value pairs to the
1128 /// left of this handle.
1129 /// - The key and value pointed to by this handle are extracted.
1130 /// - All the edges and key/value pairs to the right of this handle are put into
1131 /// a newly allocated node.
1132 pub fn split(mut self)
1133 -> (NodeRef<marker::Mut<'a>, K, V, marker::Internal>, K, V, Root<K, V>) {
1135 let mut new_node = Box::new(InternalNode::new());
1137 let k = ptr::read(self.node.keys().get_unchecked(self.idx));
1138 let v = ptr::read(self.node.vals().get_unchecked(self.idx));
1140 let height = self.node.height;
1141 let new_len = self.node.len() - self.idx - 1;
1143 ptr::copy_nonoverlapping(
1144 self.node.keys().as_ptr().offset(self.idx as isize + 1),
1145 new_node.data.keys.as_mut_ptr(),
1148 ptr::copy_nonoverlapping(
1149 self.node.vals().as_ptr().offset(self.idx as isize + 1),
1150 new_node.data.vals.as_mut_ptr(),
// Internal nodes also carry their right-hand edges across.
1153 ptr::copy_nonoverlapping(
1154 self.node.as_internal().edges.as_ptr().offset(self.idx as isize + 1),
1155 new_node.edges.as_mut_ptr(),
1159 self.node.as_leaf_mut().len = self.idx as u16;
1160 new_node.data.len = new_len as u16;
1162 let mut new_root = Root {
1163 node: BoxedNode::from_internal(new_node),
// The moved children now live under the new node: repair all `new_len + 1`
// parent links.
1167 for i in 0..(new_len+1) {
1168 Handle::new_edge(new_root.as_mut().cast_unchecked(), i).correct_parent_link();
1179 /// Returns whether it is valid to call `.merge()`, i.e., whether there is enough room in
1180 /// a node to hold the combination of the nodes to the left and right of this handle along
1181 /// with the key/value pair at this handle.
1182 pub fn can_merge(&self) -> bool {
// NOTE(review): body elided in this excerpt; per the doc above it should
// check left.len() + right.len() + 1 <= CAPACITY — the same bound that
// `merge` enforces via debug_assert. Confirm against the full source.
1196 /// Combines the node immediately to the left of this handle, the key/value pair pointed
1197 /// to by this handle, and the node immediately to the right of this handle into one new
1198 /// child of the underlying node, returning an edge referencing that new child.
1200 /// Assumes that this edge `.can_merge()`.
1201 pub fn merge(mut self)
1202 -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
// SAFETY(review): bitwise copies of the handle, so both child handles can
// be derived while `self` is still used below; a handle is plain data
// (node ref + index) and nothing here double-frees through the copies.
1203 let self1 = unsafe { ptr::read(&self) };
1204 let self2 = unsafe { ptr::read(&self) };
1205 let mut left_node = self1.left_edge().descend();
1206 let left_len = left_node.len();
1207 let mut right_node = self2.right_edge().descend();
1208 let right_len = right_node.len();
1210 // necessary for correctness, but in a private module
1211 debug_assert!(left_len + right_len + 1 <= CAPACITY);
// Append the separator key from the parent to the left child...
1214 ptr::write(left_node.keys_mut().get_unchecked_mut(left_len),
1215 slice_remove(self.node.keys_mut(), self.idx));
// ...followed by all of the right child's keys.
1216 ptr::copy_nonoverlapping(
1217 right_node.keys().as_ptr(),
1218 left_node.keys_mut().as_mut_ptr().offset(left_len as isize + 1),
// Same for the values.
1221 ptr::write(left_node.vals_mut().get_unchecked_mut(left_len),
1222 slice_remove(self.node.vals_mut(), self.idx));
1223 ptr::copy_nonoverlapping(
1224 right_node.vals().as_ptr(),
1225 left_node.vals_mut().as_mut_ptr().offset(left_len as isize + 1),
// Drop the parent's edge to the (now drained) right child and re-link the
// parent pointers of every child edge that shifted left as a result.
1229 slice_remove(&mut self.node.as_internal_mut().edges, self.idx + 1);
1230 for i in self.idx+1..self.node.len() {
1231 Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
1233 self.node.as_leaf_mut().len -= 1;
// Left child absorbed right_len pairs plus the separator.
1235 left_node.as_leaf_mut().len += right_len as u16 + 1;
// height > 1 means the children are internal nodes: also move the right
// child's edges across and fix the moved children's parent links.
1237 if self.node.height > 1 {
1238 ptr::copy_nonoverlapping(
1239 right_node.cast_unchecked().as_internal().edges.as_ptr(),
1240 left_node.cast_unchecked()
1244 .offset(left_len as isize + 1),
1248 for i in left_len+1..left_len+right_len+2 {
1250 left_node.cast_unchecked().reborrow_mut(),
1252 ).correct_parent_link();
// Arguments for deallocating the drained right node with InternalNode
// layout (the deallocation call itself is elided in this excerpt)...
1256 right_node.node.get() as *mut u8,
1257 mem::size_of::<InternalNode<K, V>>(),
1258 mem::align_of::<InternalNode<K, V>>()
// ...and, in the leaf case, with LeafNode layout instead.
1262 right_node.node.get() as *mut u8,
1263 mem::size_of::<LeafNode<K, V>>(),
1264 mem::align_of::<LeafNode<K, V>>()
// The edge at `idx` is where the merged child now lives.
1268 Handle::new_edge(self.node, self.idx)
1272 /// This removes a key/value pair from the left child and replaces it with the key/value pair
1273 /// pointed to by this handle while pushing the old key/value pair of this handle into the right
// child (prepended at its front; for internal children the popped edge
// travels with it).
1275 pub fn steal_left(&mut self) {
// Pop the last pair (and trailing edge, if any) of the left child.
1277 let (k, v, edge) = self.reborrow_mut().left_edge().descend().pop();
// Swap the popped pair with the separator stored at this handle.
1279 let k = mem::replace(self.reborrow_mut().into_kv_mut().0, k);
1280 let v = mem::replace(self.reborrow_mut().into_kv_mut().1, v);
// The old separator goes to the front of the right child.
// NOTE(review): `edge.unwrap()` presumes `pop` on an internal child always
// yields `Some(edge)` — `pop`'s definition is not visible here; confirm.
1282 match self.reborrow_mut().right_edge().descend().force() {
1283 ForceResult::Leaf(mut leaf) => leaf.push_front(k, v),
1284 ForceResult::Internal(mut internal) => internal.push_front(k, v, edge.unwrap())
1289 /// This removes a key/value pair from the right child and replaces it with the key/value pair
1290 /// pointed to by this handle while pushing the old key/value pair of this handle into the left
// child (appended at its back; for internal children the popped edge
// travels with it). Mirror image of `steal_left`.
1292 pub fn steal_right(&mut self) {
// Pop the first pair (and leading edge, if any) of the right child.
1294 let (k, v, edge) = self.reborrow_mut().right_edge().descend().pop_front();
// Swap the popped pair with the separator stored at this handle.
1296 let k = mem::replace(self.reborrow_mut().into_kv_mut().0, k);
1297 let v = mem::replace(self.reborrow_mut().into_kv_mut().1, v);
// The old separator goes to the back of the left child.
// NOTE(review): `edge.unwrap()` presumes `pop_front` on an internal child
// always yields `Some(edge)` — definition not visible here; confirm.
1299 match self.reborrow_mut().left_edge().descend().force() {
1300 ForceResult::Leaf(mut leaf) => leaf.push(k, v),
1301 ForceResult::Internal(mut internal) => internal.push(k, v, edge.unwrap())
1306 /// This does stealing similar to `steal_left` but steals multiple elements at once.
1307 pub fn bulk_steal_left(&mut self, count: usize) {
// Bitwise handle copies (as in `merge`) to reach both children at once.
1309 let mut left_node = ptr::read(self).left_edge().descend();
1310 let left_len = left_node.len();
1311 let mut right_node = ptr::read(self).right_edge().descend();
1312 let right_len = right_node.len();
1314 // Make sure that we may steal safely.
1315 debug_assert!(right_len + count <= CAPACITY);
1316 debug_assert!(left_len >= count);
1318 let new_left_len = left_len - count;
// Raw key/value pointers into both children and into the parent's
// separator slot (intervening scope lines are elided in this excerpt).
1322 let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
1323 let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
1325 let kv = self.reborrow_mut().into_kv_mut();
1326 (kv.0 as *mut K, kv.1 as *mut V)
1329 // Make room for stolen elements in the right child.
1330 ptr::copy(right_kv.0,
1331 right_kv.0.offset(count as isize),
1333 ptr::copy(right_kv.1,
1334 right_kv.1.offset(count as isize),
1337 // Move elements from the left child to the right one.
1338 move_kv(left_kv, new_left_len + 1, right_kv, 0, count - 1);
1340 // Move parent's key/value pair to the right child.
1341 move_kv(parent_kv, 0, right_kv, count - 1, 1);
1343 // Move the left-most stolen pair to the parent.
1344 move_kv(left_kv, new_left_len, parent_kv, 0, 1);
// Update both children's lengths to reflect the transfer.
1347 left_node.reborrow_mut().as_leaf_mut().len -= count as u16;
1348 right_node.reborrow_mut().as_leaf_mut().len += count as u16;
// Internal children must also transfer the corresponding child edges.
1350 match (left_node.force(), right_node.force()) {
1351 (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
1352 // Make room for stolen edges.
1353 let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
1354 ptr::copy(right_edges,
1355 right_edges.offset(count as isize),
1357 right.correct_childrens_parent_links(count, count + right_len + 1);
1359 move_edges(left, new_left_len + 1, right, 0, count);
// Leaves have no edges: nothing more to do.
1361 (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
// Mixed heights would violate the uniform-depth invariant.
1362 _ => { unreachable!(); }
1367 /// The symmetric clone of `bulk_steal_left`.
1368 pub fn bulk_steal_right(&mut self, count: usize) {
// Bitwise handle copies (as in `merge`) to reach both children at once.
1370 let mut left_node = ptr::read(self).left_edge().descend();
1371 let left_len = left_node.len();
1372 let mut right_node = ptr::read(self).right_edge().descend();
1373 let right_len = right_node.len();
1375 // Make sure that we may steal safely.
1376 debug_assert!(left_len + count <= CAPACITY);
1377 debug_assert!(right_len >= count);
1379 let new_right_len = right_len - count;
// Raw key/value pointers into both children and into the parent's
// separator slot (intervening scope lines are elided in this excerpt).
1383 let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
1384 let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
1386 let kv = self.reborrow_mut().into_kv_mut();
1387 (kv.0 as *mut K, kv.1 as *mut V)
1390 // Move parent's key/value pair to the left child.
1391 move_kv(parent_kv, 0, left_kv, left_len, 1);
1393 // Move elements from the right child to the left one.
1394 move_kv(right_kv, 0, left_kv, left_len + 1, count - 1);
1396 // Move the right-most stolen pair to the parent.
1397 move_kv(right_kv, count - 1, parent_kv, 0, 1);
1399 // Fix right indexing: shift the surviving pairs down to index 0.
1400 ptr::copy(right_kv.0.offset(count as isize),
1403 ptr::copy(right_kv.1.offset(count as isize),
// Update both children's lengths to reflect the transfer.
1408 left_node.reborrow_mut().as_leaf_mut().len += count as u16;
1409 right_node.reborrow_mut().as_leaf_mut().len -= count as u16;
// Internal children must also transfer the corresponding child edges.
1411 match (left_node.force(), right_node.force()) {
1412 (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
1413 move_edges(right.reborrow_mut(), 0, left, left_len + 1, count);
1415 // Fix right indexing.
1416 let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
1417 ptr::copy(right_edges.offset(count as isize),
1420 right.correct_childrens_parent_links(0, new_right_len + 1);
// Leaves have no edges: nothing more to do.
1422 (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
// Mixed heights would violate the uniform-depth invariant.
1423 _ => { unreachable!(); }
// Copies a run of key/value pairs from `source + source_offset` to
// `dest + dest_offset` (the element-count parameter line is elided in this
// excerpt). Caller must guarantee both ranges are in bounds and disjoint —
// `copy_nonoverlapping` requires non-overlapping memory.
1429 unsafe fn move_kv<K, V>(
1430 source: (*mut K, *mut V), source_offset: usize,
1431 dest: (*mut K, *mut V), dest_offset: usize,
// Keys and values are moved with the same offsets, keeping the two
// parallel arrays in sync.
1434 ptr::copy_nonoverlapping(source.0.offset(source_offset as isize),
1435 dest.0.offset(dest_offset as isize),
1437 ptr::copy_nonoverlapping(source.1.offset(source_offset as isize),
1438 dest.1.offset(dest_offset as isize),
1442 // Source and destination must have the same height.
// Moves `count` child edges from `source` (starting at `source_offset`)
// into `dest` (starting at `dest_offset`) and rewires the moved children's
// parent links to point at `dest`.
1443 unsafe fn move_edges<K, V>(
1444 mut source: NodeRef<marker::Mut, K, V, marker::Internal>, source_offset: usize,
1445 mut dest: NodeRef<marker::Mut, K, V, marker::Internal>, dest_offset: usize,
1448 let source_ptr = source.as_internal_mut().edges.as_mut_ptr();
1449 let dest_ptr = dest.as_internal_mut().edges.as_mut_ptr();
1450 ptr::copy_nonoverlapping(source_ptr.offset(source_offset as isize),
1451 dest_ptr.offset(dest_offset as isize),
// The raw copy left stale parent pointers in the moved children; fix them.
1453 dest.correct_childrens_parent_links(dest_offset, dest_offset + count);
1456 impl<BorrowType, K, V, HandleType>
1457 Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, HandleType> {
1459 /// Check whether the underlying node is an `Internal` node or a `Leaf` node.
// Delegates to `NodeRef::force` and rewraps the typed node in a new Handle
// (the struct-field lines between the arms are elided in this excerpt).
1460 pub fn force(self) -> ForceResult<
1461 Handle<NodeRef<BorrowType, K, V, marker::Leaf>, HandleType>,
1462 Handle<NodeRef<BorrowType, K, V, marker::Internal>, HandleType>
1464 match self.node.force() {
1465 ForceResult::Leaf(node) => ForceResult::Leaf(Handle {
1468 _marker: PhantomData
1470 ForceResult::Internal(node) => ForceResult::Internal(Handle {
1473 _marker: PhantomData
1479 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
1480 /// Move the suffix after `self` from one node to another one. `right` must be empty.
1481 /// The first edge of `right` remains unchanged.
1482 pub fn move_suffix(&mut self,
1483 right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>) {
// Everything at index >= self.idx moves; the prefix [0, idx) stays.
1485 let left_new_len = self.idx;
1486 let mut left_node = self.reborrow_mut().into_node();
1488 let right_new_len = left_node.len() - left_new_len;
1489 let mut right_node = right.reborrow_mut();
// Preconditions: destination is empty and at the same height (uniform-depth
// invariant), otherwise the edge copy below would be meaningless.
1491 debug_assert!(right_node.len() == 0);
1492 debug_assert!(left_node.height == right_node.height);
1494 let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
1495 let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
// Transfer the key/value suffix wholesale.
1498 move_kv(left_kv, left_new_len, right_kv, 0, right_new_len);
1500 left_node.reborrow_mut().as_leaf_mut().len = left_new_len as u16;
1501 right_node.reborrow_mut().as_leaf_mut().len = right_new_len as u16;
// For internal nodes, also move the matching child edges; note dest offset
// 1 — the doc above promises `right`'s first edge stays untouched.
1503 match (left_node.force(), right_node.force()) {
1504 (ForceResult::Internal(left), ForceResult::Internal(right)) => {
1505 move_edges(left, left_new_len + 1, right, 1, right_new_len);
1507 (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
// Mixed heights would violate the uniform-depth invariant.
1508 _ => { unreachable!(); }
// Result of `force`: the leaf-typed or internal-typed view of a node or
// handle (variant lines are elided in this excerpt).
1514 pub enum ForceResult<Leaf, Internal> {
// Outcome of an insertion: either the pair fit in the node (`Fit`, a handle
// to where it landed) or the node split, yielding the node, the separator
// key/value, and the newly allocated right-hand tree.
1519 pub enum InsertResult<'a, K, V, Type> {
1520 Fit(Handle<NodeRef<marker::Mut<'a>, K, V, Type>, marker::KV>),
1521 Split(NodeRef<marker::Mut<'a>, K, V, Type>, K, V, Root<K, V>)
1525 use core::marker::PhantomData;
// Zero-sized type-level tags used as the node-type parameter of
// NodeRef/Handle (the `Leaf` variant line is elided in this excerpt).
1528 pub enum Internal { }
1529 pub enum LeafOrInternal { }
// Borrow markers: shared vs. exclusive access, each carrying the borrow's
// lifetime via PhantomData so NodeRef enforces aliasing rules at compile time.
1532 pub struct Immut<'a>(PhantomData<&'a ()>);
1533 pub struct Mut<'a>(PhantomData<&'a mut ()>);
// Inserts `val` at position `idx`, shifting `slice[idx..]` one slot to the
// right (the `ptr::copy` call line is elided in this excerpt; only its
// arguments are visible). NOTE(review): presumably the caller guarantees an
// initialized-capacity slot exists past the shifted suffix — confirm.
1539 unsafe fn slice_insert<T>(slice: &mut [T], idx: usize, val: T) {
1541 slice.as_ptr().offset(idx as isize),
1542 slice.as_mut_ptr().offset(idx as isize + 1),
// Write the new value into the vacated slot without dropping old contents.
1545 ptr::write(slice.get_unchecked_mut(idx), val);
// Reads out `slice[idx]` and shifts `slice[idx+1..]` left by one to close
// the gap (the `ptr::copy` call line is elided in this excerpt; only its
// arguments are visible). The logical length decrement is the caller's job —
// see e.g. `remove`, which decrements `len` after calling this.
1548 unsafe fn slice_remove<T>(slice: &mut [T], idx: usize) -> T {
// Bitwise move of the removed element; its slot is overwritten just below.
1549 let ret = ptr::read(slice.get_unchecked(idx));
1551 slice.as_ptr().offset(idx as isize + 1),
1552 slice.as_mut_ptr().offset(idx as isize),
// Element count of the suffix being shifted down.
1553 slice.len() - idx - 1