1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 // This module represents all the internal representation and logic for a B-Tree's node
12 // with a safe interface, so that BTreeMap itself does not depend on any of these details.
14 pub use self::InsertionResult::*;
15 pub use self::SearchResult::*;
16 pub use self::ForceResult::*;
17 pub use self::TraversalItem::*;
19 use core::cmp::Ordering::{Greater, Less, Equal};
20 use core::intrinsics::arith_offset;
22 use core::marker::PhantomData;
23 use core::ops::{Deref, DerefMut, Index, IndexMut};
24 use core::ptr::Unique;
25 use core::{slice, mem, ptr, cmp};
26 use alloc::heap::{self, EMPTY};
30 /// Represents the result of an Insertion: either the item fit, or the node had to split
31 pub enum InsertionResult<K, V> {
32 /// The inserted element fit
34 /// The inserted element did not fit, so the node was split
35 Split(K, V, Node<K, V>),
38 /// Represents the result of a search for a key in a single node
39 pub enum SearchResult<NodeRef> {
40 /// The element was found at the given index
41 Found(Handle<NodeRef, handle::KV, handle::LeafOrInternal>),
42 /// The element wasn't found, but if it's anywhere, it must be beyond this edge
43 GoDown(Handle<NodeRef, handle::Edge, handle::LeafOrInternal>),
46 /// A B-Tree Node. We keep keys/edges/values separate to optimize searching for keys.
47 #[unsafe_no_drop_flag]
48 pub struct Node<K, V> {
49 // To avoid the need for multiple allocations, we allocate a single buffer with enough space
50 // for `capacity` keys, `capacity` values, and (in internal nodes) `capacity + 1` edges.
51 // Despite this, we store three separate pointers to the three "chunks" of the buffer because
52 // the performance drops significantly if the locations of the vals and edges need to be
53 // recalculated upon access.
55 // These will never be null during normal usage of a `Node`. However, to avoid the need for a
56 // drop flag, `Node::drop` zeroes `keys`, signaling that the `Node` has already been cleaned
61 // In leaf nodes, this will be None, and no space will be allocated for edges.
62 edges: Option<Unique<Node<K, V>>>,
64 // At any given time, there will be `_len` keys, `_len` values, and (in an internal node)
65 // `_len + 1` edges. In a leaf node, there will never be any edges.
67 // Note: instead of accessing this field directly, please call the `len()` method, which should
68 // be more stable in the face of representation changes.
71 // FIXME(gereeter) It shouldn't be necessary to store the capacity in every node, as it should
72 // be constant throughout the tree. Once a solution to this is found, it might be possible to
73 // also pass down the offsets into the buffer that vals and edges are stored at, removing the
74 // need for those two pointers.
76 // Note: instead of accessing this field directly, please call the `capacity()` method, which
77 // should be more stable in the face of representation changes.
81 struct NodeSlice<'a, K: 'a, V: 'a> {
84 pub edges: &'a [Node<K, V>],
90 struct MutNodeSlice<'a, K: 'a, V: 'a> {
93 pub edges: &'a mut [Node<K, V>],
/// Rounds `unrounded` up to the nearest multiple of `target_alignment`,
/// which must be a power of two. Returns `unrounded` unchanged when it is
/// already such a multiple.
///
/// # Panics
///
/// Panics if `target_alignment` is not a power of two.
fn round_up_to_next(unrounded: usize, target_alignment: usize) -> usize {
    assert!(target_alignment.is_power_of_two());
    // Power-of-two trick: add the mask, then clear the low bits to round up.
    let mask = target_alignment - 1;
    (unrounded + mask) & !mask
}
113 assert_eq!(round_up_to_next(0, 4), 0);
114 assert_eq!(round_up_to_next(1, 4), 4);
115 assert_eq!(round_up_to_next(2, 4), 4);
116 assert_eq!(round_up_to_next(3, 4), 4);
117 assert_eq!(round_up_to_next(4, 4), 4);
118 assert_eq!(round_up_to_next(5, 4), 8);
121 // Returns a tuple of (val_offset, edge_offset),
122 // from the start of a mallocated array.
124 fn calculate_offsets(keys_size: usize,
129 let vals_offset = round_up_to_next(keys_size, vals_align);
130 let end_of_vals = vals_offset + vals_size;
132 let edges_offset = round_up_to_next(end_of_vals, edges_align);
134 (vals_offset, edges_offset)
137 // Returns a tuple of (minimum required alignment, array_size),
138 // from the start of a mallocated array.
140 fn calculate_allocation(keys_size: usize,
147 let (_, edges_offset) = calculate_offsets(keys_size, vals_size, vals_align, edges_align);
148 let end_of_edges = edges_offset + edges_size;
150 let min_align = cmp::max(keys_align, cmp::max(vals_align, edges_align));
152 (min_align, end_of_edges)
// Sanity-checks the raw layout math against hand-computed expected values.
fn test_offset_calculation() {
    assert_eq!(calculate_allocation(128, 8, 15, 1, 4, 4), (8, 148));
    assert_eq!(calculate_allocation(3, 1, 2, 1, 1, 1), (1, 6));
    assert_eq!(calculate_allocation(6, 2, 12, 4, 24, 8), (8, 48));
    assert_eq!(calculate_offsets(128, 15, 1, 4), (128, 144));
    assert_eq!(calculate_offsets(3, 2, 1, 1), (3, 5));
    assert_eq!(calculate_offsets(6, 12, 4, 8), (8, 24));
// Computes the (alignment, total size) of the single buffer backing a node of
// the given capacity: `capacity` keys, `capacity` values, and (for internal
// nodes) `capacity + 1` edges.
fn calculate_allocation_generic<K, V>(capacity: usize, is_leaf: bool) -> (usize, usize) {
    let (keys_size, keys_align) = (capacity * mem::size_of::<K>(), mem::align_of::<K>());
    let (vals_size, vals_align) = (capacity * mem::size_of::<V>(), mem::align_of::<V>());
    let (edges_size, edges_align) = if is_leaf {
        // allocate one edge to ensure that we don't pass size 0 to `heap::allocate`
        if mem::size_of::<K>() == 0 && mem::size_of::<V>() == 0 {
            (1, mem::align_of::<Node<K, V>>())
        // Internal nodes keep one more edge than key/value pairs.
        ((capacity + 1) * mem::size_of::<Node<K, V>>(),
         mem::align_of::<Node<K, V>>())

    calculate_allocation(keys_size,
// Computes where the value and edge chunks begin inside a node buffer of the
// given capacity; must agree with the layout from `calculate_allocation_generic`.
fn calculate_offsets_generic<K, V>(capacity: usize, is_leaf: bool) -> (usize, usize) {
    let keys_size = capacity * mem::size_of::<K>();
    let vals_size = capacity * mem::size_of::<V>();
    let vals_align = mem::align_of::<V>();
    let edges_align = if is_leaf {
        mem::align_of::<Node<K, V>>()

    calculate_offsets(keys_size, vals_size, vals_align, edges_align)
/// An iterator over a slice that owns the elements of the slice but not the allocation.
impl<T> RawItems<T> {
    // Caller must guarantee nothing else will drop the slice's elements.
    unsafe fn from_slice(slice: &[T]) -> RawItems<T> {
        RawItems::from_parts(slice.as_ptr(), slice.len())

    unsafe fn from_parts(ptr: *const T, len: usize) -> RawItems<T> {
        if mem::size_of::<T>() == 0 {
            // Zero-sized T: step the raw address instead of using `ptr::offset`.
            tail: arith_offset(ptr as *const i8, len as isize) as *const T,

            tail: ptr.offset(len as isize),

    // Appends `val` at `tail`; caller must guarantee the backing storage has room.
    unsafe fn push(&mut self, val: T) {
        ptr::write(self.tail as *mut T, val);

        if mem::size_of::<T>() == 0 {
            self.tail = arith_offset(self.tail as *const i8, 1) as *const T;

            self.tail = self.tail.offset(1);
impl<T> Iterator for RawItems<T> {
    fn next(&mut self) -> Option<T> {
        // The range is exhausted once the two cursors meet.
        if self.head == self.tail {

        // Move the front element out by value; the allocation itself is not owned.
        let ret = Some(ptr::read(self.head));

        if mem::size_of::<T>() == 0 {
            // Zero-sized T: advance the raw address rather than using `ptr::offset`.
            self.head = arith_offset(self.head as *const i8, 1) as *const T;

            self.head = self.head.offset(1);
impl<T> DoubleEndedIterator for RawItems<T> {
    fn next_back(&mut self) -> Option<T> {
        if self.head == self.tail {

        // Retreat `tail` first, then read the element it now points at.
        if mem::size_of::<T>() == 0 {
            self.tail = arith_offset(self.tail as *const i8, -1) as *const T;

            self.tail = self.tail.offset(-1);

        Some(ptr::read(self.tail))
277 impl<T> Drop for RawItems<T> {
278 #[unsafe_destructor_blind_to_params]
impl<K, V> Drop for Node<K, V> {
    #[unsafe_destructor_blind_to_params]
        // A null or post-drop `keys` means this node was already cleaned up.
        if self.keys.is_null() ||
           (unsafe { self.keys.get() as *const K as usize == mem::POST_DROP_USIZE }) {
            // Since we have #[unsafe_no_drop_flag], we have to watch
            // out for the sentinel value being stored in self.keys. (Using
            // null is technically a violation of the `Unique`
            // requirements, though.)

        // Do the actual cleanup.
        drop(RawItems::from_slice(self.keys()));
        drop(RawItems::from_slice(self.vals()));
        drop(RawItems::from_slice(self.edges()));

        // Zero `keys` so a second drop of this node takes the early-out above.
        self.keys = unsafe { Unique::new(ptr::null_mut()) };
impl<K, V> Node<K, V> {
    /// Make a new internal node. The caller must initialize the result to fix the invariant that
    /// there are `len() + 1` edges.
    unsafe fn new_internal(capacity: usize) -> Node<K, V> {
        let (alignment, size) = calculate_allocation_generic::<K, V>(capacity, false);

        let buffer = heap::allocate(size, alignment);
        if buffer.is_null() {

        // Carve the single buffer into key/value/edge chunks (see layout comments above).
        let (vals_offset, edges_offset) = calculate_offsets_generic::<K, V>(capacity, false);

            keys: Unique::new(buffer as *mut K),
            vals: Unique::new(buffer.offset(vals_offset as isize) as *mut V),
            edges: Some(Unique::new(buffer.offset(edges_offset as isize) as *mut Node<K, V>)),

    /// Make a new leaf node
    fn new_leaf(capacity: usize) -> Node<K, V> {
        let (alignment, size) = calculate_allocation_generic::<K, V>(capacity, true);

        let buffer = unsafe { heap::allocate(size, alignment) };
        if buffer.is_null() {

        let (vals_offset, _) = calculate_offsets_generic::<K, V>(capacity, true);

            keys: unsafe { Unique::new(buffer as *mut K) },
            vals: unsafe { Unique::new(buffer.offset(vals_offset as isize) as *mut V) },

    // Frees the node's buffer. NOTE(review): appears to only deallocate — the
    // elements themselves must already have been dropped by the caller; confirm.
    unsafe fn destroy(&mut self) {
        let (alignment, size) = calculate_allocation_generic::<K, V>(self.capacity(),
        heap::deallocate(*self.keys as *mut u8, size, alignment);

    pub fn as_slices<'a>(&'a self) -> (&'a [K], &'a [V]) {
        (slice::from_raw_parts(*self.keys, self.len()),
         slice::from_raw_parts(*self.vals, self.len()))

    pub fn as_slices_mut<'a>(&'a mut self) -> (&'a mut [K], &'a mut [V]) {
        (slice::from_raw_parts_mut(*self.keys, self.len()),
         slice::from_raw_parts_mut(*self.vals, self.len()))

    pub fn as_slices_internal<'b>(&'b self) -> NodeSlice<'b, K, V> {
        let is_leaf = self.is_leaf();
        let (keys, vals) = self.as_slices();
        let edges: &[_] = if self.is_leaf() {

            let data = match self.edges {
                // No edge allocation: substitute the canonical EMPTY pointer, never null.
                None => heap::EMPTY as *const Node<K, V>,
                Some(ref p) => **p as *const Node<K, V>,
            slice::from_raw_parts(data, self.len() + 1)

    pub fn as_slices_internal_mut<'b>(&'b mut self) -> MutNodeSlice<'b, K, V> {
        let len = self.len();
        let is_leaf = self.is_leaf();
        let keys = unsafe { slice::from_raw_parts_mut(*self.keys, len) };
        let vals = unsafe { slice::from_raw_parts_mut(*self.vals, len) };
        let edges: &mut [_] = if is_leaf {

            let data = match self.edges {
                None => heap::EMPTY as *mut Node<K, V>,
                Some(ref mut p) => **p as *mut Node<K, V>,
            slice::from_raw_parts_mut(data, len + 1)

    pub fn keys<'a>(&'a self) -> &'a [K] {

    pub fn keys_mut<'a>(&'a mut self) -> &'a mut [K] {
        self.as_slices_mut().0

    pub fn vals<'a>(&'a self) -> &'a [V] {

    pub fn vals_mut<'a>(&'a mut self) -> &'a mut [V] {
        self.as_slices_mut().1

    pub fn edges<'a>(&'a self) -> &'a [Node<K, V>] {
        self.as_slices_internal().edges

    pub fn edges_mut<'a>(&'a mut self) -> &'a mut [Node<K, V>] {
        self.as_slices_internal_mut().edges
// FIXME(gereeter) Write an efficient clone_from
impl<K: Clone, V: Clone> Clone for Node<K, V> {
    fn clone(&self) -> Node<K, V> {
        // Allocate a fresh node of the same shape (leaf vs internal) and capacity.
        let mut ret = if self.is_leaf() {
            Node::new_leaf(self.capacity())

            unsafe { Node::new_internal(self.capacity()) }

        // For failure safety
        // (if a `clone` below panics, these RawItems clean up whatever was already pushed)
        let mut keys = RawItems::from_parts(ret.keys().as_ptr(), 0);
        let mut vals = RawItems::from_parts(ret.vals().as_ptr(), 0);
        let mut edges = RawItems::from_parts(ret.edges().as_ptr(), 0);

        for key in self.keys() {
            keys.push(key.clone())

        for val in self.vals() {
            vals.push(val.clone())

        for edge in self.edges() {
            edges.push(edge.clone())

        // Only now does the new node logically own the cloned elements.
        ret._len = self.len();
492 /// A reference to something in the middle of a `Node`. There are two `Type`s of `Handle`s,
493 /// namely `KV` handles, which point to key/value pairs, and `Edge` handles, which point to edges
494 /// before or after key/value pairs. Methods are provided for removing pairs, inserting into edges,
495 /// accessing the stored values, and moving around the `Node`.
497 /// This handle is generic, and can take any sort of reference to a `Node`. The reason for this is
498 /// two-fold. First of all, it reduces the amount of repetitive code, implementing functions that
499 /// don't need mutability on both mutable and immutable references. Secondly and more importantly,
500 /// this allows users of the `Handle` API to associate metadata with the reference. This is used in
501 /// `BTreeMap` to give `Node`s temporary "IDs" that persist to when the `Node` is used in a
504 /// # A note on safety
506 /// Unfortunately, the extra power afforded by being generic also means that safety can technically
507 /// be broken. For sensible implementations of `Deref` and `DerefMut`, these handles are perfectly
508 /// safe. As long as repeatedly calling `.deref()` results in the same Node being returned each
509 /// time, everything should work fine. However, if the `Deref` implementation swaps in multiple
510 /// different nodes, then the indices that are assumed to be in bounds suddenly stop being so. For
514 /// struct Nasty<'a> {
515 /// first: &'a Node<usize, usize>,
516 /// second: &'a Node<usize, usize>,
517 /// flag: &'a Cell<bool>,
520 /// impl<'a> Deref for Nasty<'a> {
521 /// type Target = Node<usize, usize>;
523 /// fn deref(&self) -> &Node<usize, usize> {
524 /// if self.flag.get() {
533 /// let flag = Cell::new(false);
534 /// let mut small_node = Node::make_leaf_root(3);
535 /// let mut large_node = Node::make_leaf_root(100);
537 /// for i in 0..100 {
538 /// // Insert to the end
539 /// large_node.edge_handle(i).insert_as_leaf(i, i);
542 /// let nasty = Nasty {
543 /// first: &large_node,
544 /// second: &small_node,
548 /// // The handle points at index 75.
549 /// let handle = Node::search(nasty, 75);
551 /// // Now the handle still points at index 75, but on the small node, which has no index 75.
554 /// println!("Uninitialized memory: {:?}", handle.into_kv());
557 #[derive(Copy, Clone)]
558 pub struct Handle<NodeRef, Type, NodeType> {
561 marker: PhantomData<(Type, NodeType)>,
569 // Handle node types.
570 pub enum LeafOrInternal {}
impl<K: Ord, V> Node<K, V> {
    /// Searches for the given key in the node. If it finds an exact match,
    /// `Found` will be yielded with the matching index. If it doesn't find an exact match,
    /// `GoDown` will be yielded with the index of the subtree the key must lie in.
    pub fn search<Q: ?Sized, NodeRef: Deref<Target = Node<K, V>>>(node: NodeRef,
                                                                  -> SearchResult<NodeRef>

        // FIXME(Gankro): Tune when to search linear or binary based on B (and maybe K/V).
        // For the B configured as of this writing (B = 6), binary search was *significantly*
        match node.as_slices_internal().search_linear(key) {
impl<K, V> Node<K, V> {
    /// Make a leaf root from scratch
    pub fn make_leaf_root(b: usize) -> Node<K, V> {
        Node::new_leaf(capacity_from_b(b))

    /// Make an internal root and swap it with an old root
    pub fn make_internal_root(left_and_out: &mut Node<K, V>,
        // The old root becomes the leftmost child of the new internal root.
        let node = mem::replace(left_and_out,
                                unsafe { Node::new_internal(capacity_from_b(b)) });
        left_and_out._len = 1;

        ptr::write(left_and_out.keys_mut().get_unchecked_mut(0), key);
        ptr::write(left_and_out.vals_mut().get_unchecked_mut(0), value);
        ptr::write(left_and_out.edges_mut().get_unchecked_mut(0), node);
        ptr::write(left_and_out.edges_mut().get_unchecked_mut(1), right);

    /// How many key-value pairs the node contains
    pub fn len(&self) -> usize {

    /// Does the node not contain any key-value pairs
    pub fn is_empty(&self) -> bool {

    /// How many key-value pairs the node can fit
    pub fn capacity(&self) -> usize {

    /// If the node has any children
    pub fn is_leaf(&self) -> bool {

    /// if the node has too few elements
    pub fn is_underfull(&self) -> bool {
        self.len() < min_load_from_capacity(self.capacity())

    /// if the node cannot fit any more elements
    pub fn is_full(&self) -> bool {
        self.len() == self.capacity()
impl<K, V, NodeRef: Deref<Target = Node<K, V>>, Type, NodeType> Handle<NodeRef, Type, NodeType> {
    /// Returns a reference to the node that contains the pointed-to edge or key/value pair. This
    /// is very different from `edge` and `edge_mut` because those return children of the node
    /// returned by `node`.
    pub fn node(&self) -> &Node<K, V> {
impl<K, V, NodeRef, Type, NodeType> Handle<NodeRef, Type, NodeType>
    where NodeRef: Deref<Target = Node<K, V>> + DerefMut
    /// Converts a handle into one that stores the same information using a raw pointer. This can
    /// be useful in conjunction with `from_raw` when the type system is insufficient for
    /// determining the lifetimes of the nodes.
    pub fn as_raw(&mut self) -> Handle<*mut Node<K, V>, Type, NodeType> {

            // Deref to the node, then erase the lifetime with a raw pointer.
            node: &mut *self.node as *mut _,
impl<K, V, Type, NodeType> Handle<*mut Node<K, V>, Type, NodeType> {
    /// Converts from a handle stored with a raw pointer, which isn't directly usable, to a handle
    /// stored with a reference. This is an unsafe inverse of `as_raw`, and together they allow
    /// unsafely extending the lifetime of the reference to the `Node`.
    pub unsafe fn from_raw<'a>(&'a self) -> Handle<&'a Node<K, V>, Type, NodeType> {

    /// Converts from a handle stored with a raw pointer, which isn't directly usable, to a handle
    /// stored with a mutable reference. This is an unsafe inverse of `as_raw`, and together they
    /// allow unsafely extending the lifetime of the reference to the `Node`.
    pub unsafe fn from_raw_mut<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, Type, NodeType> {

            // Caller guarantees the pointer is still valid for this lifetime.
            node: &mut *self.node,
impl<'a, K: 'a, V: 'a> Handle<&'a Node<K, V>, handle::Edge, handle::Internal> {
    /// Turns the handle into a reference to the edge it points at. This is necessary because the
    /// returned pointer has a larger lifetime than what would be returned by `edge` or `edge_mut`,
    /// making it more suitable for moving down a chain of nodes.
    pub fn into_edge(self) -> &'a Node<K, V> {
        // SAFETY: handles carry in-bounds indices (see "A note on safety" above).
        unsafe { self.node.edges().get_unchecked(self.index) }
impl<'a, K: 'a, V: 'a> Handle<&'a mut Node<K, V>, handle::Edge, handle::Internal> {
    /// Turns the handle into a mutable reference to the edge it points at. This is necessary
    /// because the returned pointer has a larger lifetime than what would be returned by
    /// `edge_mut`, making it more suitable for moving down a chain of nodes.
    pub fn into_edge_mut(self) -> &'a mut Node<K, V> {
        // SAFETY: handles carry in-bounds indices (see "A note on safety" above).
        unsafe { self.node.edges_mut().get_unchecked_mut(self.index) }
728 impl<K, V, NodeRef: Deref<Target = Node<K, V>>> Handle<NodeRef, handle::Edge, handle::Internal> {
729 // This doesn't exist because there are no uses for it,
730 // but is fine to add, analogous to edge_mut.
732 // /// Returns a reference to the edge pointed-to by this handle. This should not be
733 // /// confused with `node`, which references the parent node of what is returned here.
734 // pub fn edge(&self) -> &Node<K, V>
pub enum ForceResult<NodeRef, Type> {
    /// The handle's node turned out to be a leaf.
    Leaf(Handle<NodeRef, Type, handle::Leaf>),
    /// The handle's node turned out to be internal.
    Internal(Handle<NodeRef, Type, handle::Internal>),
impl<K, V, NodeRef: Deref<Target = Node<K, V>>, Type>
    Handle<NodeRef, Type, handle::LeafOrInternal>
    /// Figure out whether this handle is pointing to something in a leaf node or to something in
    /// an internal node, clarifying the type according to the result.
    pub fn force(self) -> ForceResult<NodeRef, Type> {
        if self.node.is_leaf() {
impl<K, V, NodeRef> Handle<NodeRef, handle::Edge, handle::Leaf>
    where NodeRef: Deref<Target = Node<K, V>> + DerefMut
    /// Tries to insert this key-value pair at the given index in this leaf node
    /// If the node is full, we have to split it.
    /// Returns a *mut V to the inserted value, because the caller may want this when
    /// they're done mutating the tree, but we don't want to borrow anything for now.
    pub fn insert_as_leaf(mut self, key: K, value: V) -> (InsertionResult<K, V>, *mut V) {
        if !self.node.is_full() {
            // The element can fit, just insert it
            (Fit, unsafe { self.node.insert_kv(self.index, key, value) as *mut _ })

            // The element can't fit, this node is full. Split it into two nodes.
            let (new_key, new_val, mut new_right) = self.node.split();
            let left_len = self.node.len();

            // Insert into whichever half now owns this index.
            if self.index <= left_len {
                self.node.insert_kv(self.index, key, value)

                // We need to subtract 1 because in splitting we took out new_key and new_val.
                // Just being in the right node means we are past left_len k/v pairs in the
                // left node and 1 k/v pair in the parent node.
                new_right.insert_kv(self.index - left_len - 1, key, value)

            (Split(new_key, new_val, new_right), ptr)
impl<K, V, NodeRef> Handle<NodeRef, handle::Edge, handle::Internal>
    where NodeRef: Deref<Target = Node<K, V>> + DerefMut
    /// Returns a mutable reference to the edge pointed-to by this handle. This should not be
    /// confused with `node`, which references the parent node of what is returned here.
    pub fn edge_mut(&mut self) -> &mut Node<K, V> {
        unsafe { self.node.edges_mut().get_unchecked_mut(self.index) }

    /// Tries to insert this key-value pair at the given index in this internal node
    /// If the node is full, we have to split it.
    pub fn insert_as_internal(mut self,
                              -> InsertionResult<K, V> {
        if !self.node.is_full() {
            // The element can fit, just insert it
            self.node.insert_kv(self.index, key, value);
            self.node.insert_edge(self.index + 1, right); // +1 to insert to the right

            // The element can't fit, this node is full. Split it into two nodes.
            let (new_key, new_val, mut new_right) = self.node.split();
            let left_len = self.node.len();

            if self.index <= left_len {
                self.node.insert_kv(self.index, key, value);
                self.node.insert_edge(self.index + 1, right); // +1 to insert to the right

                // The -1 here is for the same reason as in insert_as_internal - because we
                // split, there are actually left_len + 1 k/v pairs before the right node, with
                // the extra 1 being put in the parent.
                new_right.insert_kv(self.index - left_len - 1, key, value);
                new_right.insert_edge(self.index - left_len, right);

            Split(new_key, new_val, new_right)

    /// Handle an underflow in this node's child. We favor handling "to the left" because we know
    /// we're empty, but our neighbour can be full. Handling to the left means when we choose to
    /// steal, we pop off the end of our neighbour (always fast) and "unshift" ourselves
    /// (always slow, but at least faster since we know we're half-empty).
    /// Handling "to the right" reverses these roles. Of course, we merge whenever possible
    /// because we want dense nodes, and merging is about equal work regardless of direction.
    pub fn handle_underflow(mut self) {
            self.handle_underflow_to_left();

            self.handle_underflow_to_right();

    /// Right is underflowed. Tries to steal from left,
    /// but merges left and right if left is low too.
    unsafe fn handle_underflow_to_left(&mut self) {
        let left_len = self.node.edges()[self.index - 1].len();
        if left_len > min_load_from_capacity(self.node.capacity()) {
            // Left sibling can spare a pair: rotate it rightward into the child.
            self.left_kv().steal_rightward();

            // Otherwise collapse left and right into one dense node.
            self.left_kv().merge_children();

    /// Left is underflowed. Tries to steal from the right,
    /// but merges left and right if right is low too.
    unsafe fn handle_underflow_to_right(&mut self) {
        let right_len = self.node.edges()[self.index + 1].len();
        if right_len > min_load_from_capacity(self.node.capacity()) {
            self.right_kv().steal_leftward();

            self.right_kv().merge_children();
impl<K, V, NodeRef, NodeType> Handle<NodeRef, handle::Edge, NodeType>
    where NodeRef: Deref<Target = Node<K, V>> + DerefMut
    /// Gets the handle pointing to the key/value pair just to the left of the pointed-to edge.
    /// This is unsafe because the handle might point to the first edge in the node, which has no
    /// pair to its left.
    unsafe fn left_kv<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
            node: &mut *self.node,
            // Edge i sits between pair i-1 and pair i.
            index: self.index - 1,

    /// Gets the handle pointing to the key/value pair just to the right of the pointed-to edge.
    /// This is unsafe because the handle might point to the last edge in the node, which has no
    /// pair to its right.
    unsafe fn right_kv<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
            node: &mut *self.node,
impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a Node<K, V>, handle::KV, NodeType> {
    /// Turns the handle into references to the key and value it points at. This is necessary
    /// because the returned pointers have larger lifetimes than what would be returned by `key`
    pub fn into_kv(self) -> (&'a K, &'a V) {
        let (keys, vals) = self.node.as_slices();
        // SAFETY: KV handles carry in-bounds indices (see "A note on safety" above).
        (keys.get_unchecked(self.index),
         vals.get_unchecked(self.index))
impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
    /// Turns the handle into mutable references to the key and value it points at. This is
    /// necessary because the returned pointers have larger lifetimes than what would be returned
    /// by `key_mut` or `val_mut`.
    pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
        let (keys, vals) = self.node.as_slices_mut();
        // SAFETY: KV handles carry in-bounds indices (see "A note on safety" above).
        (keys.get_unchecked_mut(self.index),
         vals.get_unchecked_mut(self.index))

    /// Convert this handle into one pointing at the edge immediately to the left of the key/value
    /// pair pointed-to by this handle. This is useful because it returns a reference with larger
    /// lifetime than `left_edge`.
    pub fn into_left_edge(self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
            node: &mut *self.node,
946 impl<'a, K: 'a, V: 'a, NodeRef: Deref<Target = Node<K, V>> + 'a, NodeType> Handle<NodeRef,
949 // These are fine to include, but are currently unneeded.
951 // /// Returns a reference to the key pointed-to by this handle. This doesn't return a
952 // /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
954 // pub fn key(&'a self) -> &'a K {
955 // unsafe { self.node.keys().get_unchecked(self.index) }
958 // /// Returns a reference to the value pointed-to by this handle. This doesn't return a
959 // /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
961 // pub fn val(&'a self) -> &'a V {
962 // unsafe { self.node.vals().get_unchecked(self.index) }
impl<'a, K: 'a, V: 'a, NodeRef, NodeType> Handle<NodeRef, handle::KV, NodeType>
    where NodeRef: 'a + Deref<Target = Node<K, V>> + DerefMut
    /// Returns a mutable reference to the key pointed-to by this handle. This doesn't return a
    /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
    pub fn key_mut(&'a mut self) -> &'a mut K {
        unsafe { self.node.keys_mut().get_unchecked_mut(self.index) }

    /// Returns a mutable reference to the value pointed-to by this handle. This doesn't return a
    /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
    pub fn val_mut(&'a mut self) -> &'a mut V {
        unsafe { self.node.vals_mut().get_unchecked_mut(self.index) }
impl<K, V, NodeRef, NodeType> Handle<NodeRef, handle::KV, NodeType>
    where NodeRef: Deref<Target = Node<K, V>> + DerefMut
    /// Gets the handle pointing to the edge immediately to the left of the key/value pair pointed
    /// to by this handle.
    pub fn left_edge<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
            node: &mut *self.node,

    /// Gets the handle pointing to the edge immediately to the right of the key/value pair pointed
    /// to by this handle.
    pub fn right_edge<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
            node: &mut *self.node,
            // Pair i sits between edge i and edge i+1.
            index: self.index + 1,
            marker: PhantomData,
impl<K, V, NodeRef> Handle<NodeRef, handle::KV, handle::Leaf>
    where NodeRef: Deref<Target = Node<K, V>> + DerefMut
    /// Removes the key/value pair at the handle's location.
    /// # Panics (in debug build)
    /// Panics if the node containing the pair is not a leaf node.
    pub fn remove_as_leaf(mut self) -> (K, V) {
        unsafe { self.node.remove_kv(self.index) }
impl<K, V, NodeRef> Handle<NodeRef, handle::KV, handle::Internal>
    where NodeRef: Deref<Target = Node<K, V>> + DerefMut
    /// Steal! Stealing is roughly analogous to a binary tree rotation.
    /// In this case, we're "rotating" right.
    unsafe fn steal_rightward(&mut self) {
        // Take the biggest stuff off left
        let (mut key, mut val, edge) = {
            let mut left_handle = self.left_edge();
            let left = left_handle.edge_mut();
            let (key, val) = left.pop_kv();
            let edge = if left.is_leaf() {

                // Internal children also move their trailing edge along.
                Some(left.pop_edge())

        // Swap the parent's separating key-value pair with left's
        mem::swap(&mut key, self.key_mut());
        mem::swap(&mut val, self.val_mut());

        // Put them at the start of right
        let mut right_handle = self.right_edge();
        let right = right_handle.edge_mut();
        right.insert_kv(0, key, val);
            Some(edge) => right.insert_edge(0, edge),

    /// Steal! Stealing is roughly analogous to a binary tree rotation.
    /// In this case, we're "rotating" left.
    unsafe fn steal_leftward(&mut self) {
        // Take the smallest stuff off right
        let (mut key, mut val, edge) = {
            let mut right_handle = self.right_edge();
            let right = right_handle.edge_mut();
            let (key, val) = right.remove_kv(0);
            let edge = if right.is_leaf() {

                Some(right.remove_edge(0))

        // Swap the parent's separating key-value pair with right's
        mem::swap(&mut key, self.key_mut());
        mem::swap(&mut val, self.val_mut());

        // Put them at the end of left
        let mut left_handle = self.left_edge();
        let left = left_handle.edge_mut();
        left.push_kv(key, val);
            Some(edge) => left.push_edge(edge),

    /// Merge! Smooshes left and right into one node, along with the key-value
    /// pair that separated them in their parent.
    unsafe fn merge_children(mut self) {
        // Permanently remove right's index, and the key-value pair that separates
        let (key, val) = self.node.remove_kv(self.index);
        let right = self.node.remove_edge(self.index + 1);

        // Give left right's stuff.
            .absorb(key, val, right);
impl<K, V> Node<K, V> {
    /// Returns the mutable handle pointing to the key/value pair at a given index.
    /// # Panics (in debug build)
    /// Panics if the given index is out of bounds.
    pub fn kv_handle(&mut self,
                     -> Handle<&mut Node<K, V>, handle::KV, handle::LeafOrInternal> {
        // Necessary for correctness, but in a private module
        debug_assert!(index < self.len(), "kv_handle index out of bounds");
            marker: PhantomData,

    pub fn iter<'a>(&'a self) -> Traversal<'a, K, V> {
        self.as_slices_internal().iter()

    pub fn iter_mut<'a>(&'a mut self) -> MutTraversal<'a, K, V> {
        self.as_slices_internal_mut().iter_mut()

    // Consumes the node, producing an owning traversal over its contents.
    pub fn into_iter(self) -> MoveTraversal<K, V> {
        let ret = MoveTraversal {
            inner: MoveTraversalImpl {
                // RawItems take ownership of the elements; the buffer itself is
                // tracked separately below so it can be freed afterwards.
                keys: RawItems::from_slice(self.keys()),
                vals: RawItems::from_slice(self.vals()),
                edges: RawItems::from_slice(self.edges()),

                ptr: Unique::new(*self.keys as *mut u8),
                capacity: self.capacity(),
                is_leaf: self.is_leaf(),

            has_edges: !self.is_leaf(),

    /// When a node has no keys or values and only a single edge, extract that edge.
    pub fn hoist_lone_child(&mut self) {
        // Necessary for correctness, but in a private module
        debug_assert!(self.is_empty());
        debug_assert!(!self.is_leaf());

        // Move the lone child up over `self`, replacing this node wholesale.
        let ret = ptr::read(self.edges().get_unchecked(0));
        ptr::write(self, ret);
1162 // Vector functions (all unchecked)
1163 impl<K, V> Node<K, V> {
1164 // This must be followed by push_edge on an internal node.
// Appends a key/value pair at position `len` with no capacity check; the
// caller must guarantee there is room.
1166 unsafe fn push_kv(&mut self, key: K, val: V) {
1167 let len = self.len();
1169 ptr::write(self.keys_mut().get_unchecked_mut(len), key);
1170 ptr::write(self.vals_mut().get_unchecked_mut(len), val);
// NOTE(review): the length-increment line is not visible in this fragment.
1175 // This can only be called immediately after a call to push_kv.
// Appends the edge to the right of the just-pushed pair. Indexing the edge
// array with the current `len` presumably relies on push_kv having already
// bumped the length — confirm against the full source.
1177 unsafe fn push_edge(&mut self, edge: Node<K, V>) {
1178 let len = self.len();
1180 ptr::write(self.edges_mut().get_unchecked_mut(len), edge);
1183 // This must be followed by insert_edge on an internal node.
// Shifts keys/vals in `index..len` right by one slot (the regions overlap,
// hence ptr::copy rather than copy_nonoverlapping), then writes the new pair
// into the hole and returns a mutable reference to the inserted value.
1185 unsafe fn insert_kv(&mut self, index: usize, key: K, val: V) -> &mut V {
1186 ptr::copy(self.keys().as_ptr().offset(index as isize),
1187 self.keys_mut().as_mut_ptr().offset(index as isize + 1),
1188 self.len() - index);
1189 ptr::copy(self.vals().as_ptr().offset(index as isize),
1190 self.vals_mut().as_mut_ptr().offset(index as isize + 1),
1191 self.len() - index);
1193 ptr::write(self.keys_mut().get_unchecked_mut(index), key);
1194 ptr::write(self.vals_mut().get_unchecked_mut(index), val);
// NOTE(review): the length increment is on a line missing from this fragment.
1198 self.vals_mut().get_unchecked_mut(index)
1201 // This can only be called immediately after a call to insert_kv.
// Same right-shift scheme for the edge array. The `self.len() - index` count
// presumably relies on the length already updated by insert_kv (see the
// contract comment above) — confirm against the full source.
1203 unsafe fn insert_edge(&mut self, index: usize, edge: Node<K, V>) {
1204 ptr::copy(self.edges().as_ptr().offset(index as isize),
1205 self.edges_mut().as_mut_ptr().offset(index as isize + 1),
1206 self.len() - index);
1207 ptr::write(self.edges_mut().get_unchecked_mut(index), edge);
1210 // This must be followed by pop_edge on an internal node.
// Reads the last key/value pair out by raw read (the slots are not dropped;
// ownership moves to the returned pair).
1212 unsafe fn pop_kv(&mut self) -> (K, V) {
1213 let key = ptr::read(self.keys().get_unchecked(self.len() - 1));
1214 let val = ptr::read(self.vals().get_unchecked(self.len() - 1));
// NOTE(review): the length decrement and the `(key, val)` return are on lines
// missing from this fragment.
1221 // This can only be called immediately after a call to pop_kv.
// Index `len + 1` matches the old last edge on the assumption that pop_kv has
// already decremented the length (per the contract above) — confirm against
// the full source.
1223 unsafe fn pop_edge(&mut self) -> Node<K, V> {
1224 let edge = ptr::read(self.edges().get_unchecked(self.len() + 1));
1229 // This must be followed by remove_edge on an internal node.
// Reads the pair out of slot `index`, then shifts everything after it left by
// one (overlapping regions, hence ptr::copy rather than the nonoverlapping
// variant).
1231 unsafe fn remove_kv(&mut self, index: usize) -> (K, V) {
1232 let key = ptr::read(self.keys().get_unchecked(index));
1233 let val = ptr::read(self.vals().get_unchecked(index));
1235 ptr::copy(self.keys().as_ptr().offset(index as isize + 1),
1236 self.keys_mut().as_mut_ptr().offset(index as isize),
1237 self.len() - index - 1);
1238 ptr::copy(self.vals().as_ptr().offset(index as isize + 1),
1239 self.vals_mut().as_mut_ptr().offset(index as isize),
1240 self.len() - index - 1);
// NOTE(review): the length decrement and the return value are on lines
// missing from this fragment.
1247 // This can only be called immediately after a call to remove_kv.
// Same left-shift for the edge array. An internal node holds one more edge
// than key/value pairs; the count below presumably accounts for remove_kv
// having already shrunk the length — confirm against the full source.
1249 unsafe fn remove_edge(&mut self, index: usize) -> Node<K, V> {
1250 let edge = ptr::read(self.edges().get_unchecked(index));
1252 ptr::copy(self.edges().as_ptr().offset(index as isize + 1),
1253 self.edges_mut().as_mut_ptr().offset(index as isize),
1254 // index can be == len+1, so do the +1 first to avoid underflow.
1255 (self.len() + 1) - index);
1261 // Private implementation details
1262 impl<K, V> Node<K, V> {
1263 /// Node is full, so split it into two nodes, and yield the middle-most key-value pair
1264 /// because we have one too many, and our parent now has one too few
1265 fn split(&mut self) -> (K, V, Node<K, V>) {
1266 // Necessary for correctness, but in a private function
1267 debug_assert!(!self.is_empty());
// Allocate the sibling with the same capacity and the same leaf-ness.
1269 let mut right = if self.is_leaf() {
1270 Node::new_leaf(self.capacity())
1272 unsafe { Node::new_internal(self.capacity()) }
// Right takes the upper half (len / 2 pairs). The copies below run between
// two distinct allocations, so the nonoverlapping variant is correct.
1276 right._len = self.len() / 2;
1277 let right_offset = self.len() - right.len();
1278 ptr::copy_nonoverlapping(self.keys().as_ptr().offset(right_offset as isize),
1279 right.keys_mut().as_mut_ptr(),
// NOTE(review): the element-count arguments of these copies are on lines
// missing from this fragment.
1281 ptr::copy_nonoverlapping(self.vals().as_ptr().offset(right_offset as isize),
1282 right.vals_mut().as_mut_ptr(),
// Internal nodes also move the matching child edges.
1284 if !self.is_leaf() {
1285 ptr::copy_nonoverlapping(self.edges().as_ptr().offset(right_offset as isize),
1286 right.edges_mut().as_mut_ptr(),
// The pair just below the split point becomes the separator handed up to the
// parent; it is read out raw and excluded from both halves (left's new
// length is right_offset - 1).
1290 let key = ptr::read(self.keys().get_unchecked(right_offset - 1));
1291 let val = ptr::read(self.vals().get_unchecked(right_offset - 1));
1293 self._len = right_offset - 1;
1299 /// Take all the values from right, separated by the given key and value
1300 fn absorb(&mut self, key: K, val: V, mut right: Node<K, V>) {
1301 // Necessary for correctness, but in a private function
1302 // Just as a sanity check, make sure we can fit this guy in
1303 debug_assert!(self.len() + right.len() <= self.capacity());
1304 debug_assert!(self.is_leaf() == right.is_leaf());
// Grow first, then fill: the separator lands at old_len and right's contents
// follow immediately after it.
1307 let old_len = self.len();
1308 self._len += right.len() + 1;
1310 ptr::write(self.keys_mut().get_unchecked_mut(old_len), key);
1311 ptr::write(self.vals_mut().get_unchecked_mut(old_len), val);
// Distinct allocations, so the nonoverlapping copies are correct.
// NOTE(review): the element-count arguments are on lines missing from this
// fragment.
1313 ptr::copy_nonoverlapping(right.keys().as_ptr(),
1314 self.keys_mut().as_mut_ptr().offset(old_len as isize + 1),
1316 ptr::copy_nonoverlapping(right.vals().as_ptr(),
1317 self.vals_mut().as_mut_ptr().offset(old_len as isize + 1),
1319 if !self.is_leaf() {
1320 ptr::copy_nonoverlapping(right.edges().as_ptr(),
1323 .offset(old_len as isize + 1),
// NOTE(review): `right` must be prevented from dropping its (now moved-out)
// contents when it falls out of scope — presumably handled on the missing
// lines; confirm against the full source.
1333 /// Get the capacity of a node from the order of the parent B-Tree
// Body not visible in this fragment.
1334 fn capacity_from_b(b: usize) -> usize {
1338 /// Get the minimum load of a node from its capacity
// Body not visible in this fragment.
1339 fn min_load_from_capacity(cap: usize) -> usize {
1344 /// A trait for pairs of `Iterator`s, one over edges and the other over key/value pairs. This is
1345 /// necessary, as the `MoveTraversalImpl` needs to have a destructor that deallocates the `Node`,
1346 /// and a pair of `Iterator`s would require two independent destructors.
1347 trait TraversalImpl {
// NOTE(review): the associated type declarations (`Item`, `Edge`) referenced
// by the method signatures below are on lines missing from this fragment.
1351 fn next_kv(&mut self) -> Option<Self::Item>;
1352 fn next_kv_back(&mut self) -> Option<Self::Item>;
1354 fn next_edge(&mut self) -> Option<Self::Edge>;
1355 fn next_edge_back(&mut self) -> Option<Self::Edge>;
1358 /// A `TraversalImpl` that actually is backed by two iterators. This works in the non-moving case,
1359 /// as no deallocation needs to be done.
1361 struct ElemsAndEdges<Elems, Edges>(Elems, Edges);
// Pure delegation: field 0 yields key/value pairs, field 1 yields edges;
// both ends delegate to the corresponding DoubleEndedIterator method.
1363 impl<K, V, E, Elems: DoubleEndedIterator, Edges: DoubleEndedIterator>
1364 TraversalImpl for ElemsAndEdges<Elems, Edges>
1365 where Elems : Iterator<Item=(K, V)>, Edges : Iterator<Item=E>
1370 fn next_kv(&mut self) -> Option<(K, V)> { self.0.next() }
1371 fn next_kv_back(&mut self) -> Option<(K, V)> { self.0.next_back() }
1373 fn next_edge(&mut self) -> Option<E> { self.1.next() }
1374 fn next_edge_back(&mut self) -> Option<E> { self.1.next_back() }
1377 /// A `TraversalImpl` taking a `Node` by value.
// NOTE(review): the `keys`/`vals` fields and the deallocation metadata fields
// (`ptr`, `capacity`, `is_leaf`) are on lines missing from this fragment.
1378 struct MoveTraversalImpl<K, V> {
1381 edges: RawItems<Node<K, V>>,
1383 // For deallocation when we are done iterating.
// The raw-pointer internals suppress the auto traits; re-assert Send/Sync
// exactly when the element types themselves allow it.
1389 unsafe impl<K: Sync, V: Sync> Sync for MoveTraversalImpl<K, V> {}
1390 unsafe impl<K: Send, V: Send> Send for MoveTraversalImpl<K, V> {}
1392 impl<K, V> TraversalImpl for MoveTraversalImpl<K, V> {
// NOTE(review): the `type Item = (K, V);` declaration is not visible here.
1394 type Edge = Node<K, V>;
// Keys and values advance in lockstep; the key and value iterators cover the
// same number of items, so the non-(Some, Some) arms (only partially visible
// in this fragment) should be the exhausted case.
1396 fn next_kv(&mut self) -> Option<(K, V)> {
1397 match (self.keys.next(), self.vals.next()) {
1398 (Some(k), Some(v)) => Some((k, v)),
1403 fn next_kv_back(&mut self) -> Option<(K, V)> {
1404 match (self.keys.next_back(), self.vals.next_back()) {
1405 (Some(k), Some(v)) => Some((k, v)),
// Leaves store no edges, so edge requests on a leaf are a caller bug
// (debug-checked only).
1410 fn next_edge(&mut self) -> Option<Node<K, V>> {
1411 // Necessary for correctness, but in a private module
1412 debug_assert!(!self.is_leaf);
1416 fn next_edge_back(&mut self) -> Option<Node<K, V>> {
1417 // Necessary for correctness, but in a private module
1418 debug_assert!(!self.is_leaf);
1419 self.edges.next_back()
1423 impl<K, V> Drop for MoveTraversalImpl<K, V> {
1424 #[unsafe_destructor_blind_to_params]
1425 fn drop(&mut self) {
1426 // We need to cleanup the stored values manually, as the RawItems destructor would run
1427 // after our deallocation.
// Drain all three raw iterators so every remaining K, V, and child Node is
// dropped before the backing buffer is freed.
1428 for _ in self.keys.by_ref() {}
1429 for _ in self.vals.by_ref() {}
1430 for _ in self.edges.by_ref() {}
// Recompute the exact size/alignment used at allocation time (from capacity
// and leaf-ness) so the deallocation matches the original allocation.
1432 let (alignment, size) = calculate_allocation_generic::<K, V>(self.capacity, self.is_leaf);
1433 unsafe { heap::deallocate(*self.ptr, size, alignment) };
1437 /// An abstraction over all the different kinds of traversals a node supports
// NOTE(review): the struct's fields (the inner TraversalImpl plus the
// head/tail edge-position flags used by the methods below) are on lines
// missing from this fragment.
1439 struct AbsTraversal<Impl> {
1446 /// A single atomic step in a traversal.
1447 pub enum TraversalItem<K, V, E> {
1448 /// An element is visited. This isn't written as `Elem(K, V)` just because `opt.map(Elem)`
1449 /// requires the function to take a single argument. (Enum constructors are functions.)
// NOTE(review): the `Elem` and `Edge` variant declarations themselves are not
// visible in this fragment.
1451 /// An edge is followed.
1455 /// A traversal over a node's entries and edges
1456 pub type Traversal<'a, K, V> = AbsTraversal<ElemsAndEdges<Zip<slice::Iter<'a, K>,
1457 slice::Iter<'a, V>>,
1458 slice::Iter<'a, Node<K, V>>>>;
1460 /// A mutable traversal over a node's entries and edges
// Note that keys stay immutable even in the mutable traversal; only values
// and edges are handed out mutably.
1461 pub type MutTraversal<'a, K, V> = AbsTraversal<ElemsAndEdges<Zip<slice::Iter<'a, K>,
1462 slice::IterMut<'a, V>>,
1463 slice::IterMut<'a, Node<K, V>>>>;
1465 /// An owning traversal over a node's entries and edges
1466 pub type MoveTraversal<K, V> = AbsTraversal<MoveTraversalImpl<K, V>>;
// Forward iteration: a node interleaves edges and key/value pairs, so each
// step yields the next edge if one is due, otherwise the next pair.
1469 impl<K, V, E, Impl> Iterator for AbsTraversal<Impl>
1470 where Impl: TraversalImpl<Item = (K, V), Edge = E>
1472 type Item = TraversalItem<K, V, E>;
1474 fn next(&mut self) -> Option<TraversalItem<K, V, E>> {
1475 self.next_edge_item().map(Edge).or_else(|| self.next_kv_item().map(Elem))
// Backward iteration mirrors the forward ordering exactly, using the
// *_back variants of the same two stepping methods.
1479 impl<K, V, E, Impl> DoubleEndedIterator for AbsTraversal<Impl>
1480 where Impl: TraversalImpl<Item = (K, V), Edge = E>
1482 fn next_back(&mut self) -> Option<TraversalItem<K, V, E>> {
1483 self.next_edge_item_back().map(Edge).or_else(|| self.next_kv_item_back().map(Elem))
1487 impl<K, V, E, Impl> AbsTraversal<Impl> where Impl: TraversalImpl<Item = (K, V), Edge = E> {
1488 /// Advances the iterator and returns the item if it's an edge. Returns None
1489 /// and does nothing if the first item is not an edge.
1490 pub fn next_edge_item(&mut self) -> Option<E> {
1491 // NB. `&& self.has_edges` might be redundant in this condition.
// `head_is_edge` alternates between edge and kv positions; an edge is
// consumed only when the head currently points at one (and the node has
// edges at all, i.e. it is not a leaf slice).
1492 let edge = if self.head_is_edge && self.has_edges {
1493 self.inner.next_edge()
// (else branch and the `edge` return are on lines missing from this fragment)
1497 self.head_is_edge = false;
1501 /// Advances the iterator and returns the item if it's an edge. Returns None
1502 /// and does nothing if the last item is not an edge.
1503 pub fn next_edge_item_back(&mut self) -> Option<E> {
// Mirror image of next_edge_item, driven by the tail flag.
1504 let edge = if self.tail_is_edge && self.has_edges {
1505 self.inner.next_edge_back()
1509 self.tail_is_edge = false;
1513 /// Advances the iterator and returns the item if it's a key-value pair. Returns None
1514 /// and does nothing if the first item is not a key-value pair.
1515 pub fn next_kv_item(&mut self) -> Option<(K, V)> {
// After yielding a pair, the head flips back to expecting an edge.
1516 if !self.head_is_edge {
1517 self.head_is_edge = true;
1518 self.inner.next_kv()
1524 /// Advances the iterator and returns the item if it's a key-value pair. Returns None
1525 /// and does nothing if the last item is not a key-value pair.
1526 pub fn next_kv_item_back(&mut self) -> Option<(K, V)> {
1527 if !self.tail_is_edge {
1528 self.tail_is_edge = true;
1529 self.inner.next_kv_back()
// This macro stamps out the shared slice logic for both the immutable
// ($NodeSlice = NodeSlice) and mutable (MutNodeSlice) views; see the two
// invocations at the bottom of the file.
1536 macro_rules! node_slice_impl {
1537 ($NodeSlice:ident, $Traversal:ident,
1538 $as_slices_internal:ident, $index:ident, $iter:ident) => {
1539 impl<'a, K: Ord + 'a, V: 'a> $NodeSlice<'a, K, V> {
1540 /// Performs linear search in a slice. Returns a tuple of (index, is_exact_match).
1541 fn search_linear<Q: ?Sized>(&self, key: &Q) -> (usize, bool)
1542 where K: Borrow<Q>, Q: Ord {
// Keys are sorted, so scan until the first key >= the probe.
1543 for (i, k) in self.keys.iter().enumerate() {
1544 match key.cmp(k.borrow()) {
1546 Equal => return (i, true),
1547 Less => return (i, false),
// Probe is greater than every key: it belongs past the end.
1550 (self.keys.len(), false)
1553 /// Returns a sub-slice with elements starting with `min_key`.
1554 pub fn slice_from<Q: ?Sized + Ord>(self, min_key: &Q) -> $NodeSlice<'a, K, V> where
// The diagram below maps each search outcome to the resulting slice
// boundaries; head_is_edge records whether the leading edge is kept.
1558 // |_1_|_3_|_5_|_7_|
1560 // 0 0 1 1 2 2 3 3 4 index
1562 // \___|___|___|___/ slice_from(&0); pos = 0
1563 // \___|___|___/ slice_from(&2); pos = 1
1564 // |___|___|___/ slice_from(&3); pos = 1; result.head_is_edge = false
1565 // \___|___/ slice_from(&4); pos = 2
1566 // \___/ slice_from(&6); pos = 3
1567 // \|/ slice_from(&999); pos = 4
1568 let (pos, pos_is_kv) = self.search_linear(min_key);
// NOTE(review): the result struct-literal header and the leaf arm of the
// edges conditional are on lines missing from this fragment.
1570 has_edges: self.has_edges,
1571 edges: if !self.has_edges {
1574 self.edges.$index(pos ..)
1576 keys: &self.keys[pos ..],
1577 vals: self.vals.$index(pos ..),
// On an exact key match, the edge to the left of the match is excluded.
1578 head_is_edge: !pos_is_kv,
1579 tail_is_edge: self.tail_is_edge,
1583 /// Returns a sub-slice with elements up to and including `max_key`.
1584 pub fn slice_to<Q: ?Sized + Ord>(self, max_key: &Q) -> $NodeSlice<'a, K, V> where
// Mirror of slice_from's diagram, for the upper bound.
1588 // |_1_|_3_|_5_|_7_|
1590 // 0 0 1 1 2 2 3 3 4 index
1592 //\|/ | | | | slice_to(&0); pos = 0
1593 // \___/ | | | slice_to(&2); pos = 1
1594 // \___|___| | | slice_to(&3); pos = 1; result.tail_is_edge = false
1595 // \___|___/ | | slice_to(&4); pos = 2
1596 // \___|___|___/ | slice_to(&6); pos = 3
1597 // \___|___|___|___/ slice_to(&999); pos = 4
1598 let (pos, pos_is_kv) = self.search_linear(max_key);
// "Up to and including": an exact match keeps the matched pair, hence +1.
1599 let pos = pos + if pos_is_kv { 1 } else { 0 };
// NOTE(review): as in slice_from, the result struct-literal header and the
// leaf arm of the edges conditional are on lines missing from this fragment.
1601 has_edges: self.has_edges,
1602 edges: if !self.has_edges {
1605 self.edges.$index(.. (pos + 1))
1607 keys: &self.keys[..pos],
1608 vals: self.vals.$index(.. pos),
1609 head_is_edge: self.head_is_edge,
// On an exact match, the edge to the right of the matched pair is excluded.
1610 tail_is_edge: !pos_is_kv,
1615 impl<'a, K: 'a, V: 'a> $NodeSlice<'a, K, V> {
1616 /// Returns an iterator over key/value pairs and edges in a slice.
1618 pub fn $iter(self) -> $Traversal<'a, K, V> {
1619 let mut edges = self.edges.$iter();
1620 // Skip edges at both ends, if excluded.
1621 if !self.head_is_edge { edges.next(); }
1622 if !self.tail_is_edge { edges.next_back(); }
1623 // The key iterator is always immutable.
// NOTE(review): the traversal struct-literal header and the edges argument of
// ElemsAndEdges are on lines missing from this fragment.
1625 inner: ElemsAndEdges(
1626 self.keys.iter().zip(self.vals.$iter()),
1629 head_is_edge: self.head_is_edge,
1630 tail_is_edge: self.tail_is_edge,
1631 has_edges: self.has_edges,
// Instantiate the immutable and mutable variants of the slice API.
1638 node_slice_impl!(NodeSlice, Traversal, as_slices_internal, index, iter);
1639 node_slice_impl!(MutNodeSlice, MutTraversal, as_slices_internal_mut, index_mut, iter_mut);