1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
// This module contains the internal representation and logic for a B-Tree's node,
// behind a safe interface, so that BTreeMap itself does not depend on any of these details.
14 pub use self::InsertionResult::*;
15 pub use self::SearchResult::*;
16 pub use self::ForceResult::*;
17 pub use self::TraversalItem::*;
20 use core::prelude::v1::*;
22 use core::cmp::Ordering::{Greater, Less, Equal};
23 use core::intrinsics::arith_offset;
25 use core::marker::PhantomData;
26 use core::ops::{Deref, DerefMut, Index, IndexMut};
27 use core::ptr::Unique;
28 use core::{slice, mem, ptr, cmp, raw};
29 use alloc::heap::{self, EMPTY};
33 /// Represents the result of an Insertion: either the item fit, or the node had to split
34 pub enum InsertionResult<K, V> {
/// The inserted element fit
Fit,
37 /// The inserted element did not fit, so the node was split
38 Split(K, V, Node<K, V>),
41 /// Represents the result of a search for a key in a single node
42 pub enum SearchResult<NodeRef> {
43 /// The element was found at the given index
44 Found(Handle<NodeRef, handle::KV, handle::LeafOrInternal>),
45 /// The element wasn't found, but if it's anywhere, it must be beyond this edge
46 GoDown(Handle<NodeRef, handle::Edge, handle::LeafOrInternal>),
49 /// A B-Tree Node. We keep keys/edges/values separate to optimize searching for keys.
50 #[unsafe_no_drop_flag]
51 pub struct Node<K, V> {
52 // To avoid the need for multiple allocations, we allocate a single buffer with enough space
53 // for `capacity` keys, `capacity` values, and (in internal nodes) `capacity + 1` edges.
54 // Despite this, we store three separate pointers to the three "chunks" of the buffer because
55 // the performance drops significantly if the locations of the vals and edges need to be
56 // recalculated upon access.
58 // These will never be null during normal usage of a `Node`. However, to avoid the need for a
59 // drop flag, `Node::drop` zeroes `keys`, signaling that the `Node` has already been cleaned
64 // In leaf nodes, this will be None, and no space will be allocated for edges.
65 edges: Option<Unique<Node<K, V>>>,
67 // At any given time, there will be `_len` keys, `_len` values, and (in an internal node)
68 // `_len + 1` edges. In a leaf node, there will never be any edges.
70 // Note: instead of accessing this field directly, please call the `len()` method, which should
71 // be more stable in the face of representation changes.
74 // FIXME(gereeter) It shouldn't be necessary to store the capacity in every node, as it should
75 // be constant throughout the tree. Once a solution to this is found, it might be possible to
76 // also pass down the offsets into the buffer that vals and edges are stored at, removing the
77 // need for those two pointers.
79 // Note: instead of accessing this field directly, please call the `capacity()` method, which
80 // should be more stable in the face of representation changes.
84 struct NodeSlice<'a, K: 'a, V: 'a> {
87 pub edges: &'a [Node<K, V>],
93 struct MutNodeSlice<'a, K: 'a, V: 'a> {
96 pub edges: &'a mut [Node<K, V>],
102 /// Rounds up to a multiple of a power of two. Returns the closest multiple
/// of `target_alignment` that is greater than or equal to `unrounded`.
/// Panics if `target_alignment` is not a power of two.
109 fn round_up_to_next(unrounded: usize, target_alignment: usize) -> usize {
110 assert!(target_alignment.is_power_of_two());
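// Adding `target_alignment - 1` pushes any unaligned value past the next
// boundary, and masking with `!(target_alignment - 1)` then clears the low
// bits. For example, rounding 13 up to a multiple of 8: (13 + 7) & !7 == 16.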
111 (unrounded + target_alignment - 1) & !(target_alignment - 1)
116 assert_eq!(round_up_to_next(0, 4), 0);
117 assert_eq!(round_up_to_next(1, 4), 4);
118 assert_eq!(round_up_to_next(2, 4), 4);
119 assert_eq!(round_up_to_next(3, 4), 4);
120 assert_eq!(round_up_to_next(4, 4), 4);
121 assert_eq!(round_up_to_next(5, 4), 8);
124 // Returns a tuple of (val_offset, edge_offset),
125 // from the start of a mallocated array.
127 fn calculate_offsets(keys_size: usize,
128 vals_size: usize, vals_align: usize,
131 let vals_offset = round_up_to_next(keys_size, vals_align);
132 let end_of_vals = vals_offset + vals_size;
134 let edges_offset = round_up_to_next(end_of_vals, edges_align);
136 (vals_offset, edges_offset)
// Returns a tuple of (minimum required alignment, total size)
// of a single mallocated array that holds the keys, values, and edges.
142 fn calculate_allocation(keys_size: usize, keys_align: usize,
143 vals_size: usize, vals_align: usize,
144 edges_size: usize, edges_align: usize)
146 let (_, edges_offset) = calculate_offsets(keys_size,
147 vals_size, vals_align,
149 let end_of_edges = edges_offset + edges_size;
151 let min_align = cmp::max(keys_align, cmp::max(vals_align, edges_align));
153 (min_align, end_of_edges)
157 fn test_offset_calculation() {
158 assert_eq!(calculate_allocation(128, 8, 15, 1, 4, 4), (8, 148));
159 assert_eq!(calculate_allocation(3, 1, 2, 1, 1, 1), (1, 6));
160 assert_eq!(calculate_allocation(6, 2, 12, 4, 24, 8), (8, 48));
161 assert_eq!(calculate_offsets(128, 15, 1, 4), (128, 144));
162 assert_eq!(calculate_offsets(3, 2, 1, 1), (3, 5));
163 assert_eq!(calculate_offsets(6, 12, 4, 8), (8, 24));
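// An added worked example (not one of the original assertions): it spells out the
// single-buffer layout described on `Node` for a hypothetical leaf node with
// K = u64 (size 8, align 8), V = u32 (size 4, align 4) and capacity 5. Leaves
// allocate no space for edges, so edges_size is 0 here.
#[test]
fn test_offset_calculation_sketch() {
    // keys_size = 5 * 8 = 40, vals_size = 5 * 4 = 20; vals start at the first
    // multiple of 4 at or after 40, which is 40 itself.
    assert_eq!(calculate_offsets(40, 20, 4, 1), (40, 60));
    // keys_align = 8 dominates the alignment, and the whole buffer is 60 bytes.
    assert_eq!(calculate_allocation(40, 8, 20, 4, 0, 1), (8, 60));
}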
166 fn calculate_allocation_generic<K, V>(capacity: usize, is_leaf: bool) -> (usize, usize) {
167 let (keys_size, keys_align) = (capacity * mem::size_of::<K>(), mem::align_of::<K>());
168 let (vals_size, vals_align) = (capacity * mem::size_of::<V>(), mem::align_of::<V>());
169 let (edges_size, edges_align) = if is_leaf {
172 ((capacity + 1) * mem::size_of::<Node<K, V>>(), mem::align_of::<Node<K, V>>())
175 calculate_allocation(
176 keys_size, keys_align,
177 vals_size, vals_align,
178 edges_size, edges_align
182 fn calculate_offsets_generic<K, V>(capacity: usize, is_leaf: bool) -> (usize, usize) {
183 let keys_size = capacity * mem::size_of::<K>();
184 let vals_size = capacity * mem::size_of::<V>();
185 let vals_align = mem::align_of::<V>();
186 let edges_align = if is_leaf {
189 mem::align_of::<Node<K, V>>()
194 vals_size, vals_align,
199 /// An iterator over a slice that owns the elements of the slice but not the allocation.
205 impl<T> RawItems<T> {
206 unsafe fn from_slice(slice: &[T]) -> RawItems<T> {
207 RawItems::from_parts(slice.as_ptr(), slice.len())
210 unsafe fn from_parts(ptr: *const T, len: usize) -> RawItems<T> {
211 if mem::size_of::<T>() == 0 {
214 tail: arith_offset(ptr as *const i8, len as isize) as *const T,
219 tail: ptr.offset(len as isize),
224 unsafe fn push(&mut self, val: T) {
225 ptr::write(self.tail as *mut T, val);
227 if mem::size_of::<T>() == 0 {
228 self.tail = arith_offset(self.tail as *const i8, 1) as *const T;
230 self.tail = self.tail.offset(1);
235 impl<T> Iterator for RawItems<T> {
238 fn next(&mut self) -> Option<T> {
239 if self.head == self.tail {
243 let ret = Some(ptr::read(self.head));
245 if mem::size_of::<T>() == 0 {
246 self.head = arith_offset(self.head as *const i8, 1) as *const T;
248 self.head = self.head.offset(1);
257 impl<T> DoubleEndedIterator for RawItems<T> {
258 fn next_back(&mut self) -> Option<T> {
259 if self.head == self.tail {
263 if mem::size_of::<T>() == 0 {
264 self.tail = arith_offset(self.tail as *const i8, -1) as *const T;
266 self.tail = self.tail.offset(-1);
269 Some(ptr::read(self.tail))
275 impl<T> Drop for RawItems<T> {
277 for _ in self.by_ref() {}
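// A small added sketch of what `RawItems` is for: it reads a slice's elements out
// by value while leaving the backing storage untouched, which is how the moving
// iterator below can hand out keys/vals and free the node buffer separately. The
// test name and the use of Copy elements (so duplicated reads are harmless) are
// illustrative choices, not original code.
#[test]
fn raw_items_sketch() {
    let xs: &[u32] = &[1, 2, 3];
    let mut total = 0;
    for x in unsafe { RawItems::from_slice(xs) } {
        total += x;
    }
    assert_eq!(total, 6);
    // `xs` still points at its storage here; only the values were read out of it.
}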
281 impl<K, V> Drop for Node<K, V> {
283 if self.keys.is_null() ||
284 (unsafe { self.keys.get() as *const K as usize == mem::POST_DROP_USIZE })
286 // Since we have #[unsafe_no_drop_flag], we have to watch
287 // out for the sentinel value being stored in self.keys. (Using
288 // null is technically a violation of the `Unique`
289 // requirements, though.)
293 // Do the actual cleanup.
295 drop(RawItems::from_slice(self.keys()));
296 drop(RawItems::from_slice(self.vals()));
297 drop(RawItems::from_slice(self.edges()));
302 self.keys = unsafe { Unique::new(0 as *mut K) };
306 impl<K, V> Node<K, V> {
307 /// Make a new internal node. The caller must initialize the result to fix the invariant that
308 /// there are `len() + 1` edges.
309 unsafe fn new_internal(capacity: usize) -> Node<K, V> {
310 let (alignment, size) = calculate_allocation_generic::<K, V>(capacity, false);
312 let buffer = heap::allocate(size, alignment);
313 if buffer.is_null() { ::alloc::oom(); }
315 let (vals_offset, edges_offset) = calculate_offsets_generic::<K, V>(capacity, false);
318 keys: Unique::new(buffer as *mut K),
319 vals: Unique::new(buffer.offset(vals_offset as isize) as *mut V),
320 edges: Some(Unique::new(buffer.offset(edges_offset as isize) as *mut Node<K, V>)),
326 /// Make a new leaf node
327 fn new_leaf(capacity: usize) -> Node<K, V> {
328 let (alignment, size) = calculate_allocation_generic::<K, V>(capacity, true);
330 let buffer = unsafe { heap::allocate(size, alignment) };
331 if buffer.is_null() { ::alloc::oom(); }
333 let (vals_offset, _) = calculate_offsets_generic::<K, V>(capacity, true);
336 keys: unsafe { Unique::new(buffer as *mut K) },
337 vals: unsafe { Unique::new(buffer.offset(vals_offset as isize) as *mut V) },
344 unsafe fn destroy(&mut self) {
345 let (alignment, size) =
346 calculate_allocation_generic::<K, V>(self.capacity(), self.is_leaf());
347 heap::deallocate(*self.keys as *mut u8, size, alignment);
351 pub fn as_slices<'a>(&'a self) -> (&'a [K], &'a [V]) {
353 slice::from_raw_parts(*self.keys, self.len()),
354 slice::from_raw_parts(*self.vals, self.len()),
359 pub fn as_slices_mut<'a>(&'a mut self) -> (&'a mut [K], &'a mut [V]) {
360 unsafe { mem::transmute(self.as_slices()) }
364 pub fn as_slices_internal<'b>(&'b self) -> NodeSlice<'b, K, V> {
365 let is_leaf = self.is_leaf();
366 let (keys, vals) = self.as_slices();
367 let edges: &[_] = if self.is_leaf() {
371 let data = match self.edges {
372 None => heap::EMPTY as *const Node<K,V>,
373 Some(ref p) => **p as *const Node<K,V>,
375 mem::transmute(raw::Slice {
392 pub fn as_slices_internal_mut<'b>(&'b mut self) -> MutNodeSlice<'b, K, V> {
393 unsafe { mem::transmute(self.as_slices_internal()) }
397 pub fn keys<'a>(&'a self) -> &'a [K] {
402 pub fn keys_mut<'a>(&'a mut self) -> &'a mut [K] {
403 self.as_slices_mut().0
407 pub fn vals<'a>(&'a self) -> &'a [V] {
412 pub fn vals_mut<'a>(&'a mut self) -> &'a mut [V] {
413 self.as_slices_mut().1
417 pub fn edges<'a>(&'a self) -> &'a [Node<K, V>] {
418 self.as_slices_internal().edges
422 pub fn edges_mut<'a>(&'a mut self) -> &'a mut [Node<K, V>] {
423 self.as_slices_internal_mut().edges
427 // FIXME(gereeter) Write an efficient clone_from
428 #[stable(feature = "rust1", since = "1.0.0")]
429 impl<K: Clone, V: Clone> Clone for Node<K, V> {
430 fn clone(&self) -> Node<K, V> {
431 let mut ret = if self.is_leaf() {
432 Node::new_leaf(self.capacity())
434 unsafe { Node::new_internal(self.capacity()) }
438 // For failure safety
439 let mut keys = RawItems::from_parts(ret.keys().as_ptr(), 0);
440 let mut vals = RawItems::from_parts(ret.vals().as_ptr(), 0);
441 let mut edges = RawItems::from_parts(ret.edges().as_ptr(), 0);
443 for key in self.keys() {
444 keys.push(key.clone())
446 for val in self.vals() {
447 vals.push(val.clone())
449 for edge in self.edges() {
450 edges.push(edge.clone())
457 ret._len = self.len();
464 /// A reference to something in the middle of a `Node`. There are two `Type`s of `Handle`s,
465 /// namely `KV` handles, which point to key/value pairs, and `Edge` handles, which point to edges
466 /// before or after key/value pairs. Methods are provided for removing pairs, inserting into edges,
467 /// accessing the stored values, and moving around the `Node`.
469 /// This handle is generic, and can take any sort of reference to a `Node`. The reason for this is
470 /// two-fold. First of all, it reduces the amount of repetitive code, implementing functions that
471 /// don't need mutability on both mutable and immutable references. Secondly and more importantly,
472 /// this allows users of the `Handle` API to associate metadata with the reference. This is used in
473 /// `BTreeMap` to give `Node`s temporary "IDs" that persist to when the `Node` is used in a
476 /// # A note on safety
478 /// Unfortunately, the extra power afforded by being generic also means that safety can technically
479 /// be broken. For sensible implementations of `Deref` and `DerefMut`, these handles are perfectly
480 /// safe. As long as repeatedly calling `.deref()` results in the same Node being returned each
481 /// time, everything should work fine. However, if the `Deref` implementation swaps in multiple
/// different nodes, then the indices that are assumed to be in bounds suddenly stop being so. For
/// example:
486 /// struct Nasty<'a> {
487 /// first: &'a Node<usize, usize>,
488 /// second: &'a Node<usize, usize>,
489 /// flag: &'a Cell<bool>,
492 /// impl<'a> Deref for Nasty<'a> {
493 /// type Target = Node<usize, usize>;
495 /// fn deref(&self) -> &Node<usize, usize> {
496 /// if self.flag.get() {
505 /// let flag = Cell::new(false);
506 /// let mut small_node = Node::make_leaf_root(3);
507 /// let mut large_node = Node::make_leaf_root(100);
509 /// for i in 0..100 {
510 /// // Insert to the end
511 /// large_node.edge_handle(i).insert_as_leaf(i, i);
514 /// let nasty = Nasty {
515 /// first: &large_node,
516 /// second: &small_node,
520 /// // The handle points at index 75.
521 /// let handle = Node::search(nasty, 75);
523 /// // Now the handle still points at index 75, but on the small node, which has no index 75.
526 /// println!("Uninitialized memory: {:?}", handle.into_kv());
529 #[derive(Copy, Clone)]
530 pub struct Handle<NodeRef, Type, NodeType> {
533 marker: PhantomData<(Type, NodeType)>,
541 // Handle node types.
542 pub enum LeafOrInternal {}
547 impl<K: Ord, V> Node<K, V> {
548 /// Searches for the given key in the node. If it finds an exact match,
549 /// `Found` will be yielded with the matching index. If it doesn't find an exact match,
550 /// `GoDown` will be yielded with the index of the subtree the key must lie in.
551 pub fn search<Q: ?Sized, NodeRef: Deref<Target=Node<K, V>>>(node: NodeRef, key: &Q)
552 -> SearchResult<NodeRef> where K: Borrow<Q>, Q: Ord {
553 // FIXME(Gankro): Tune when to search linear or binary based on B (and maybe K/V).
// For the B configured as of this writing (B = 6), binary search was *significantly* worse.
556 match node.as_slices_internal().search_linear(key) {
557 (index, true) => Found(Handle { node: node, index: index, marker: PhantomData }),
558 (index, false) => GoDown(Handle { node: node, index: index, marker: PhantomData }),
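// An added usage sketch: roughly how a caller such as `BTreeMap::get` drives
// `search`, descending through `GoDown` edges until the key is found or a leaf is
// reached. The helper name and the exact loop are illustrative assumptions, not
// the map's actual code.
#[allow(dead_code)]
fn search_descent_sketch<'a, K: Ord, V>(mut node: &'a Node<K, V>, key: &K) -> Option<&'a V> {
    loop {
        match Node::search(node, key) {
            Found(handle) => return Some(handle.into_kv().1),
            GoDown(handle) => match handle.force() {
                // Bottomed out in a leaf: the key is not in the tree.
                Leaf(_) => return None,
                // Otherwise keep descending into the child this edge points at.
                Internal(internal) => node = internal.into_edge(),
            },
        }
    }
}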
564 impl <K, V> Node<K, V> {
565 /// Make a leaf root from scratch
566 pub fn make_leaf_root(b: usize) -> Node<K, V> {
567 Node::new_leaf(capacity_from_b(b))
570 /// Make an internal root and swap it with an old root
571 pub fn make_internal_root(left_and_out: &mut Node<K,V>, b: usize, key: K, value: V,
573 let node = mem::replace(left_and_out, unsafe { Node::new_internal(capacity_from_b(b)) });
574 left_and_out._len = 1;
576 ptr::write(left_and_out.keys_mut().get_unchecked_mut(0), key);
577 ptr::write(left_and_out.vals_mut().get_unchecked_mut(0), value);
578 ptr::write(left_and_out.edges_mut().get_unchecked_mut(0), node);
579 ptr::write(left_and_out.edges_mut().get_unchecked_mut(1), right);
583 /// How many key-value pairs the node contains
584 pub fn len(&self) -> usize {
588 /// Does the node not contain any key-value pairs
589 pub fn is_empty(&self) -> bool { self.len() == 0 }
591 /// How many key-value pairs the node can fit
592 pub fn capacity(&self) -> usize {
/// Whether the node has no children (i.e. whether it is a leaf)
pub fn is_leaf(&self) -> bool {
/// Whether the node has too few elements
602 pub fn is_underfull(&self) -> bool {
603 self.len() < min_load_from_capacity(self.capacity())
/// Whether the node cannot fit any more elements
607 pub fn is_full(&self) -> bool {
608 self.len() == self.capacity()
612 impl<K, V, NodeRef: Deref<Target=Node<K, V>>, Type, NodeType> Handle<NodeRef, Type, NodeType> {
613 /// Returns a reference to the node that contains the pointed-to edge or key/value pair. This
614 /// is very different from `edge` and `edge_mut` because those return children of the node
615 /// returned by `node`.
616 pub fn node(&self) -> &Node<K, V> {
621 impl<K, V, NodeRef, Type, NodeType> Handle<NodeRef, Type, NodeType> where
622 NodeRef: Deref<Target=Node<K, V>> + DerefMut,
624 /// Converts a handle into one that stores the same information using a raw pointer. This can
625 /// be useful in conjunction with `from_raw` when the type system is insufficient for
626 /// determining the lifetimes of the nodes.
627 pub fn as_raw(&mut self) -> Handle<*mut Node<K, V>, Type, NodeType> {
629 node: &mut *self.node as *mut _,
636 impl<K, V, Type, NodeType> Handle<*mut Node<K, V>, Type, NodeType> {
637 /// Converts from a handle stored with a raw pointer, which isn't directly usable, to a handle
638 /// stored with a reference. This is an unsafe inverse of `as_raw`, and together they allow
639 /// unsafely extending the lifetime of the reference to the `Node`.
640 pub unsafe fn from_raw<'a>(&'a self) -> Handle<&'a Node<K, V>, Type, NodeType> {
648 /// Converts from a handle stored with a raw pointer, which isn't directly usable, to a handle
649 /// stored with a mutable reference. This is an unsafe inverse of `as_raw`, and together they
650 /// allow unsafely extending the lifetime of the reference to the `Node`.
651 pub unsafe fn from_raw_mut<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, Type, NodeType> {
653 node: &mut *self.node,
660 impl<'a, K: 'a, V: 'a> Handle<&'a Node<K, V>, handle::Edge, handle::Internal> {
661 /// Turns the handle into a reference to the edge it points at. This is necessary because the
662 /// returned pointer has a larger lifetime than what would be returned by `edge` or `edge_mut`,
663 /// making it more suitable for moving down a chain of nodes.
664 pub fn into_edge(self) -> &'a Node<K, V> {
666 self.node.edges().get_unchecked(self.index)
671 impl<'a, K: 'a, V: 'a> Handle<&'a mut Node<K, V>, handle::Edge, handle::Internal> {
672 /// Turns the handle into a mutable reference to the edge it points at. This is necessary
673 /// because the returned pointer has a larger lifetime than what would be returned by
674 /// `edge_mut`, making it more suitable for moving down a chain of nodes.
675 pub fn into_edge_mut(self) -> &'a mut Node<K, V> {
677 self.node.edges_mut().get_unchecked_mut(self.index)
682 impl<K, V, NodeRef: Deref<Target=Node<K, V>>> Handle<NodeRef, handle::Edge, handle::Internal> {
683 // This doesn't exist because there are no uses for it,
684 // but is fine to add, analogous to edge_mut.
686 // /// Returns a reference to the edge pointed-to by this handle. This should not be
687 // /// confused with `node`, which references the parent node of what is returned here.
688 // pub fn edge(&self) -> &Node<K, V>
691 pub enum ForceResult<NodeRef, Type> {
692 Leaf(Handle<NodeRef, Type, handle::Leaf>),
693 Internal(Handle<NodeRef, Type, handle::Internal>)
696 impl<K, V, NodeRef: Deref<Target=Node<K, V>>, Type> Handle<NodeRef, Type, handle::LeafOrInternal> {
697 /// Figure out whether this handle is pointing to something in a leaf node or to something in
698 /// an internal node, clarifying the type according to the result.
699 pub fn force(self) -> ForceResult<NodeRef, Type> {
700 if self.node.is_leaf() {
715 impl<K, V, NodeRef> Handle<NodeRef, handle::Edge, handle::Leaf> where
716 NodeRef: Deref<Target=Node<K, V>> + DerefMut,
/// Tries to insert this key-value pair at the given index in this leaf node.
/// If the node is full, we have to split it.
721 /// Returns a *mut V to the inserted value, because the caller may want this when
722 /// they're done mutating the tree, but we don't want to borrow anything for now.
723 pub fn insert_as_leaf(mut self, key: K, value: V) ->
724 (InsertionResult<K, V>, *mut V) {
725 if !self.node.is_full() {
726 // The element can fit, just insert it
727 (Fit, unsafe { self.node.insert_kv(self.index, key, value) as *mut _ })
729 // The element can't fit, this node is full. Split it into two nodes.
730 let (new_key, new_val, mut new_right) = self.node.split();
731 let left_len = self.node.len();
734 if self.index <= left_len {
735 self.node.insert_kv(self.index, key, value)
737 // We need to subtract 1 because in splitting we took out new_key and new_val.
738 // Just being in the right node means we are past left_len k/v pairs in the
739 // left node and 1 k/v pair in the parent node.
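// For instance, if the original node held 11 pairs, the split leaves 5 pairs in
// the left node, sends 1 to the parent, and moves 5 into `new_right`, so an
// insertion originally aimed at index 8 lands at index 8 - 5 - 1 = 2 on the right.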
740 new_right.insert_kv(self.index - left_len - 1, key, value)
744 (Split(new_key, new_val, new_right), ptr)
749 impl<K, V, NodeRef> Handle<NodeRef, handle::Edge, handle::Internal> where
750 NodeRef: Deref<Target=Node<K, V>> + DerefMut,
752 /// Returns a mutable reference to the edge pointed-to by this handle. This should not be
753 /// confused with `node`, which references the parent node of what is returned here.
754 pub fn edge_mut(&mut self) -> &mut Node<K, V> {
756 self.node.edges_mut().get_unchecked_mut(self.index)
/// Tries to insert this key-value pair at the given index in this internal node.
/// If the node is full, we have to split it.
762 pub fn insert_as_internal(mut self, key: K, value: V, right: Node<K, V>)
763 -> InsertionResult<K, V> {
764 if !self.node.is_full() {
765 // The element can fit, just insert it
767 self.node.insert_kv(self.index, key, value);
768 self.node.insert_edge(self.index + 1, right); // +1 to insert to the right
772 // The element can't fit, this node is full. Split it into two nodes.
773 let (new_key, new_val, mut new_right) = self.node.split();
774 let left_len = self.node.len();
776 if self.index <= left_len {
778 self.node.insert_kv(self.index, key, value);
779 self.node.insert_edge(self.index + 1, right); // +1 to insert to the right
// The -1 here is for the same reason as in insert_as_leaf - because we
784 // split, there are actually left_len + 1 k/v pairs before the right node, with
785 // the extra 1 being put in the parent.
786 new_right.insert_kv(self.index - left_len - 1, key, value);
787 new_right.insert_edge(self.index - left_len, right);
791 Split(new_key, new_val, new_right)
/// Handle an underflow in this node's child. We favour handling "to the left" because we know
/// we're underfull, but our neighbour can be full. Handling to the left means when we choose to
797 /// steal, we pop off the end of our neighbour (always fast) and "unshift" ourselves
798 /// (always slow, but at least faster since we know we're half-empty).
799 /// Handling "to the right" reverses these roles. Of course, we merge whenever possible
800 /// because we want dense nodes, and merging is about equal work regardless of direction.
801 pub fn handle_underflow(mut self) {
804 self.handle_underflow_to_left();
806 self.handle_underflow_to_right();
811 /// Right is underflowed. Tries to steal from left,
812 /// but merges left and right if left is low too.
813 unsafe fn handle_underflow_to_left(&mut self) {
814 let left_len = self.node.edges()[self.index - 1].len();
815 if left_len > min_load_from_capacity(self.node.capacity()) {
816 self.left_kv().steal_rightward();
818 self.left_kv().merge_children();
822 /// Left is underflowed. Tries to steal from the right,
823 /// but merges left and right if right is low too.
824 unsafe fn handle_underflow_to_right(&mut self) {
825 let right_len = self.node.edges()[self.index + 1].len();
826 if right_len > min_load_from_capacity(self.node.capacity()) {
827 self.right_kv().steal_leftward();
829 self.right_kv().merge_children();
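// A concrete reading of the steal-or-merge choice above (added note, assuming the
// usual B-tree bound that the minimum load is about half the capacity): with
// capacity 11 and a minimum load of 5, an underfull child holds at most 4 pairs,
// so a sibling with 6 or more pairs can donate one, while a sibling already at
// the minimum is merged instead, since 5 + 4 pairs plus the separator still fit.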
834 impl<K, V, NodeRef, NodeType> Handle<NodeRef, handle::Edge, NodeType> where
835 NodeRef: Deref<Target=Node<K, V>> + DerefMut,
837 /// Gets the handle pointing to the key/value pair just to the left of the pointed-to edge.
838 /// This is unsafe because the handle might point to the first edge in the node, which has no
839 /// pair to its left.
840 unsafe fn left_kv<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
842 node: &mut *self.node,
843 index: self.index - 1,
848 /// Gets the handle pointing to the key/value pair just to the right of the pointed-to edge.
849 /// This is unsafe because the handle might point to the last edge in the node, which has no
850 /// pair to its right.
851 unsafe fn right_kv<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
853 node: &mut *self.node,
860 impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a Node<K, V>, handle::KV, NodeType> {
861 /// Turns the handle into references to the key and value it points at. This is necessary
/// because the returned pointers have larger lifetimes than what would be returned by `key` or `val`.
864 pub fn into_kv(self) -> (&'a K, &'a V) {
865 let (keys, vals) = self.node.as_slices();
868 keys.get_unchecked(self.index),
869 vals.get_unchecked(self.index)
875 impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
876 /// Turns the handle into mutable references to the key and value it points at. This is
877 /// necessary because the returned pointers have larger lifetimes than what would be returned
878 /// by `key_mut` or `val_mut`.
879 pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
880 let (keys, vals) = self.node.as_slices_mut();
883 keys.get_unchecked_mut(self.index),
884 vals.get_unchecked_mut(self.index)
/// Converts this handle into one pointing at the edge immediately to the left of the key/value
890 /// pair pointed-to by this handle. This is useful because it returns a reference with larger
891 /// lifetime than `left_edge`.
892 pub fn into_left_edge(self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
894 node: &mut *self.node,
901 impl<'a, K: 'a, V: 'a, NodeRef: Deref<Target=Node<K, V>> + 'a, NodeType> Handle<NodeRef, handle::KV,
903 // These are fine to include, but are currently unneeded.
905 // /// Returns a reference to the key pointed-to by this handle. This doesn't return a
906 // /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
908 // pub fn key(&'a self) -> &'a K {
909 // unsafe { self.node.keys().get_unchecked(self.index) }
912 // /// Returns a reference to the value pointed-to by this handle. This doesn't return a
913 // /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
915 // pub fn val(&'a self) -> &'a V {
916 // unsafe { self.node.vals().get_unchecked(self.index) }
920 impl<'a, K: 'a, V: 'a, NodeRef, NodeType> Handle<NodeRef, handle::KV, NodeType> where
921 NodeRef: 'a + Deref<Target=Node<K, V>> + DerefMut,
923 /// Returns a mutable reference to the key pointed-to by this handle. This doesn't return a
/// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the handle.
926 pub fn key_mut(&'a mut self) -> &'a mut K {
927 unsafe { self.node.keys_mut().get_unchecked_mut(self.index) }
930 /// Returns a mutable reference to the value pointed-to by this handle. This doesn't return a
/// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the handle.
933 pub fn val_mut(&'a mut self) -> &'a mut V {
934 unsafe { self.node.vals_mut().get_unchecked_mut(self.index) }
938 impl<K, V, NodeRef, NodeType> Handle<NodeRef, handle::KV, NodeType> where
939 NodeRef: Deref<Target=Node<K, V>> + DerefMut,
941 /// Gets the handle pointing to the edge immediately to the left of the key/value pair pointed
942 /// to by this handle.
943 pub fn left_edge<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
945 node: &mut *self.node,
951 /// Gets the handle pointing to the edge immediately to the right of the key/value pair pointed
952 /// to by this handle.
953 pub fn right_edge<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
955 node: &mut *self.node,
956 index: self.index + 1,
962 impl<K, V, NodeRef> Handle<NodeRef, handle::KV, handle::Leaf> where
963 NodeRef: Deref<Target=Node<K, V>> + DerefMut,
965 /// Removes the key/value pair at the handle's location.
967 /// # Panics (in debug build)
969 /// Panics if the node containing the pair is not a leaf node.
970 pub fn remove_as_leaf(mut self) -> (K, V) {
971 unsafe { self.node.remove_kv(self.index) }
975 impl<K, V, NodeRef> Handle<NodeRef, handle::KV, handle::Internal> where
976 NodeRef: Deref<Target=Node<K, V>> + DerefMut
978 /// Steal! Stealing is roughly analogous to a binary tree rotation.
979 /// In this case, we're "rotating" right.
980 unsafe fn steal_rightward(&mut self) {
981 // Take the biggest stuff off left
982 let (mut key, mut val, edge) = {
983 let mut left_handle = self.left_edge();
984 let left = left_handle.edge_mut();
985 let (key, val) = left.pop_kv();
986 let edge = if left.is_leaf() {
989 Some(left.pop_edge())
995 // Swap the parent's separating key-value pair with left's
996 mem::swap(&mut key, self.key_mut());
997 mem::swap(&mut val, self.val_mut());
999 // Put them at the start of right
1000 let mut right_handle = self.right_edge();
1001 let right = right_handle.edge_mut();
1002 right.insert_kv(0, key, val);
1004 Some(edge) => right.insert_edge(0, edge),
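// Added illustration of the "rotation", with K the separator at this handle and
// J the last pair of the left child:
//
//        parent [ .. K .. ]                    parent [ .. J .. ]
//              /         \           ==>             /         \
//    left [ .. J ]   right [ L .. ]        left [ .. ]   right [ K L .. ]
//
// In internal nodes the left child's last edge also moves over and becomes the
// right child's first edge.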
1009 /// Steal! Stealing is roughly analogous to a binary tree rotation.
1010 /// In this case, we're "rotating" left.
1011 unsafe fn steal_leftward(&mut self) {
1012 // Take the smallest stuff off right
1013 let (mut key, mut val, edge) = {
1014 let mut right_handle = self.right_edge();
1015 let right = right_handle.edge_mut();
1016 let (key, val) = right.remove_kv(0);
1017 let edge = if right.is_leaf() {
1020 Some(right.remove_edge(0))
1026 // Swap the parent's separating key-value pair with right's
1027 mem::swap(&mut key, self.key_mut());
1028 mem::swap(&mut val, self.val_mut());
1030 // Put them at the end of left
1031 let mut left_handle = self.left_edge();
1032 let left = left_handle.edge_mut();
1033 left.push_kv(key, val);
1035 Some(edge) => left.push_edge(edge),
1040 /// Merge! Smooshes left and right into one node, along with the key-value
1041 /// pair that separated them in their parent.
1042 unsafe fn merge_children(mut self) {
1043 // Permanently remove right's index, and the key-value pair that separates
1045 let (key, val) = self.node.remove_kv(self.index);
1046 let right = self.node.remove_edge(self.index + 1);
1048 // Give left right's stuff.
1049 self.left_edge().edge_mut()
1050 .absorb(key, val, right);
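// Added illustration: merging removes the separator K and the right edge from the
// parent and appends both to the left child:
//
//      parent [ .. K .. ]                      parent [ .. ]
//            /         \              ==>            |
//   left [ A B ]   right [ C D ]             left [ A B K C D ]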
1054 impl<K, V> Node<K, V> {
1055 /// Returns the mutable handle pointing to the key/value pair at a given index.
1057 /// # Panics (in debug build)
1059 /// Panics if the given index is out of bounds.
1060 pub fn kv_handle(&mut self, index: usize) -> Handle<&mut Node<K, V>, handle::KV,
1061 handle::LeafOrInternal> {
1062 // Necessary for correctness, but in a private module
1063 debug_assert!(index < self.len(), "kv_handle index out of bounds");
1067 marker: PhantomData,
1071 pub fn iter<'a>(&'a self) -> Traversal<'a, K, V> {
1072 self.as_slices_internal().iter()
1075 pub fn iter_mut<'a>(&'a mut self) -> MutTraversal<'a, K, V> {
1076 self.as_slices_internal_mut().iter_mut()
1079 pub fn into_iter(self) -> MoveTraversal<K, V> {
1081 let ret = MoveTraversal {
1082 inner: MoveTraversalImpl {
1083 keys: RawItems::from_slice(self.keys()),
1084 vals: RawItems::from_slice(self.vals()),
1085 edges: RawItems::from_slice(self.edges()),
1087 ptr: Unique::new(*self.keys as *mut u8),
1088 capacity: self.capacity(),
1089 is_leaf: self.is_leaf()
1093 has_edges: !self.is_leaf(),
1100 /// When a node has no keys or values and only a single edge, extract that edge.
1101 pub fn hoist_lone_child(&mut self) {
1102 // Necessary for correctness, but in a private module
1103 debug_assert!(self.is_empty());
1104 debug_assert!(!self.is_leaf());
1107 let ret = ptr::read(self.edges().get_unchecked(0));
1109 ptr::write(self, ret);
1114 // Vector functions (all unchecked)
1115 impl<K, V> Node<K, V> {
1116 // This must be followed by push_edge on an internal node.
1118 unsafe fn push_kv(&mut self, key: K, val: V) {
1119 let len = self.len();
1121 ptr::write(self.keys_mut().get_unchecked_mut(len), key);
1122 ptr::write(self.vals_mut().get_unchecked_mut(len), val);
1127 // This can only be called immediately after a call to push_kv.
1129 unsafe fn push_edge(&mut self, edge: Node<K, V>) {
1130 let len = self.len();
1132 ptr::write(self.edges_mut().get_unchecked_mut(len), edge);
1135 // This must be followed by insert_edge on an internal node.
1137 unsafe fn insert_kv(&mut self, index: usize, key: K, val: V) -> &mut V {
1139 self.keys().as_ptr().offset(index as isize),
1140 self.keys_mut().as_mut_ptr().offset(index as isize + 1),
1144 self.vals().as_ptr().offset(index as isize),
1145 self.vals_mut().as_mut_ptr().offset(index as isize + 1),
1149 ptr::write(self.keys_mut().get_unchecked_mut(index), key);
1150 ptr::write(self.vals_mut().get_unchecked_mut(index), val);
1154 self.vals_mut().get_unchecked_mut(index)
1157 // This can only be called immediately after a call to insert_kv.
1159 unsafe fn insert_edge(&mut self, index: usize, edge: Node<K, V>) {
1161 self.edges().as_ptr().offset(index as isize),
1162 self.edges_mut().as_mut_ptr().offset(index as isize + 1),
1165 ptr::write(self.edges_mut().get_unchecked_mut(index), edge);
1168 // This must be followed by pop_edge on an internal node.
1170 unsafe fn pop_kv(&mut self) -> (K, V) {
1171 let key = ptr::read(self.keys().get_unchecked(self.len() - 1));
1172 let val = ptr::read(self.vals().get_unchecked(self.len() - 1));
1179 // This can only be called immediately after a call to pop_kv.
1181 unsafe fn pop_edge(&mut self) -> Node<K, V> {
1182 let edge = ptr::read(self.edges().get_unchecked(self.len() + 1));
1187 // This must be followed by remove_edge on an internal node.
1189 unsafe fn remove_kv(&mut self, index: usize) -> (K, V) {
1190 let key = ptr::read(self.keys().get_unchecked(index));
1191 let val = ptr::read(self.vals().get_unchecked(index));
1194 self.keys().as_ptr().offset(index as isize + 1),
1195 self.keys_mut().as_mut_ptr().offset(index as isize),
1196 self.len() - index - 1
1199 self.vals().as_ptr().offset(index as isize + 1),
1200 self.vals_mut().as_mut_ptr().offset(index as isize),
1201 self.len() - index - 1
1209 // This can only be called immediately after a call to remove_kv.
1211 unsafe fn remove_edge(&mut self, index: usize) -> Node<K, V> {
1212 let edge = ptr::read(self.edges().get_unchecked(index));
1215 self.edges().as_ptr().offset(index as isize + 1),
1216 self.edges_mut().as_mut_ptr().offset(index as isize),
1217 // index can be == len+1, so do the +1 first to avoid underflow.
1218 (self.len() + 1) - index
1225 // Private implementation details
1226 impl<K, V> Node<K, V> {
1227 /// Node is full, so split it into two nodes, and yield the middle-most key-value pair
1228 /// because we have one too many, and our parent now has one too few
1229 fn split(&mut self) -> (K, V, Node<K, V>) {
1230 // Necessary for correctness, but in a private function
1231 debug_assert!(!self.is_empty());
1233 let mut right = if self.is_leaf() {
1234 Node::new_leaf(self.capacity())
1236 unsafe { Node::new_internal(self.capacity()) }
1240 right._len = self.len() / 2;
1241 let right_offset = self.len() - right.len();
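// For example, a node holding 11 pairs keeps pairs 0..5, hands pair 5 back to the
// caller (it moves to the parent), and copies pairs 6..11 (plus, in an internal
// node, edges 6..12) into `right`.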
1242 ptr::copy_nonoverlapping(
1243 self.keys().as_ptr().offset(right_offset as isize),
1244 right.keys_mut().as_mut_ptr(),
1247 ptr::copy_nonoverlapping(
1248 self.vals().as_ptr().offset(right_offset as isize),
1249 right.vals_mut().as_mut_ptr(),
1252 if !self.is_leaf() {
1253 ptr::copy_nonoverlapping(
1254 self.edges().as_ptr().offset(right_offset as isize),
1255 right.edges_mut().as_mut_ptr(),
1260 let key = ptr::read(self.keys().get_unchecked(right_offset - 1));
1261 let val = ptr::read(self.vals().get_unchecked(right_offset - 1));
1263 self._len = right_offset - 1;
1269 /// Take all the values from right, separated by the given key and value
1270 fn absorb(&mut self, key: K, val: V, mut right: Node<K, V>) {
1271 // Necessary for correctness, but in a private function
1272 // Just as a sanity check, make sure we can fit this guy in
1273 debug_assert!(self.len() + right.len() <= self.capacity());
1274 debug_assert!(self.is_leaf() == right.is_leaf());
1277 let old_len = self.len();
1278 self._len += right.len() + 1;
1280 ptr::write(self.keys_mut().get_unchecked_mut(old_len), key);
1281 ptr::write(self.vals_mut().get_unchecked_mut(old_len), val);
1283 ptr::copy_nonoverlapping(
1284 right.keys().as_ptr(),
1285 self.keys_mut().as_mut_ptr().offset(old_len as isize + 1),
1288 ptr::copy_nonoverlapping(
1289 right.vals().as_ptr(),
1290 self.vals_mut().as_mut_ptr().offset(old_len as isize + 1),
1293 if !self.is_leaf() {
1294 ptr::copy_nonoverlapping(
1295 right.edges().as_ptr(),
1296 self.edges_mut().as_mut_ptr().offset(old_len as isize + 1),
1307 /// Get the capacity of a node from the order of the parent B-Tree
1308 fn capacity_from_b(b: usize) -> usize {
1312 /// Get the minimum load of a node from its capacity
1313 fn min_load_from_capacity(cap: usize) -> usize {
1318 /// A trait for pairs of `Iterator`s, one over edges and the other over key/value pairs. This is
1319 /// necessary, as the `MoveTraversalImpl` needs to have a destructor that deallocates the `Node`,
1320 /// and a pair of `Iterator`s would require two independent destructors.
1321 trait TraversalImpl {
1325 fn next_kv(&mut self) -> Option<Self::Item>;
1326 fn next_kv_back(&mut self) -> Option<Self::Item>;
1328 fn next_edge(&mut self) -> Option<Self::Edge>;
1329 fn next_edge_back(&mut self) -> Option<Self::Edge>;
1332 /// A `TraversalImpl` that actually is backed by two iterators. This works in the non-moving case,
1333 /// as no deallocation needs to be done.
1335 struct ElemsAndEdges<Elems, Edges>(Elems, Edges);
1337 impl<K, V, E, Elems: DoubleEndedIterator, Edges: DoubleEndedIterator>
1338 TraversalImpl for ElemsAndEdges<Elems, Edges>
1339 where Elems : Iterator<Item=(K, V)>, Edges : Iterator<Item=E>
1344 fn next_kv(&mut self) -> Option<(K, V)> { self.0.next() }
1345 fn next_kv_back(&mut self) -> Option<(K, V)> { self.0.next_back() }
1347 fn next_edge(&mut self) -> Option<E> { self.1.next() }
1348 fn next_edge_back(&mut self) -> Option<E> { self.1.next_back() }
1351 /// A `TraversalImpl` taking a `Node` by value.
1352 struct MoveTraversalImpl<K, V> {
1355 edges: RawItems<Node<K, V>>,
1357 // For deallocation when we are done iterating.
1363 unsafe impl<K: Sync, V: Sync> Sync for MoveTraversalImpl<K, V> {}
1364 unsafe impl<K: Send, V: Send> Send for MoveTraversalImpl<K, V> {}
1366 impl<K, V> TraversalImpl for MoveTraversalImpl<K, V> {
1368 type Edge = Node<K, V>;
1370 fn next_kv(&mut self) -> Option<(K, V)> {
1371 match (self.keys.next(), self.vals.next()) {
1372 (Some(k), Some(v)) => Some((k, v)),
1377 fn next_kv_back(&mut self) -> Option<(K, V)> {
1378 match (self.keys.next_back(), self.vals.next_back()) {
1379 (Some(k), Some(v)) => Some((k, v)),
1384 fn next_edge(&mut self) -> Option<Node<K, V>> {
1385 // Necessary for correctness, but in a private module
1386 debug_assert!(!self.is_leaf);
1390 fn next_edge_back(&mut self) -> Option<Node<K, V>> {
1391 // Necessary for correctness, but in a private module
1392 debug_assert!(!self.is_leaf);
1393 self.edges.next_back()
1397 impl<K, V> Drop for MoveTraversalImpl<K, V> {
1398 fn drop(&mut self) {
1399 // We need to cleanup the stored values manually, as the RawItems destructor would run
1400 // after our deallocation.
1401 for _ in self.keys.by_ref() {}
1402 for _ in self.vals.by_ref() {}
1403 for _ in self.edges.by_ref() {}
1405 let (alignment, size) =
1406 calculate_allocation_generic::<K, V>(self.capacity, self.is_leaf);
1407 unsafe { heap::deallocate(*self.ptr, size, alignment) };
1411 /// An abstraction over all the different kinds of traversals a node supports
1413 struct AbsTraversal<Impl> {
1420 /// A single atomic step in a traversal.
1421 pub enum TraversalItem<K, V, E> {
1422 /// An element is visited. This isn't written as `Elem(K, V)` just because `opt.map(Elem)`
1423 /// requires the function to take a single argument. (Enum constructors are functions.)
1425 /// An edge is followed.
1429 /// A traversal over a node's entries and edges
1430 pub type Traversal<'a, K, V> = AbsTraversal<ElemsAndEdges<Zip<slice::Iter<'a, K>,
1431 slice::Iter<'a, V>>,
1432 slice::Iter<'a, Node<K, V>>>>;
1434 /// A mutable traversal over a node's entries and edges
1435 pub type MutTraversal<'a, K, V> = AbsTraversal<ElemsAndEdges<Zip<slice::Iter<'a, K>,
1436 slice::IterMut<'a, V>>,
1437 slice::IterMut<'a, Node<K, V>>>>;
1439 /// An owning traversal over a node's entries and edges
1440 pub type MoveTraversal<K, V> = AbsTraversal<MoveTraversalImpl<K, V>>;
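// An added consumption sketch: iterating a borrowed `Traversal` yields edges and
// key/value pairs interleaved as `TraversalItem`s. Callers like the BTreeMap
// iterators push `Edge` children onto a stack and emit `Elem` pairs; this helper
// (an assumed name, not original code) merely tallies both kinds.
#[allow(dead_code)]
fn count_traversal_items_sketch<K, V>(node: &Node<K, V>) -> (usize, usize) {
    let (mut kvs, mut edges) = (0, 0);
    for item in node.iter() {
        match item {
            Elem(..) => kvs += 1,
            Edge(..) => edges += 1,
        }
    }
    (kvs, edges)
}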
1443 impl<K, V, E, Impl> Iterator for AbsTraversal<Impl>
1444 where Impl: TraversalImpl<Item=(K, V), Edge=E> {
1445 type Item = TraversalItem<K, V, E>;
1447 fn next(&mut self) -> Option<TraversalItem<K, V, E>> {
1448 self.next_edge_item().map(Edge).or_else(||
1449 self.next_kv_item().map(Elem)
1454 impl<K, V, E, Impl> DoubleEndedIterator for AbsTraversal<Impl>
1455 where Impl: TraversalImpl<Item=(K, V), Edge=E> {
1456 fn next_back(&mut self) -> Option<TraversalItem<K, V, E>> {
1457 self.next_edge_item_back().map(Edge).or_else(||
1458 self.next_kv_item_back().map(Elem)
1463 impl<K, V, E, Impl> AbsTraversal<Impl>
1464 where Impl: TraversalImpl<Item=(K, V), Edge=E> {
1465 /// Advances the iterator and returns the item if it's an edge. Returns None
1466 /// and does nothing if the first item is not an edge.
1467 pub fn next_edge_item(&mut self) -> Option<E> {
1468 // NB. `&& self.has_edges` might be redundant in this condition.
1469 let edge = if self.head_is_edge && self.has_edges {
1470 self.inner.next_edge()
1474 self.head_is_edge = false;
1478 /// Advances the iterator and returns the item if it's an edge. Returns None
1479 /// and does nothing if the last item is not an edge.
1480 pub fn next_edge_item_back(&mut self) -> Option<E> {
1481 let edge = if self.tail_is_edge && self.has_edges {
1482 self.inner.next_edge_back()
1486 self.tail_is_edge = false;
1490 /// Advances the iterator and returns the item if it's a key-value pair. Returns None
1491 /// and does nothing if the first item is not a key-value pair.
1492 pub fn next_kv_item(&mut self) -> Option<(K, V)> {
1493 if !self.head_is_edge {
1494 self.head_is_edge = true;
1495 self.inner.next_kv()
1501 /// Advances the iterator and returns the item if it's a key-value pair. Returns None
1502 /// and does nothing if the last item is not a key-value pair.
1503 pub fn next_kv_item_back(&mut self) -> Option<(K, V)> {
1504 if !self.tail_is_edge {
1505 self.tail_is_edge = true;
1506 self.inner.next_kv_back()
1513 macro_rules! node_slice_impl {
1514 ($NodeSlice:ident, $Traversal:ident,
1515 $as_slices_internal:ident, $index:ident, $iter:ident) => {
1516 impl<'a, K: Ord + 'a, V: 'a> $NodeSlice<'a, K, V> {
1517 /// Performs linear search in a slice. Returns a tuple of (index, is_exact_match).
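/// For example (illustrative), with keys `[1, 3, 5, 7]`: searching for `5` yields
/// `(2, true)`, searching for `4` yields `(2, false)`, and searching for `9`
/// yields `(4, false)`.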
1518 fn search_linear<Q: ?Sized>(&self, key: &Q) -> (usize, bool)
1519 where K: Borrow<Q>, Q: Ord {
1520 for (i, k) in self.keys.iter().enumerate() {
1521 match key.cmp(k.borrow()) {
1523 Equal => return (i, true),
1524 Less => return (i, false),
1527 (self.keys.len(), false)
1530 /// Returns a sub-slice with elements starting with `min_key`.
1531 pub fn slice_from<Q: ?Sized + Ord>(self, min_key: &Q) -> $NodeSlice<'a, K, V> where
1535 // |_1_|_3_|_5_|_7_|
1537 // 0 0 1 1 2 2 3 3 4 index
1539 // \___|___|___|___/ slice_from(&0); pos = 0
1540 // \___|___|___/ slice_from(&2); pos = 1
1541 // |___|___|___/ slice_from(&3); pos = 1; result.head_is_edge = false
1542 // \___|___/ slice_from(&4); pos = 2
1543 // \___/ slice_from(&6); pos = 3
1544 // \|/ slice_from(&999); pos = 4
1545 let (pos, pos_is_kv) = self.search_linear(min_key);
1547 has_edges: self.has_edges,
1548 edges: if !self.has_edges {
1551 self.edges.$index(pos ..)
1553 keys: &self.keys[pos ..],
1554 vals: self.vals.$index(pos ..),
1555 head_is_edge: !pos_is_kv,
1556 tail_is_edge: self.tail_is_edge,
1560 /// Returns a sub-slice with elements up to and including `max_key`.
1561 pub fn slice_to<Q: ?Sized + Ord>(self, max_key: &Q) -> $NodeSlice<'a, K, V> where
1565 // |_1_|_3_|_5_|_7_|
1567 // 0 0 1 1 2 2 3 3 4 index
1569 //\|/ | | | | slice_to(&0); pos = 0
1570 // \___/ | | | slice_to(&2); pos = 1
1571 // \___|___| | | slice_to(&3); pos = 1; result.tail_is_edge = false
1572 // \___|___/ | | slice_to(&4); pos = 2
1573 // \___|___|___/ | slice_to(&6); pos = 3
1574 // \___|___|___|___/ slice_to(&999); pos = 4
1575 let (pos, pos_is_kv) = self.search_linear(max_key);
1576 let pos = pos + if pos_is_kv { 1 } else { 0 };
1578 has_edges: self.has_edges,
1579 edges: if !self.has_edges {
1582 self.edges.$index(.. (pos + 1))
1584 keys: &self.keys[..pos],
1585 vals: self.vals.$index(.. pos),
1586 head_is_edge: self.head_is_edge,
1587 tail_is_edge: !pos_is_kv,
1592 impl<'a, K: 'a, V: 'a> $NodeSlice<'a, K, V> {
1593 /// Returns an iterator over key/value pairs and edges in a slice.
1595 pub fn $iter(self) -> $Traversal<'a, K, V> {
1596 let mut edges = self.edges.$iter();
1597 // Skip edges at both ends, if excluded.
1598 if !self.head_is_edge { edges.next(); }
1599 if !self.tail_is_edge { edges.next_back(); }
1600 // The key iterator is always immutable.
1602 inner: ElemsAndEdges(
1603 self.keys.iter().zip(self.vals.$iter()),
1606 head_is_edge: self.head_is_edge,
1607 tail_is_edge: self.tail_is_edge,
1608 has_edges: self.has_edges,
1615 node_slice_impl!(NodeSlice, Traversal, as_slices_internal, index, iter);
1616 node_slice_impl!(MutNodeSlice, MutTraversal, as_slices_internal_mut, index_mut, iter_mut);