// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// This module represents all the internal representation and logic for a B-Tree's node
// with a safe interface, so that BTreeMap itself does not depend on any of these details.

pub use self::InsertionResult::*;
pub use self::SearchResult::*;
pub use self::ForceResult::*;
pub use self::TraversalItem::*;

use core::prelude::*;

use core::borrow::BorrowFrom;
use core::cmp::Ordering::{Greater, Less, Equal};
use core::iter::Zip;
use core::ops::{Deref, DerefMut};
use core::ptr::Unique;
use core::{slice, mem, ptr, cmp, num, raw};

use alloc::heap;

/// Represents the result of an Insertion: either the item fit, or the node had to split
pub enum InsertionResult<K, V> {
    /// The inserted element fit
    Fit,
    /// The inserted element did not fit, so the node was split
    Split(K, V, Node<K, V>),
}

/// Represents the result of a search for a key in a single node
pub enum SearchResult<NodeRef> {
    /// The element was found at the given index
    Found(Handle<NodeRef, handle::KV, handle::LeafOrInternal>),
    /// The element wasn't found, but if it's anywhere, it must be beyond this edge
    GoDown(Handle<NodeRef, handle::Edge, handle::LeafOrInternal>),
}

/// A B-Tree Node. We keep keys/edges/values separate to optimize searching for keys.
#[unsafe_no_drop_flag]
pub struct Node<K, V> {
    // To avoid the need for multiple allocations, we allocate a single buffer with enough space
    // for `capacity` keys, `capacity` values, and (in internal nodes) `capacity + 1` edges.
    // Despite this, we store three separate pointers to the three "chunks" of the buffer because
    // the performance drops significantly if the locations of the vals and edges need to be
    // recalculated upon access.
    //
    // These will never be null during normal usage of a `Node`. However, to avoid the need for a
    // drop flag, `Node::drop` zeroes `keys`, signaling that the `Node` has already been cleaned
    // up.
    keys: Unique<K>,
    vals: Unique<V>,

    // In leaf nodes, this will be null, and no space will be allocated for edges.
    edges: Unique<Node<K, V>>,

    // At any given time, there will be `_len` keys, `_len` values, and (in an internal node)
    // `_len + 1` edges. In a leaf node, there will never be any edges.
    //
    // Note: instead of accessing this field directly, please call the `len()` method, which should
    // be more stable in the face of representation changes.
    _len: uint,

    // FIXME(gereeter) It shouldn't be necessary to store the capacity in every node, as it should
    // be constant throughout the tree. Once a solution to this is found, it might be possible to
    // also pass down the offsets into the buffer that vals and edges are stored at, removing the
    // need for those two pointers.
    //
    // Note: instead of accessing this field directly, please call the `capacity()` method, which
    // should be more stable in the face of representation changes.
    _capacity: uint,
}

/// Rounds up to a multiple of a power of two. Returns the closest multiple
/// of `target_alignment` that is higher or equal to `unrounded`.
///
/// # Panics
///
/// Fails if `target_alignment` is not a power of two.
#[inline]
fn round_up_to_next(unrounded: uint, target_alignment: uint) -> uint {
    assert!(num::UnsignedInt::is_power_of_two(target_alignment));
    (unrounded + target_alignment - 1) & !(target_alignment - 1)
}

#[test]
fn test_rounding() {
    assert_eq!(round_up_to_next(0, 4), 0);
    assert_eq!(round_up_to_next(1, 4), 4);
    assert_eq!(round_up_to_next(2, 4), 4);
    assert_eq!(round_up_to_next(3, 4), 4);
    assert_eq!(round_up_to_next(4, 4), 4);
    assert_eq!(round_up_to_next(5, 4), 8);
}

// Returns a tuple of (val_offset, edge_offset),
// from the start of a mallocated array.
#[inline]
fn calculate_offsets(keys_size: uint,
                     vals_size: uint, vals_align: uint,
                     edges_align: uint)
                     -> (uint, uint) {
    let vals_offset = round_up_to_next(keys_size, vals_align);
    let end_of_vals = vals_offset + vals_size;

    let edges_offset = round_up_to_next(end_of_vals, edges_align);

    (vals_offset, edges_offset)
}

// Returns a tuple of (minimum required alignment, array_size),
// from the start of a mallocated array.
#[inline]
fn calculate_allocation(keys_size: uint, keys_align: uint,
                        vals_size: uint, vals_align: uint,
                        edges_size: uint, edges_align: uint)
                        -> (uint, uint) {
    let (_, edges_offset) = calculate_offsets(keys_size,
                                              vals_size, vals_align,
                                                         edges_align);
    let end_of_edges = edges_offset + edges_size;

    let min_align = cmp::max(keys_align, cmp::max(vals_align, edges_align));

    (min_align, end_of_edges)
}

#[test]
fn test_offset_calculation() {
    assert_eq!(calculate_allocation(128, 8, 15, 1, 4, 4), (8, 148));
    assert_eq!(calculate_allocation(3, 1, 2, 1, 1, 1), (1, 6));
    assert_eq!(calculate_allocation(6, 2, 12, 4, 24, 8), (8, 48));
    assert_eq!(calculate_offsets(128, 15, 1, 4), (128, 144));
    assert_eq!(calculate_offsets(3, 2, 1, 1), (3, 5));
    assert_eq!(calculate_offsets(6, 12, 4, 8), (8, 24));
}

fn calculate_allocation_generic<K, V>(capacity: uint, is_leaf: bool) -> (uint, uint) {
    let (keys_size, keys_align) = (capacity * mem::size_of::<K>(), mem::min_align_of::<K>());
    let (vals_size, vals_align) = (capacity * mem::size_of::<V>(), mem::min_align_of::<V>());
    let (edges_size, edges_align) = if is_leaf {
        // Leaf nodes have no edges, so no space and no alignment constraint.
        (0, 1)
    } else {
        ((capacity + 1) * mem::size_of::<Node<K, V>>(), mem::min_align_of::<Node<K, V>>())
    };

    calculate_allocation(
        keys_size, keys_align,
        vals_size, vals_align,
        edges_size, edges_align
    )
}

fn calculate_offsets_generic<K, V>(capacity: uint, is_leaf: bool) -> (uint, uint) {
    let keys_size = capacity * mem::size_of::<K>();
    let vals_size = capacity * mem::size_of::<V>();
    let vals_align = mem::min_align_of::<V>();
    let edges_align = if is_leaf {
        1
    } else {
        mem::min_align_of::<Node<K, V>>()
    };

    calculate_offsets(
        keys_size,
        vals_size, vals_align,
                   edges_align
    )
}

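// A hedged sanity check (not part of the original module) of the generic layout
// helpers above, using a hypothetical K = u64, V = u32 internal node: vals must
// start after all the keys, edges after all the vals, and the allocation must
// leave room for `capacity + 1` edges. Only structural inequalities are
// asserted, so the check holds regardless of platform sizes and alignments.
#[test]
fn test_layout_generic() {
    let capacity = 5;
    let (vals_offset, edges_offset) = calculate_offsets_generic::<u64, u32>(capacity, false);
    let (align, size) = calculate_allocation_generic::<u64, u32>(capacity, false);

    // vals begin somewhere at or past the end of the keys chunk
    assert!(vals_offset >= capacity * mem::size_of::<u64>());
    // edges begin at or past the end of the vals chunk
    assert!(edges_offset >= vals_offset + capacity * mem::size_of::<u32>());
    // the buffer holds capacity + 1 edges past edges_offset
    assert!(size >= edges_offset + (capacity + 1) * mem::size_of::<Node<u64, u32>>());
    // the buffer alignment satisfies the strictest of the three chunks
    assert!(align >= mem::min_align_of::<Node<u64, u32>>());
}
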
/// An iterator over a slice that owns the elements of the slice but not the allocation.
struct RawItems<T> {
    head: *const T,
    tail: *const T,
}

impl<T> RawItems<T> {
    unsafe fn from_slice(slice: &[T]) -> RawItems<T> {
        RawItems::from_parts(slice.as_ptr(), slice.len())
    }

    unsafe fn from_parts(ptr: *const T, len: uint) -> RawItems<T> {
        if mem::size_of::<T>() == 0 {
            RawItems {
                head: ptr,
                tail: (ptr as uint + len) as *const T,
            }
        } else {
            RawItems {
                head: ptr,
                tail: ptr.offset(len as int),
            }
        }
    }

    unsafe fn push(&mut self, val: T) {
        ptr::write(self.tail as *mut T, val);

        if mem::size_of::<T>() == 0 {
            self.tail = (self.tail as uint + 1) as *const T;
        } else {
            self.tail = self.tail.offset(1);
        }
    }
}

impl<T> Iterator for RawItems<T> {
    type Item = T;

    fn next(&mut self) -> Option<T> {
        if self.head == self.tail {
            None
        } else {
            unsafe {
                let ret = Some(ptr::read(self.head));

                if mem::size_of::<T>() == 0 {
                    self.head = (self.head as uint + 1) as *const T;
                } else {
                    self.head = self.head.offset(1);
                }

                ret
            }
        }
    }
}

impl<T> DoubleEndedIterator for RawItems<T> {
    fn next_back(&mut self) -> Option<T> {
        if self.head == self.tail {
            None
        } else {
            unsafe {
                if mem::size_of::<T>() == 0 {
                    self.tail = (self.tail as uint - 1) as *const T;
                } else {
                    self.tail = self.tail.offset(-1);
                }

                Some(ptr::read(self.tail))
            }
        }
    }
}

#[unsafe_destructor]
impl<T> Drop for RawItems<T> {
    fn drop(&mut self) {
        for _ in *self {}
    }
}

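// A hedged usage sketch (not part of the original module) of `RawItems` on
// `Copy` data, where reading an element out of the buffer twice would be
// harmless anyway: the iterator hands out the *elements* via `ptr::read`,
// while the backing slice below keeps owning the allocation itself.
#[test]
fn test_raw_items_iteration() {
    let storage: &[uint] = &[1, 2, 3];
    unsafe {
        let mut items = RawItems::from_slice(storage);
        // Elements come out from either end, ownership moving to the caller.
        assert_eq!(items.next(), Some(1));
        assert_eq!(items.next_back(), Some(3));
        assert_eq!(items.next(), Some(2));
        assert_eq!(items.next(), None);
    }
}
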
#[unsafe_destructor]
impl<K, V> Drop for Node<K, V> {
    fn drop(&mut self) {
        if self.keys.0.is_null() {
            // We have already cleaned up this node.
            return;
        }

        // Do the actual cleanup.
        unsafe {
            drop(RawItems::from_slice(self.keys()));
            drop(RawItems::from_slice(self.vals()));
            drop(RawItems::from_slice(self.edges()));

            self.destroy();
        }

        self.keys.0 = ptr::null_mut();
    }
}

impl<K, V> Node<K, V> {
    /// Make a new internal node. The caller must initialize the result to fix the invariant that
    /// there are `len() + 1` edges.
    unsafe fn new_internal(capacity: uint) -> Node<K, V> {
        let (alignment, size) = calculate_allocation_generic::<K, V>(capacity, false);

        let buffer = heap::allocate(size, alignment);
        if buffer.is_null() { ::alloc::oom(); }

        let (vals_offset, edges_offset) = calculate_offsets_generic::<K, V>(capacity, false);

        Node {
            keys: Unique(buffer as *mut K),
            vals: Unique(buffer.offset(vals_offset as int) as *mut V),
            edges: Unique(buffer.offset(edges_offset as int) as *mut Node<K, V>),
            _len: 0,
            _capacity: capacity,
        }
    }

    /// Make a new leaf node
    fn new_leaf(capacity: uint) -> Node<K, V> {
        let (alignment, size) = calculate_allocation_generic::<K, V>(capacity, true);

        let buffer = unsafe { heap::allocate(size, alignment) };
        if buffer.is_null() { ::alloc::oom(); }

        let (vals_offset, _) = calculate_offsets_generic::<K, V>(capacity, true);

        Node {
            keys: Unique(buffer as *mut K),
            vals: Unique(unsafe { buffer.offset(vals_offset as int) as *mut V }),
            edges: Unique(ptr::null_mut()),
            _len: 0,
            _capacity: capacity,
        }
    }

    unsafe fn destroy(&mut self) {
        let (alignment, size) =
                calculate_allocation_generic::<K, V>(self.capacity(), self.is_leaf());
        heap::deallocate(self.keys.0 as *mut u8, size, alignment);
    }

    pub fn as_slices<'a>(&'a self) -> (&'a [K], &'a [V]) {
        unsafe {(
            mem::transmute(raw::Slice {
                data: self.keys.0 as *const K,
                len: self.len()
            }),
            mem::transmute(raw::Slice {
                data: self.vals.0 as *const V,
                len: self.len()
            }),
        )}
    }

    pub fn as_slices_mut<'a>(&'a mut self) -> (&'a mut [K], &'a mut [V]) {
        unsafe { mem::transmute(self.as_slices()) }
    }

    pub fn as_slices_internal<'a>(&'a self) -> (&'a [K], &'a [V], &'a [Node<K, V>]) {
        let (keys, vals) = self.as_slices();
        let edges: &[_] = if self.is_leaf() {
            &[]
        } else {
            unsafe {
                mem::transmute(raw::Slice {
                    data: self.edges.0 as *const Node<K, V>,
                    len: self.len() + 1
                })
            }
        };
        (keys, vals, edges)
    }

    pub fn as_slices_internal_mut<'a>(&'a mut self) -> (&'a mut [K], &'a mut [V],
                                                        &'a mut [Node<K, V>]) {
        unsafe { mem::transmute(self.as_slices_internal()) }
    }

    pub fn keys<'a>(&'a self) -> &'a [K] {
        self.as_slices().0
    }

    pub fn keys_mut<'a>(&'a mut self) -> &'a mut [K] {
        self.as_slices_mut().0
    }

    pub fn vals<'a>(&'a self) -> &'a [V] {
        self.as_slices().1
    }

    pub fn vals_mut<'a>(&'a mut self) -> &'a mut [V] {
        self.as_slices_mut().1
    }

    pub fn edges<'a>(&'a self) -> &'a [Node<K, V>] {
        self.as_slices_internal().2
    }

    pub fn edges_mut<'a>(&'a mut self) -> &'a mut [Node<K, V>] {
        self.as_slices_internal_mut().2
    }
}

// FIXME(gereeter) Write an efficient clone_from
impl<K: Clone, V: Clone> Clone for Node<K, V> {
    fn clone(&self) -> Node<K, V> {
        let mut ret = if self.is_leaf() {
            Node::new_leaf(self.capacity())
        } else {
            unsafe { Node::new_internal(self.capacity()) }
        };

        unsafe {
            // For failure safety
            let mut keys = RawItems::from_parts(ret.keys().as_ptr(), 0);
            let mut vals = RawItems::from_parts(ret.vals().as_ptr(), 0);
            let mut edges = RawItems::from_parts(ret.edges().as_ptr(), 0);

            for key in self.keys().iter() {
                keys.push(key.clone())
            }
            for val in self.vals().iter() {
                vals.push(val.clone())
            }
            for edge in self.edges().iter() {
                edges.push(edge.clone())
            }

            mem::forget(keys);
            mem::forget(vals);
            mem::forget(edges);
        }

        ret._len = self.len();
        ret
    }
}

/// A reference to something in the middle of a `Node`. There are two `Type`s of `Handle`s,
/// namely `KV` handles, which point to key/value pairs, and `Edge` handles, which point to edges
/// before or after key/value pairs. Methods are provided for removing pairs, inserting into edges,
/// accessing the stored values, and moving around the `Node`.
///
/// This handle is generic, and can take any sort of reference to a `Node`. The reason for this is
/// two-fold. First of all, it reduces the amount of repetitive code, implementing functions that
/// don't need mutability on both mutable and immutable references. Secondly and more importantly,
/// this allows users of the `Handle` API to associate metadata with the reference. This is used in
/// `BTreeMap` to give `Node`s temporary "IDs" that persist to when the `Node` is used in a
/// `Handle`.
///
/// # A note on safety
///
/// Unfortunately, the extra power afforded by being generic also means that safety can technically
/// be broken. For sensible implementations of `Deref` and `DerefMut`, these handles are perfectly
/// safe. As long as repeatedly calling `.deref()` results in the same Node being returned each
/// time, everything should work fine. However, if the `Deref` implementation swaps in multiple
/// different nodes, then the indices that are assumed to be in bounds suddenly stop being so. For
/// example:
///
/// ```rust,ignore
/// struct Nasty<'a> {
///     first: &'a Node<uint, uint>,
///     second: &'a Node<uint, uint>,
///     flag: &'a Cell<bool>,
/// }
///
/// impl<'a> Deref for Nasty<'a> {
///     type Target = Node<uint, uint>;
///
///     fn deref(&self) -> &Node<uint, uint> {
///         if self.flag.get() {
///             &*self.second
///         } else {
///             &*self.first
///         }
///     }
/// }
///
/// fn main() {
///     let flag = Cell::new(false);
///     let mut small_node = Node::make_leaf_root(3);
///     let mut large_node = Node::make_leaf_root(100);
///
///     for i in range(0, 100) {
///         // Insert to the end
///         large_node.edge_handle(i).insert_as_leaf(i, i);
///     }
///
///     let nasty = Nasty {
///         first: &large_node,
///         second: &small_node,
///         flag: &flag,
///     };
///
///     // The handle points at index 75.
///     let handle = Node::search(nasty, 75);
///
///     // Now the handle still points at index 75, but on the small node, which has no index 75.
///     flag.set(true);
///
///     println!("Uninitialized memory: {}", handle.into_kv());
/// }
/// ```
pub struct Handle<NodeRef, Type, NodeType> {
    node: NodeRef,
    index: uint
}

pub mod handle {
    // Handle types.
    pub enum KV {}
    pub enum Edge {}

    // Handle node types.
    pub enum LeafOrInternal {}
    pub enum Leaf {}
    pub enum Internal {}
}

impl<K: Ord, V> Node<K, V> {
    /// Searches for the given key in the node. If it finds an exact match,
    /// `Found` will be yielded with the matching index. If it doesn't find an exact match,
    /// `GoDown` will be yielded with the index of the subtree the key must lie in.
    pub fn search<Sized? Q, NodeRef: Deref<Target=Node<K, V>>>(node: NodeRef, key: &Q)
                  -> SearchResult<NodeRef> where Q: BorrowFrom<K> + Ord {
        // FIXME(Gankro): Tune when to search linear or binary based on B (and maybe K/V).
        // For the B configured as of this writing (B = 6), binary search was *significantly*
        // worse for uints.
        let (found, index) = node.search_linear(key);
        if found {
            Found(Handle {
                node: node,
                index: index
            })
        } else {
            GoDown(Handle {
                node: node,
                index: index
            })
        }
    }

    fn search_linear<Sized? Q>(&self, key: &Q) -> (bool, uint) where Q: BorrowFrom<K> + Ord {
        for (i, k) in self.keys().iter().enumerate() {
            match key.cmp(BorrowFrom::borrow_from(k)) {
                Greater => {},
                Equal => return (true, i),
                Less => return (false, i),
            }
        }
        (false, self.len())
    }
}

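// A hedged illustration (not part of the original module) of the `(found, index)`
// contract of the linear search above, using a standalone mirror of the loop so
// no `Node` has to be built. `search_linear_demo` is hypothetical, not part of
// the module's API.
#[test]
fn test_search_linear_contract() {
    fn search_linear_demo(keys: &[uint], key: uint) -> (bool, uint) {
        for (i, k) in keys.iter().enumerate() {
            match key.cmp(k) {
                Greater => {},
                Equal => return (true, i),
                Less => return (false, i),
            }
        }
        (false, keys.len())
    }

    let keys: &[uint] = &[10, 20, 30];
    assert_eq!(search_linear_demo(keys, 20), (true, 1));  // exact match at index 1
    assert_eq!(search_linear_demo(keys, 25), (false, 2)); // descend into edge 2
    assert_eq!(search_linear_demo(keys, 35), (false, 3)); // past the last key
}
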
impl <K, V> Node<K, V> {
    /// Make a leaf root from scratch
    pub fn make_leaf_root(b: uint) -> Node<K, V> {
        Node::new_leaf(capacity_from_b(b))
    }

    /// Make an internal root and swap it with an old root
    pub fn make_internal_root(left_and_out: &mut Node<K,V>, b: uint, key: K, value: V,
            right: Node<K,V>) {
        let node = mem::replace(left_and_out, unsafe { Node::new_internal(capacity_from_b(b)) });
        left_and_out._len = 1;
        unsafe {
            ptr::write(left_and_out.keys_mut().get_unchecked_mut(0), key);
            ptr::write(left_and_out.vals_mut().get_unchecked_mut(0), value);
            ptr::write(left_and_out.edges_mut().get_unchecked_mut(0), node);
            ptr::write(left_and_out.edges_mut().get_unchecked_mut(1), right);
        }
    }

    /// How many key-value pairs the node contains
    pub fn len(&self) -> uint {
        self._len
    }

    /// How many key-value pairs the node can fit
    pub fn capacity(&self) -> uint {
        self._capacity
    }

    /// If the node has any children
    pub fn is_leaf(&self) -> bool {
        self.edges.0.is_null()
    }

    /// If the node has too few elements
    pub fn is_underfull(&self) -> bool {
        self.len() < min_load_from_capacity(self.capacity())
    }

    /// If the node cannot fit any more elements
    pub fn is_full(&self) -> bool {
        self.len() == self.capacity()
    }
}

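// A hedged end-to-end sketch (not part of the original module) of the simple
// accessors above on a freshly made leaf root. b = 6 mirrors the default
// mentioned in `search`, but any b >= 2 would do; dropping the node exercises
// the cleanup path in `Drop` as well.
#[test]
fn test_fresh_leaf_root_invariants() {
    let node = Node::<uint, uint>::make_leaf_root(6);
    assert!(node.is_leaf());
    assert_eq!(node.len(), 0);
    assert_eq!(node.capacity(), capacity_from_b(6));
    // An empty node is underfull and certainly not full.
    assert!(node.is_underfull());
    assert!(!node.is_full());
}
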
impl<K, V, NodeRef: Deref<Target=Node<K, V>>, Type, NodeType> Handle<NodeRef, Type, NodeType> {
    /// Returns a reference to the node that contains the pointed-to edge or key/value pair. This
    /// is very different from `edge` and `edge_mut` because those return children of the node
    /// returned by `node`.
    pub fn node(&self) -> &Node<K, V> {
        &*self.node
    }
}

impl<K, V, NodeRef, Type, NodeType> Handle<NodeRef, Type, NodeType> where
    NodeRef: Deref<Target=Node<K, V>> + DerefMut,
{
    /// Converts a handle into one that stores the same information using a raw pointer. This can
    /// be useful in conjunction with `from_raw` when the type system is insufficient for
    /// determining the lifetimes of the nodes.
    pub fn as_raw(&mut self) -> Handle<*mut Node<K, V>, Type, NodeType> {
        Handle {
            node: &mut *self.node as *mut _,
            index: self.index
        }
    }
}

impl<K, V, Type, NodeType> Handle<*mut Node<K, V>, Type, NodeType> {
    /// Converts from a handle stored with a raw pointer, which isn't directly usable, to a handle
    /// stored with a reference. This is an unsafe inverse of `as_raw`, and together they allow
    /// unsafely extending the lifetime of the reference to the `Node`.
    pub unsafe fn from_raw<'a>(&'a self) -> Handle<&'a Node<K, V>, Type, NodeType> {
        Handle {
            node: &*self.node,
            index: self.index
        }
    }

    /// Converts from a handle stored with a raw pointer, which isn't directly usable, to a handle
    /// stored with a mutable reference. This is an unsafe inverse of `as_raw`, and together they
    /// allow unsafely extending the lifetime of the reference to the `Node`.
    pub unsafe fn from_raw_mut<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, Type, NodeType> {
        Handle {
            node: &mut *self.node,
            index: self.index
        }
    }
}

impl<'a, K: 'a, V: 'a> Handle<&'a Node<K, V>, handle::Edge, handle::Internal> {
    /// Turns the handle into a reference to the edge it points at. This is necessary because the
    /// returned pointer has a larger lifetime than what would be returned by `edge` or `edge_mut`,
    /// making it more suitable for moving down a chain of nodes.
    pub fn into_edge(self) -> &'a Node<K, V> {
        unsafe {
            self.node.edges().get_unchecked(self.index)
        }
    }
}

impl<'a, K: 'a, V: 'a> Handle<&'a mut Node<K, V>, handle::Edge, handle::Internal> {
    /// Turns the handle into a mutable reference to the edge it points at. This is necessary
    /// because the returned pointer has a larger lifetime than what would be returned by
    /// `edge_mut`, making it more suitable for moving down a chain of nodes.
    pub fn into_edge_mut(self) -> &'a mut Node<K, V> {
        unsafe {
            self.node.edges_mut().get_unchecked_mut(self.index)
        }
    }
}

impl<K, V, NodeRef: Deref<Target=Node<K, V>>> Handle<NodeRef, handle::Edge, handle::Internal> {
    // This doesn't exist because there are no uses for it,
    // but is fine to add, analogous to edge_mut.
    //
    // /// Returns a reference to the edge pointed-to by this handle. This should not be
    // /// confused with `node`, which references the parent node of what is returned here.
    // pub fn edge(&self) -> &Node<K, V>
}

pub enum ForceResult<NodeRef, Type> {
    Leaf(Handle<NodeRef, Type, handle::Leaf>),
    Internal(Handle<NodeRef, Type, handle::Internal>)
}

impl<K, V, NodeRef: Deref<Target=Node<K, V>>, Type> Handle<NodeRef, Type, handle::LeafOrInternal> {
    /// Figure out whether this handle is pointing to something in a leaf node or to something in
    /// an internal node, clarifying the type according to the result.
    pub fn force(self) -> ForceResult<NodeRef, Type> {
        if self.node.is_leaf() {
            Leaf(Handle {
                node: self.node,
                index: self.index
            })
        } else {
            Internal(Handle {
                node: self.node,
                index: self.index
            })
        }
    }
}

impl<K, V, NodeRef> Handle<NodeRef, handle::Edge, handle::Leaf> where
    NodeRef: Deref<Target=Node<K, V>> + DerefMut,
{
    /// Tries to insert this key-value pair at the given index in this leaf node
    /// If the node is full, we have to split it.
    ///
    /// Returns a *mut V to the inserted value, because the caller may want this when
    /// they're done mutating the tree, but we don't want to borrow anything for now.
    pub fn insert_as_leaf(mut self, key: K, value: V) ->
            (InsertionResult<K, V>, *mut V) {
        if !self.node.is_full() {
            // The element can fit, just insert it
            (Fit, unsafe { self.node.insert_kv(self.index, key, value) as *mut _ })
        } else {
            // The element can't fit, this node is full. Split it into two nodes.
            let (new_key, new_val, mut new_right) = self.node.split();
            let left_len = self.node.len();

            let ptr = unsafe {
                if self.index <= left_len {
                    self.node.insert_kv(self.index, key, value)
                } else {
                    // We need to subtract 1 because in splitting we took out new_key and new_val.
                    // Just being in the right node means we are past left_len k/v pairs in the
                    // left node and 1 k/v pair in the parent node.
                    new_right.insert_kv(self.index - left_len - 1, key, value)
                }
            } as *mut _;

            (Split(new_key, new_val, new_right), ptr)
        }
    }
}

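// A hedged arithmetic sketch (not part of the original module) of the index
// bookkeeping above: when an insert lands in the freshly split right node,
// position `index` in the original full node becomes `index - left_len - 1`
// in the right node, since left_len pairs stay left and one pair moves up to
// the parent. The concrete numbers are illustrative assumptions.
#[test]
fn test_split_insert_index_arithmetic() {
    let cap = 11u;                    // a full node: len == capacity
    let left_len = cap - cap / 2 - 1; // pairs remaining on the left after split()
    let index = 9u;                   // an insertion index past the separator
    assert!(index > left_len);

    let right_index = index - left_len - 1;
    // left_len left pairs, plus 1 separator, plus right_index right pairs
    // precede the insertion slot, exactly as in the original node.
    assert_eq!(left_len + 1 + right_index, index);
}
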
impl<K, V, NodeRef> Handle<NodeRef, handle::Edge, handle::Internal> where
    NodeRef: Deref<Target=Node<K, V>> + DerefMut,
{
    /// Returns a mutable reference to the edge pointed-to by this handle. This should not be
    /// confused with `node`, which references the parent node of what is returned here.
    pub fn edge_mut(&mut self) -> &mut Node<K, V> {
        unsafe {
            self.node.edges_mut().get_unchecked_mut(self.index)
        }
    }

    /// Tries to insert this key-value pair at the given index in this internal node
    /// If the node is full, we have to split it.
    pub fn insert_as_internal(mut self, key: K, value: V, right: Node<K, V>)
            -> InsertionResult<K, V> {
        if !self.node.is_full() {
            // The element can fit, just insert it
            unsafe {
                self.node.insert_kv(self.index, key, value);
                self.node.insert_edge(self.index + 1, right); // +1 to insert to the right
            }
            Fit
        } else {
            // The element can't fit, this node is full. Split it into two nodes.
            let (new_key, new_val, mut new_right) = self.node.split();
            let left_len = self.node.len();

            if self.index <= left_len {
                unsafe {
                    self.node.insert_kv(self.index, key, value);
                    self.node.insert_edge(self.index + 1, right); // +1 to insert to the right
                }
            } else {
                unsafe {
                    // The -1 here is for the same reason as in insert_as_leaf - because we
                    // split, there are actually left_len + 1 k/v pairs before the right node, with
                    // the extra 1 being put in the parent.
                    new_right.insert_kv(self.index - left_len - 1, key, value);
                    new_right.insert_edge(self.index - left_len, right);
                }
            }

            Split(new_key, new_val, new_right)
        }
    }

    /// Handle an underflow in this node's child. We favour handling "to the left" because we know
    /// we're empty, but our neighbour can be full. Handling to the left means when we choose to
    /// steal, we pop off the end of our neighbour (always fast) and "unshift" ourselves
    /// (always slow, but at least faster since we know we're half-empty).
    /// Handling "to the right" reverses these roles. Of course, we merge whenever possible
    /// because we want dense nodes, and merging is about equal work regardless of direction.
    pub fn handle_underflow(mut self) {
        unsafe {
            if self.index > 0 {
                self.handle_underflow_to_left();
            } else {
                self.handle_underflow_to_right();
            }
        }
    }

    /// Right is underflowed. Tries to steal from left,
    /// but merges left and right if left is low too.
    unsafe fn handle_underflow_to_left(&mut self) {
        let left_len = self.node.edges()[self.index - 1].len();
        if left_len > min_load_from_capacity(self.node.capacity()) {
            self.left_kv().steal_rightward();
        } else {
            self.left_kv().merge_children();
        }
    }

    /// Left is underflowed. Tries to steal from the right,
    /// but merges left and right if right is low too.
    unsafe fn handle_underflow_to_right(&mut self) {
        let right_len = self.node.edges()[self.index + 1].len();
        if right_len > min_load_from_capacity(self.node.capacity()) {
            self.right_kv().steal_leftward();
        } else {
            self.right_kv().merge_children();
        }
    }
}

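// A hedged check (not part of the original module) of the steal-vs-merge rule
// above: a sibling strictly above the minimum load can donate a pair and stay
// legal, while merging an underfull child, the separator, and a minimum-load
// sibling always fits in one node. b = 6 mirrors the default noted in `search`.
#[test]
fn test_steal_or_merge_thresholds() {
    let cap = capacity_from_b(6);
    let min = min_load_from_capacity(cap);

    // Stealing: the donor had at least min + 1 pairs, so it keeps at least min.
    let donor_after = (min + 1) - 1;
    assert!(donor_after >= min);

    // Merging: (min - 1) underfull pairs + 1 separator + min sibling pairs fit.
    assert!((min - 1) + 1 + min <= cap);
}
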
impl<K, V, NodeRef, NodeType> Handle<NodeRef, handle::Edge, NodeType> where
    NodeRef: Deref<Target=Node<K, V>> + DerefMut,
{
    /// Gets the handle pointing to the key/value pair just to the left of the pointed-to edge.
    /// This is unsafe because the handle might point to the first edge in the node, which has no
    /// pair to its left.
    unsafe fn left_kv<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
        Handle {
            node: &mut *self.node,
            index: self.index - 1
        }
    }

    /// Gets the handle pointing to the key/value pair just to the right of the pointed-to edge.
    /// This is unsafe because the handle might point to the last edge in the node, which has no
    /// pair to its right.
    unsafe fn right_kv<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
        Handle {
            node: &mut *self.node,
            index: self.index
        }
    }
}

impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a Node<K, V>, handle::KV, NodeType> {
    /// Turns the handle into references to the key and value it points at. This is necessary
    /// because the returned pointers have larger lifetimes than what would be returned by `key`
    /// or `val`.
    pub fn into_kv(self) -> (&'a K, &'a V) {
        let (keys, vals) = self.node.as_slices();
        unsafe {
            (
                keys.get_unchecked(self.index),
                vals.get_unchecked(self.index)
            )
        }
    }
}

impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
    /// Turns the handle into mutable references to the key and value it points at. This is
    /// necessary because the returned pointers have larger lifetimes than what would be returned
    /// by `key_mut` or `val_mut`.
    pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
        let (keys, vals) = self.node.as_slices_mut();
        unsafe {
            (
                keys.get_unchecked_mut(self.index),
                vals.get_unchecked_mut(self.index)
            )
        }
    }

    /// Convert this handle into one pointing at the edge immediately to the left of the key/value
    /// pair pointed-to by this handle. This is useful because it returns a reference with larger
    /// lifetime than `left_edge`.
    pub fn into_left_edge(self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
        Handle {
            node: &mut *self.node,
            index: self.index
        }
    }
}

impl<'a, K: 'a, V: 'a, NodeRef: Deref<Target=Node<K, V>> + 'a, NodeType> Handle<NodeRef, handle::KV,
                                                                                NodeType> {
    // These are fine to include, but are currently unneeded.
    //
    // /// Returns a reference to the key pointed-to by this handle. This doesn't return a
    // /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
    // /// handle.
    // pub fn key(&'a self) -> &'a K {
    //     unsafe { self.node.keys().get_unchecked(self.index) }
    // }
    //
    // /// Returns a reference to the value pointed-to by this handle. This doesn't return a
    // /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
    // /// handle.
    // pub fn val(&'a self) -> &'a V {
    //     unsafe { self.node.vals().get_unchecked(self.index) }
    // }
}

impl<'a, K: 'a, V: 'a, NodeRef, NodeType> Handle<NodeRef, handle::KV, NodeType> where
    NodeRef: 'a + Deref<Target=Node<K, V>> + DerefMut,
{
    /// Returns a mutable reference to the key pointed-to by this handle. This doesn't return a
    /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
    /// handle.
    pub fn key_mut(&'a mut self) -> &'a mut K {
        unsafe { self.node.keys_mut().get_unchecked_mut(self.index) }
    }

    /// Returns a mutable reference to the value pointed-to by this handle. This doesn't return a
    /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
    /// handle.
    pub fn val_mut(&'a mut self) -> &'a mut V {
        unsafe { self.node.vals_mut().get_unchecked_mut(self.index) }
    }
}

impl<K, V, NodeRef, NodeType> Handle<NodeRef, handle::KV, NodeType> where
    NodeRef: Deref<Target=Node<K, V>> + DerefMut,
{
    /// Gets the handle pointing to the edge immediately to the left of the key/value pair pointed
    /// to by this handle.
    pub fn left_edge<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
        Handle {
            node: &mut *self.node,
            index: self.index
        }
    }

    /// Gets the handle pointing to the edge immediately to the right of the key/value pair pointed
    /// to by this handle.
    pub fn right_edge<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
        Handle {
            node: &mut *self.node,
            index: self.index + 1
        }
    }
}

impl<K, V, NodeRef> Handle<NodeRef, handle::KV, handle::Leaf> where
    NodeRef: Deref<Target=Node<K, V>> + DerefMut,
{
    /// Removes the key/value pair at the handle's location.
    ///
    /// # Panics (in debug build)
    ///
    /// Panics if the node containing the pair is not a leaf node.
    pub fn remove_as_leaf(mut self) -> (K, V) {
        unsafe { self.node.remove_kv(self.index) }
    }
}

impl<K, V, NodeRef> Handle<NodeRef, handle::KV, handle::Internal> where
    NodeRef: Deref<Target=Node<K, V>> + DerefMut
{
    /// Steal! Stealing is roughly analogous to a binary tree rotation.
    /// In this case, we're "rotating" right.
    unsafe fn steal_rightward(&mut self) {
        // Take the biggest stuff off left
        let (mut key, mut val, edge) = {
            let mut left_handle = self.left_edge();
            let left = left_handle.edge_mut();
            let (key, val) = left.pop_kv();
            let edge = if left.is_leaf() {
                None
            } else {
                Some(left.pop_edge())
            };

            (key, val, edge)
        };

        // Swap the parent's separating key-value pair with left's
        mem::swap(&mut key, self.key_mut());
        mem::swap(&mut val, self.val_mut());

        // Put them at the start of right
        let mut right_handle = self.right_edge();
        let right = right_handle.edge_mut();
        right.insert_kv(0, key, val);
        match edge {
            Some(edge) => right.insert_edge(0, edge),
            None => {}
        }
    }

    /// Steal! Stealing is roughly analogous to a binary tree rotation.
    /// In this case, we're "rotating" left.
    unsafe fn steal_leftward(&mut self) {
        // Take the smallest stuff off right
        let (mut key, mut val, edge) = {
            let mut right_handle = self.right_edge();
            let right = right_handle.edge_mut();
            let (key, val) = right.remove_kv(0);
            let edge = if right.is_leaf() {
                None
            } else {
                Some(right.remove_edge(0))
            };

            (key, val, edge)
        };

        // Swap the parent's separating key-value pair with right's
        mem::swap(&mut key, self.key_mut());
        mem::swap(&mut val, self.val_mut());

        // Put them at the end of left
        let mut left_handle = self.left_edge();
        let left = left_handle.edge_mut();
        left.push_kv(key, val);
        match edge {
            Some(edge) => left.push_edge(edge),
            None => {}
        }
    }

    /// Merge! Smooshes left and right into one node, along with the key-value
    /// pair that separated them in their parent.
    unsafe fn merge_children(mut self) {
        // Permanently remove right's index, and the key-value pair that separates
        // left and right
        let (key, val) = self.node.remove_kv(self.index);
        let right = self.node.remove_edge(self.index + 1);

        // Give left right's stuff.
        self.left_edge().edge_mut()
            .absorb(key, val, right);
    }
}

impl<K, V> Node<K, V> {
    /// Returns the mutable handle pointing to the key/value pair at a given index.
    ///
    /// # Panics (in debug build)
    ///
    /// Panics if the given index is out of bounds.
    pub fn kv_handle(&mut self, index: uint) -> Handle<&mut Node<K, V>, handle::KV,
                                                       handle::LeafOrInternal> {
        // Necessary for correctness, but in a private module
        debug_assert!(index < self.len(), "kv_handle index out of bounds");
        Handle {
            node: self,
            index: index
        }
    }

    pub fn iter<'a>(&'a self) -> Traversal<'a, K, V> {
        let is_leaf = self.is_leaf();
        let (keys, vals, edges) = self.as_slices_internal();
        AbsTraversal {
            inner: ElemsAndEdges(
                keys.iter().zip(vals.iter()),
                edges.iter()
            ),
            head_is_edge: true,
            tail_is_edge: true,
            has_edges: !is_leaf,
        }
    }

    pub fn iter_mut<'a>(&'a mut self) -> MutTraversal<'a, K, V> {
        let is_leaf = self.is_leaf();
        let (keys, vals, edges) = self.as_slices_internal_mut();
        AbsTraversal {
            inner: ElemsAndEdges(
                keys.iter().zip(vals.iter_mut()),
                edges.iter_mut()
            ),
            head_is_edge: true,
            tail_is_edge: true,
            has_edges: !is_leaf,
        }
    }

    pub fn into_iter(self) -> MoveTraversal<K, V> {
        unsafe {
            let ret = MoveTraversal {
                inner: MoveTraversalImpl {
                    keys: RawItems::from_slice(self.keys()),
                    vals: RawItems::from_slice(self.vals()),
                    edges: RawItems::from_slice(self.edges()),

                    ptr: self.keys.0 as *mut u8,
                    capacity: self.capacity(),
                    is_leaf: self.is_leaf()
                },
                head_is_edge: true,
                tail_is_edge: true,
                has_edges: !self.is_leaf(),
            };

            // The MoveTraversalImpl is now responsible for the elements and the
            // allocation, so the Node must not run its own destructor.
            mem::forget(self);

            ret
        }
    }

    /// When a node has no keys or values and only a single edge, extract that edge.
    pub fn hoist_lone_child(&mut self) {
        // Necessary for correctness, but in a private module
        debug_assert!(self.len() == 0);
        debug_assert!(!self.is_leaf());

        unsafe {
            let ret = ptr::read(self.edges().get_unchecked(0));
            self.destroy();
            ptr::write(self, ret);
        }
    }
}

// Vector functions (all unchecked)
impl<K, V> Node<K, V> {
    // This must be followed by push_edge on an internal node.
    #[inline]
    unsafe fn push_kv(&mut self, key: K, val: V) {
        let len = self.len();

        ptr::write(self.keys_mut().get_unchecked_mut(len), key);
        ptr::write(self.vals_mut().get_unchecked_mut(len), val);

        self._len += 1;
    }

    // This can only be called immediately after a call to push_kv.
    #[inline]
    unsafe fn push_edge(&mut self, edge: Node<K, V>) {
        let len = self.len();

        ptr::write(self.edges_mut().get_unchecked_mut(len), edge);
    }

    // This must be followed by insert_edge on an internal node.
    #[inline]
    unsafe fn insert_kv(&mut self, index: uint, key: K, val: V) -> &mut V {
        ptr::copy_memory(
            self.keys_mut().as_mut_ptr().offset(index as int + 1),
            self.keys().as_ptr().offset(index as int),
            self.len() - index
        );
        ptr::copy_memory(
            self.vals_mut().as_mut_ptr().offset(index as int + 1),
            self.vals().as_ptr().offset(index as int),
            self.len() - index
        );

        ptr::write(self.keys_mut().get_unchecked_mut(index), key);
        ptr::write(self.vals_mut().get_unchecked_mut(index), val);

        self._len += 1;

        self.vals_mut().get_unchecked_mut(index)
    }

    // This can only be called immediately after a call to insert_kv.
    #[inline]
    unsafe fn insert_edge(&mut self, index: uint, edge: Node<K, V>) {
        ptr::copy_memory(
            self.edges_mut().as_mut_ptr().offset(index as int + 1),
            self.edges().as_ptr().offset(index as int),
            self.len() - index
        );
        ptr::write(self.edges_mut().get_unchecked_mut(index), edge);
    }

    // This must be followed by pop_edge on an internal node.
    #[inline]
    unsafe fn pop_kv(&mut self) -> (K, V) {
        let key = ptr::read(self.keys().get_unchecked(self.len() - 1));
        let val = ptr::read(self.vals().get_unchecked(self.len() - 1));

        self._len -= 1;

        (key, val)
    }

    // This can only be called immediately after a call to pop_kv.
    #[inline]
    unsafe fn pop_edge(&mut self) -> Node<K, V> {
        let edge = ptr::read(self.edges().get_unchecked(self.len() + 1));

        edge
    }

    // This must be followed by remove_edge on an internal node.
    #[inline]
    unsafe fn remove_kv(&mut self, index: uint) -> (K, V) {
        let key = ptr::read(self.keys().get_unchecked(index));
        let val = ptr::read(self.vals().get_unchecked(index));

        ptr::copy_memory(
            self.keys_mut().as_mut_ptr().offset(index as int),
            self.keys().as_ptr().offset(index as int + 1),
            self.len() - index - 1
        );
        ptr::copy_memory(
            self.vals_mut().as_mut_ptr().offset(index as int),
            self.vals().as_ptr().offset(index as int + 1),
            self.len() - index - 1
        );

        self._len -= 1;

        (key, val)
    }

    // This can only be called immediately after a call to remove_kv.
    #[inline]
    unsafe fn remove_edge(&mut self, index: uint) -> Node<K, V> {
        let edge = ptr::read(self.edges().get_unchecked(index));

        ptr::copy_memory(
            self.edges_mut().as_mut_ptr().offset(index as int),
            self.edges().as_ptr().offset(index as int + 1),
            self.len() - index + 1
        );

        edge
    }
}

// Private implementation details
impl<K, V> Node<K, V> {
    /// Node is full, so split it into two nodes, and yield the middle-most key-value pair
    /// because we have one too many, and our parent now has one too few
    fn split(&mut self) -> (K, V, Node<K, V>) {
        // Necessary for correctness, but in a private function
        debug_assert!(self.len() > 0);

        let mut right = if self.is_leaf() {
            Node::new_leaf(self.capacity())
        } else {
            unsafe { Node::new_internal(self.capacity()) }
        };

        unsafe {
            right._len = self.len() / 2;
            let right_offset = self.len() - right.len();
            ptr::copy_nonoverlapping_memory(
                right.keys_mut().as_mut_ptr(),
                self.keys().as_ptr().offset(right_offset as int),
                right.len()
            );
            ptr::copy_nonoverlapping_memory(
                right.vals_mut().as_mut_ptr(),
                self.vals().as_ptr().offset(right_offset as int),
                right.len()
            );
            if !self.is_leaf() {
                ptr::copy_nonoverlapping_memory(
                    right.edges_mut().as_mut_ptr(),
                    self.edges().as_ptr().offset(right_offset as int),
                    right.len() + 1
                );
            }

            let key = ptr::read(self.keys().get_unchecked(right_offset - 1));
            let val = ptr::read(self.vals().get_unchecked(right_offset - 1));

            self._len = right_offset - 1;

            (key, val, right)
        }
    }

    /// Take all the values from right, separated by the given key and value
    fn absorb(&mut self, key: K, val: V, mut right: Node<K, V>) {
        // Necessary for correctness, but in a private function
        // Just as a sanity check, make sure we can fit this guy in
        debug_assert!(self.len() + right.len() <= self.capacity());
        debug_assert!(self.is_leaf() == right.is_leaf());

        unsafe {
            let old_len = self.len();
            self._len += right.len() + 1;

            ptr::write(self.keys_mut().get_unchecked_mut(old_len), key);
            ptr::write(self.vals_mut().get_unchecked_mut(old_len), val);

            ptr::copy_nonoverlapping_memory(
                self.keys_mut().as_mut_ptr().offset(old_len as int + 1),
                right.keys().as_ptr(),
                right.len()
            );
            ptr::copy_nonoverlapping_memory(
                self.vals_mut().as_mut_ptr().offset(old_len as int + 1),
                right.vals().as_ptr(),
                right.len()
            );
            if !self.is_leaf() {
                ptr::copy_nonoverlapping_memory(
                    self.edges_mut().as_mut_ptr().offset(old_len as int + 1),
                    right.edges().as_ptr(),
                    right.len() + 1
                );
            }

            // The elements have been moved out, so only free right's buffer.
            right.destroy();
            mem::forget(right);
        }
    }
}

/// Get the capacity of a node from the order of the parent B-Tree
fn capacity_from_b(b: uint) -> uint {
    2 * b - 1
}

/// Get the minimum load of a node from its capacity
fn min_load_from_capacity(cap: uint) -> uint {
    // B - 1
    cap / 2
}

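// A hedged arithmetic check (not part of the original module) tying `split`,
// `capacity_from_b`, and `min_load_from_capacity` together: splitting a full
// node leaves both halves at or above the minimum load, and two minimum-load
// nodes plus their separating pair merge back into a single node. The loop
// bound of 16 is arbitrary.
#[test]
fn test_split_and_merge_arithmetic() {
    for b in range(2u, 16) {
        let cap = capacity_from_b(b);
        let min = min_load_from_capacity(cap);

        // Mirror the bookkeeping in `split` for a full node of len == cap.
        let right_len = cap / 2;
        let right_offset = cap - right_len;
        let left_len = right_offset - 1;

        // Every pair is accounted for: left + separator + right.
        assert_eq!(left_len + 1 + right_len, cap);
        // Neither half is underfull after the split.
        assert!(left_len >= min && right_len >= min);
        // Merging two minimum-load nodes plus the separator fits in one node.
        assert!(min + min + 1 <= cap);
    }
}
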
/// A trait for pairs of `Iterator`s, one over edges and the other over key/value pairs. This is
/// necessary, as the `MoveTraversalImpl` needs to have a destructor that deallocates the `Node`,
/// and a pair of `Iterator`s would require two independent destructors.
trait TraversalImpl<K, V, E> {
    fn next_kv(&mut self) -> Option<(K, V)>;
    fn next_kv_back(&mut self) -> Option<(K, V)>;

    fn next_edge(&mut self) -> Option<E>;
    fn next_edge_back(&mut self) -> Option<E>;
}

/// A `TraversalImpl` that actually is backed by two iterators. This works in the non-moving case,
/// as no deallocation needs to be done.
struct ElemsAndEdges<Elems, Edges>(Elems, Edges);

impl<K, V, E, Elems: DoubleEndedIterator, Edges: DoubleEndedIterator>
        TraversalImpl<K, V, E> for ElemsAndEdges<Elems, Edges>
    where Elems : Iterator<Item=(K, V)>, Edges : Iterator<Item=E>
{
    fn next_kv(&mut self) -> Option<(K, V)> { self.0.next() }
    fn next_kv_back(&mut self) -> Option<(K, V)> { self.0.next_back() }

    fn next_edge(&mut self) -> Option<E> { self.1.next() }
    fn next_edge_back(&mut self) -> Option<E> { self.1.next_back() }
}

/// A `TraversalImpl` taking a `Node` by value.
struct MoveTraversalImpl<K, V> {
    keys: RawItems<K>,
    vals: RawItems<V>,
    edges: RawItems<Node<K, V>>,

    // For deallocation when we are done iterating.
    ptr: *mut u8,
    capacity: uint,
    is_leaf: bool
}

impl<K, V> TraversalImpl<K, V, Node<K, V>> for MoveTraversalImpl<K, V> {
    fn next_kv(&mut self) -> Option<(K, V)> {
        match (self.keys.next(), self.vals.next()) {
            (Some(k), Some(v)) => Some((k, v)),
            _ => None
        }
    }

    fn next_kv_back(&mut self) -> Option<(K, V)> {
        match (self.keys.next_back(), self.vals.next_back()) {
            (Some(k), Some(v)) => Some((k, v)),
            _ => None
        }
    }

    fn next_edge(&mut self) -> Option<Node<K, V>> {
        // Necessary for correctness, but in a private module
        debug_assert!(!self.is_leaf);
        self.edges.next()
    }

    fn next_edge_back(&mut self) -> Option<Node<K, V>> {
        // Necessary for correctness, but in a private module
        debug_assert!(!self.is_leaf);
        self.edges.next_back()
    }
}

#[unsafe_destructor]
impl<K, V> Drop for MoveTraversalImpl<K, V> {
    fn drop(&mut self) {
        // We need to cleanup the stored values manually, as the RawItems destructor would run
        // after our deallocation.
        for _ in self.keys {}
        for _ in self.vals {}
        for _ in self.edges {}

        let (alignment, size) =
                calculate_allocation_generic::<K, V>(self.capacity, self.is_leaf);
        unsafe { heap::deallocate(self.ptr, size, alignment) };
    }
}

/// An abstraction over all the different kinds of traversals a node supports
struct AbsTraversal<Impl> {
    inner: Impl,
    head_is_edge: bool,
    tail_is_edge: bool,
    has_edges: bool,
}

/// A single atomic step in a traversal. Either an element is visited, or an edge is followed
pub enum TraversalItem<K, V, E> {
    Elem(K, V),
    Edge(E),
}

/// A traversal over a node's entries and edges
pub type Traversal<'a, K, V> = AbsTraversal<ElemsAndEdges<Zip<slice::Iter<'a, K>,
                                                              slice::Iter<'a, V>>,
                                                          slice::Iter<'a, Node<K, V>>>>;

/// A mutable traversal over a node's entries and edges
pub type MutTraversal<'a, K, V> = AbsTraversal<ElemsAndEdges<Zip<slice::Iter<'a, K>,
                                                                 slice::IterMut<'a, V>>,
                                                             slice::IterMut<'a, Node<K, V>>>>;

/// An owning traversal over a node's entries and edges
pub type MoveTraversal<K, V> = AbsTraversal<MoveTraversalImpl<K, V>>;

impl<K, V, E, Impl: TraversalImpl<K, V, E>> Iterator for AbsTraversal<Impl> {
    type Item = TraversalItem<K, V, E>;

    fn next(&mut self) -> Option<TraversalItem<K, V, E>> {
        let head_is_edge = self.head_is_edge;
        self.head_is_edge = !head_is_edge;

        if head_is_edge && self.has_edges {
            self.inner.next_edge().map(|node| Edge(node))
        } else {
            self.inner.next_kv().map(|(k, v)| Elem(k, v))
        }
    }
}

impl<K, V, E, Impl: TraversalImpl<K, V, E>> DoubleEndedIterator for AbsTraversal<Impl> {
    fn next_back(&mut self) -> Option<TraversalItem<K, V, E>> {
        let tail_is_edge = self.tail_is_edge;
        self.tail_is_edge = !tail_is_edge;

        if tail_is_edge && self.has_edges {
            self.inner.next_edge_back().map(|node| Edge(node))
        } else {
            self.inner.next_kv_back().map(|(k, v)| Elem(k, v))
        }
    }
}

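// A hedged standalone sketch (not part of the original module) of the
// alternation scheme above: for an internal node the full sequence is edge,
// elem, edge, ..., elem, edge, so n pairs interleave with n + 1 edges. The toy
// loop below mirrors `head_is_edge` flipping on every call to `next`.
#[test]
fn test_traversal_alternation() {
    let n = 3u; // pairs in a hypothetical internal node
    let mut head_is_edge = true;
    let mut edges = 0u;
    let mut elems = 0u;

    for _ in range(0u, 2 * n + 1) {
        if head_is_edge { edges += 1; } else { elems += 1; }
        head_is_edge = !head_is_edge;
    }

    assert_eq!(edges, n + 1);
    assert_eq!(elems, n);
}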