// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This module contains the internal representation and logic for a B-Tree's node,
// behind a safe interface, so that BTreeMap itself does not depend on any of these details.
pub use self::InsertionResult::*;
pub use self::SearchResult::*;
pub use self::ForceResult::*;
pub use self::TraversalItem::*;

use core::prelude::*;

use core::{slice, mem, ptr, cmp, num, raw};
use core::iter::Zip;
use core::borrow::BorrowFrom;
use alloc::heap;
/// Represents the result of an Insertion: either the item fit, or the node had to split
pub enum InsertionResult<K, V> {
    /// The inserted element fit
    Fit,
    /// The inserted element did not fit, so the node was split
    Split(K, V, Node<K, V>),
}
/// Represents the result of a search for a key in a single node
pub enum SearchResult<NodeRef> {
    /// The element was found at the given index
    Found(Handle<NodeRef, handle::KV, handle::LeafOrInternal>),
    /// The element wasn't found, but if it's anywhere, it must be beyond this edge
    GoDown(Handle<NodeRef, handle::Edge, handle::LeafOrInternal>),
}
/// A B-Tree Node. We keep keys/edges/values separate to optimize searching for keys.
#[unsafe_no_drop_flag]
pub struct Node<K, V> {
    // To avoid the need for multiple allocations, we allocate a single buffer with enough space
    // for `capacity` keys, `capacity` values, and (in internal nodes) `capacity + 1` edges.
    // Despite this, we store three separate pointers to the three "chunks" of the buffer because
    // the performance drops significantly if the locations of the vals and edges need to be
    // recalculated upon access.
    //
    // These will never be null during normal usage of a `Node`. However, to avoid the need for a
    // drop flag, `Node::drop` zeroes `keys`, signaling that the `Node` has already been cleaned
    // up.
    keys: *mut K,
    vals: *mut V,

    // In leaf nodes, this will be null, and no space will be allocated for edges.
    edges: *mut Node<K, V>,

    // At any given time, there will be `_len` keys, `_len` values, and (in an internal node)
    // `_len + 1` edges. In a leaf node, there will never be any edges.
    //
    // Note: instead of accessing this field directly, please call the `len()` method, which should
    // be more stable in the face of representation changes.
    _len: uint,

    // FIXME(gereeter) It shouldn't be necessary to store the capacity in every node, as it should
    // be constant throughout the tree. Once a solution to this is found, it might be possible to
    // also pass down the offsets into the buffer that vals and edges are stored at, removing the
    // need for those two pointers.
    //
    // Note: instead of accessing this field directly, please call the `capacity()` method, which
    // should be more stable in the face of representation changes.
    _capacity: uint,
}
/// Rounds up to a multiple of a power of two. Returns the closest multiple
/// of `target_alignment` that is higher or equal to `unrounded`.
///
/// # Panics
///
/// Panics if `target_alignment` is not a power of two.
fn round_up_to_next(unrounded: uint, target_alignment: uint) -> uint {
    assert!(num::UnsignedInt::is_power_of_two(target_alignment));
    (unrounded + target_alignment - 1) & !(target_alignment - 1)
}
#[test]
fn test_rounding() {
    assert_eq!(round_up_to_next(0, 4), 0);
    assert_eq!(round_up_to_next(1, 4), 4);
    assert_eq!(round_up_to_next(2, 4), 4);
    assert_eq!(round_up_to_next(3, 4), 4);
    assert_eq!(round_up_to_next(4, 4), 4);
    assert_eq!(round_up_to_next(5, 4), 8);
}
// Returns a tuple of (val_offset, edge_offset),
// from the start of a mallocated array.
fn calculate_offsets(keys_size: uint,
                     vals_size: uint, vals_align: uint,
                     edges_align: uint)
                     -> (uint, uint) {
    let vals_offset = round_up_to_next(keys_size, vals_align);
    let end_of_vals = vals_offset + vals_size;

    let edges_offset = round_up_to_next(end_of_vals, edges_align);

    (vals_offset, edges_offset)
}
// Returns a tuple of (minimum required alignment, array_size),
// from the start of a mallocated array.
fn calculate_allocation(keys_size: uint, keys_align: uint,
                        vals_size: uint, vals_align: uint,
                        edges_size: uint, edges_align: uint)
                        -> (uint, uint) {
    let (_, edges_offset) = calculate_offsets(keys_size,
                                              vals_size, vals_align,
                                                         edges_align);
    let end_of_edges = edges_offset + edges_size;

    let min_align = cmp::max(keys_align, cmp::max(vals_align, edges_align));

    (min_align, end_of_edges)
}
#[test]
fn test_offset_calculation() {
    assert_eq!(calculate_allocation(128, 8, 15, 1, 4, 4), (8, 148));
    assert_eq!(calculate_allocation(3, 1, 2, 1, 1, 1), (1, 6));
    assert_eq!(calculate_allocation(6, 2, 12, 4, 24, 8), (8, 48));
    assert_eq!(calculate_offsets(128, 15, 1, 4), (128, 144));
    assert_eq!(calculate_offsets(3, 2, 1, 1), (3, 5));
    assert_eq!(calculate_offsets(6, 12, 4, 8), (8, 24));
}
fn calculate_allocation_generic<K, V>(capacity: uint, is_leaf: bool) -> (uint, uint) {
    let (keys_size, keys_align) = (capacity * mem::size_of::<K>(), mem::min_align_of::<K>());
    let (vals_size, vals_align) = (capacity * mem::size_of::<V>(), mem::min_align_of::<V>());
    let (edges_size, edges_align) = if is_leaf {
        (0, 1)
    } else {
        ((capacity + 1) * mem::size_of::<Node<K, V>>(), mem::min_align_of::<Node<K, V>>())
    };

    calculate_allocation(
            keys_size, keys_align,
            vals_size, vals_align,
            edges_size, edges_align
    )
}
fn calculate_offsets_generic<K, V>(capacity: uint, is_leaf: bool) -> (uint, uint) {
    let keys_size = capacity * mem::size_of::<K>();
    let vals_size = capacity * mem::size_of::<V>();
    let vals_align = mem::min_align_of::<V>();
    let edges_align = if is_leaf {
        1
    } else {
        mem::min_align_of::<Node<K, V>>()
    };

    calculate_offsets(
            keys_size,
            vals_size, vals_align,
                       edges_align
    )
}
/// An iterator over a slice that owns the elements of the slice but not the allocation.
struct RawItems<T> {
    head: *const T,
    tail: *const T,
}

impl<T> RawItems<T> {
    unsafe fn from_slice(slice: &[T]) -> RawItems<T> {
        RawItems::from_parts(slice.as_ptr(), slice.len())
    }

    unsafe fn from_parts(ptr: *const T, len: uint) -> RawItems<T> {
        if mem::size_of::<T>() == 0 {
            RawItems {
                head: ptr,
                tail: (ptr as uint + len) as *const T,
            }
        } else {
            RawItems {
                head: ptr,
                tail: ptr.offset(len as int),
            }
        }
    }

    unsafe fn push(&mut self, val: T) {
        ptr::write(self.tail as *mut T, val);

        if mem::size_of::<T>() == 0 {
            self.tail = (self.tail as uint + 1) as *const T;
        } else {
            self.tail = self.tail.offset(1);
        }
    }
}
impl<T> Iterator<T> for RawItems<T> {
    fn next(&mut self) -> Option<T> {
        if self.head == self.tail {
            None
        } else {
            unsafe {
                let ret = Some(ptr::read(self.head));

                if mem::size_of::<T>() == 0 {
                    self.head = (self.head as uint + 1) as *const T;
                } else {
                    self.head = self.head.offset(1);
                }

                ret
            }
        }
    }
}
impl<T> DoubleEndedIterator<T> for RawItems<T> {
    fn next_back(&mut self) -> Option<T> {
        if self.head == self.tail {
            None
        } else {
            unsafe {
                if mem::size_of::<T>() == 0 {
                    self.tail = (self.tail as uint - 1) as *const T;
                } else {
                    self.tail = self.tail.offset(-1);
                }

                Some(ptr::read(self.tail))
            }
        }
    }
}
#[unsafe_destructor]
impl<T> Drop for RawItems<T> {
    fn drop(&mut self) {
        for _ in *self {}
    }
}
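// Illustrative test (an addition, not part of the original source): `RawItems`
// yields the elements of a buffer by value, front or back, without freeing the
// buffer. `uint` is `Copy`, so reading elements out of a slice we don't own is
// harmless here; for non-`Copy` types the caller must guarantee the originals
// are never dropped again.
#[test]
fn test_raw_items_roundtrip() {
    unsafe {
        let values: &[uint] = &[1, 2, 3];
        let mut items: RawItems<uint> = RawItems::from_slice(values);
        assert_eq!(items.next(), Some(1));
        assert_eq!(items.next_back(), Some(3));
        assert_eq!(items.next(), Some(2));
        assert_eq!(items.next(), None);
    }
}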
#[unsafe_destructor]
impl<K, V> Drop for Node<K, V> {
    fn drop(&mut self) {
        if self.keys.is_null() {
            // We have already cleaned up this node.
            return;
        }

        // Do the actual cleanup.
        unsafe {
            drop(RawItems::from_slice(self.keys()));
            drop(RawItems::from_slice(self.vals()));
            drop(RawItems::from_slice(self.edges()));

            self.destroy();
        }

        self.keys = ptr::null_mut();
    }
}
impl<K, V> Node<K, V> {
    /// Make a new internal node. The caller must initialize the result to fix the invariant that
    /// there are `len() + 1` edges.
    unsafe fn new_internal(capacity: uint) -> Node<K, V> {
        let (alignment, size) = calculate_allocation_generic::<K, V>(capacity, false);

        let buffer = heap::allocate(size, alignment);
        if buffer.is_null() { ::alloc::oom(); }

        let (vals_offset, edges_offset) = calculate_offsets_generic::<K, V>(capacity, false);

        Node {
            keys: buffer as *mut K,
            vals: buffer.offset(vals_offset as int) as *mut V,
            edges: buffer.offset(edges_offset as int) as *mut Node<K, V>,
            _len: 0,
            _capacity: capacity,
        }
    }

    /// Make a new leaf node
    fn new_leaf(capacity: uint) -> Node<K, V> {
        let (alignment, size) = calculate_allocation_generic::<K, V>(capacity, true);

        let buffer = unsafe { heap::allocate(size, alignment) };
        if buffer.is_null() { ::alloc::oom(); }

        let (vals_offset, _) = calculate_offsets_generic::<K, V>(capacity, true);

        Node {
            keys: buffer as *mut K,
            vals: unsafe { buffer.offset(vals_offset as int) as *mut V },
            edges: ptr::null_mut(),
            _len: 0,
            _capacity: capacity,
        }
    }
    unsafe fn destroy(&mut self) {
        let (alignment, size) =
                calculate_allocation_generic::<K, V>(self.capacity(), self.is_leaf());
        heap::deallocate(self.keys as *mut u8, size, alignment);
    }
    pub fn as_slices<'a>(&'a self) -> (&'a [K], &'a [V]) {
        unsafe {(
            mem::transmute(raw::Slice {
                data: self.keys as *const K,
                len: self.len()
            }),
            mem::transmute(raw::Slice {
                data: self.vals as *const V,
                len: self.len()
            })
        )}
    }

    pub fn as_slices_mut<'a>(&'a mut self) -> (&'a mut [K], &'a mut [V]) {
        unsafe { mem::transmute(self.as_slices()) }
    }

    pub fn as_slices_internal<'a>(&'a self) -> (&'a [K], &'a [V], &'a [Node<K, V>]) {
        let (keys, vals) = self.as_slices();
        let edges: &[_] = if self.is_leaf() {
            &[]
        } else {
            unsafe {
                mem::transmute(raw::Slice {
                    data: self.edges as *const Node<K, V>,
                    len: self.len() + 1
                })
            }
        };
        (keys, vals, edges)
    }

    pub fn as_slices_internal_mut<'a>(&'a mut self) -> (&'a mut [K], &'a mut [V],
                                                        &'a mut [Node<K, V>]) {
        unsafe { mem::transmute(self.as_slices_internal()) }
    }
    pub fn keys<'a>(&'a self) -> &'a [K] {
        self.as_slices().0
    }

    pub fn keys_mut<'a>(&'a mut self) -> &'a mut [K] {
        self.as_slices_mut().0
    }

    pub fn vals<'a>(&'a self) -> &'a [V] {
        self.as_slices().1
    }

    pub fn vals_mut<'a>(&'a mut self) -> &'a mut [V] {
        self.as_slices_mut().1
    }

    pub fn edges<'a>(&'a self) -> &'a [Node<K, V>] {
        self.as_slices_internal().2
    }

    pub fn edges_mut<'a>(&'a mut self) -> &'a mut [Node<K, V>] {
        self.as_slices_internal_mut().2
    }
}
// FIXME(gereeter) Write an efficient clone_from
impl<K: Clone, V: Clone> Clone for Node<K, V> {
    fn clone(&self) -> Node<K, V> {
        let mut ret = if self.is_leaf() {
            Node::new_leaf(self.capacity())
        } else {
            unsafe { Node::new_internal(self.capacity()) }
        };

        unsafe {
            // For failure safety
            let mut keys = RawItems::from_parts(ret.keys().as_ptr(), 0);
            let mut vals = RawItems::from_parts(ret.vals().as_ptr(), 0);
            let mut edges = RawItems::from_parts(ret.edges().as_ptr(), 0);

            for key in self.keys().iter() {
                keys.push(key.clone())
            }
            for val in self.vals().iter() {
                vals.push(val.clone())
            }
            for edge in self.edges().iter() {
                edges.push(edge.clone())
            }

            // The elements are now owned by `ret`; don't let the temporary
            // `RawItems` drop them.
            mem::forget(keys);
            mem::forget(vals);
            mem::forget(edges);

            ret._len = self.len();

            ret
        }
    }
}
/// A reference to something in the middle of a `Node`. There are two `Type`s of `Handle`s,
/// namely `KV` handles, which point to key/value pairs, and `Edge` handles, which point to edges
/// before or after key/value pairs. Methods are provided for removing pairs, inserting into edges,
/// accessing the stored values, and moving around the `Node`.
///
/// This handle is generic, and can take any sort of reference to a `Node`. The reason for this is
/// two-fold. First of all, it reduces the amount of repetitive code, implementing functions that
/// don't need mutability on both mutable and immutable references. Secondly and more importantly,
/// this allows users of the `Handle` API to associate metadata with the reference. This is used in
/// `BTreeMap` to give `Node`s temporary "IDs" that persist to when the `Node` is used in a
/// `PartialSearchStack`.
///
/// # A note on safety
///
/// Unfortunately, the extra power afforded by being generic also means that safety can technically
/// be broken. For sensible implementations of `Deref` and `DerefMut`, these handles are perfectly
/// safe. As long as repeatedly calling `.deref()` results in the same Node being returned each
/// time, everything should work fine. However, if the `Deref` implementation swaps in multiple
/// different nodes, then the indices that are assumed to be in bounds suddenly stop being so. For
/// example:
///
/// ```rust,ignore
/// struct Nasty<'a> {
///     first: &'a Node<uint, uint>,
///     second: &'a Node<uint, uint>,
///     flag: &'a Cell<bool>,
/// }
///
/// impl<'a> Deref<Node<uint, uint>> for Nasty<'a> {
///     fn deref(&self) -> &Node<uint, uint> {
///         if self.flag.get() {
///             self.second
///         } else {
///             self.first
///         }
///     }
/// }
///
/// let flag = Cell::new(false);
/// let mut small_node = Node::make_leaf_root(3);
/// let mut large_node = Node::make_leaf_root(100);
///
/// for i in range(0, 100) {
///     // Insert to the end
///     large_node.edge_handle(i).insert_as_leaf(i, i);
/// }
///
/// let nasty = Nasty {
///     first: &large_node,
///     second: &small_node,
///     flag: &flag
/// };
///
/// // The handle points at index 75.
/// let handle = Node::search(nasty, 75);
///
/// // Now the handle still points at index 75, but on the small node, which has no index 75.
/// flag.set(true);
///
/// println!("Uninitialized memory: {}", handle.into_kv());
/// ```
pub struct Handle<NodeRef, Type, NodeType> {
    node: NodeRef,
    index: uint
}

pub mod handle {
    // Handle types.
    pub enum KV {}
    pub enum Edge {}

    // Handle node types.
    pub enum LeafOrInternal {}
    pub enum Leaf {}
    pub enum Internal {}
}
impl<K: Ord, V> Node<K, V> {
    /// Searches for the given key in the node. If it finds an exact match,
    /// `Found` will be yielded with the matching index. If it doesn't find an exact match,
    /// `GoDown` will be yielded with the index of the subtree the key must lie in.
    pub fn search<Sized? Q, NodeRef: Deref<Node<K, V>>>(node: NodeRef, key: &Q)
                  -> SearchResult<NodeRef> where Q: BorrowFrom<K> + Ord {
        // FIXME(Gankro): Tune when to search linear or binary based on B (and maybe K/V).
        // For the B configured as of this writing (B = 6), binary search was *significantly*
        // worse for uints.
        let (found, index) = node.search_linear(key);
        if found {
            Found(Handle {
                node: node,
                index: index
            })
        } else {
            GoDown(Handle {
                node: node,
                index: index
            })
        }
    }

    fn search_linear<Sized? Q>(&self, key: &Q) -> (bool, uint) where Q: BorrowFrom<K> + Ord {
        for (i, k) in self.keys().iter().enumerate() {
            match key.cmp(BorrowFrom::borrow_from(k)) {
                Greater => {},
                Equal => return (true, i),
                Less => return (false, i),
            }
        }
        (false, self.len())
    }
}
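// A binary-search variant (an illustrative sketch, not part of the original
// module) of the kind the FIXME above contemplates. It preserves the contract
// of `search_linear`: `(true, i)` on an exact match, otherwise `(false, i)`
// with the index of the edge to descend through. It would live in the
// `impl<K: Ord, V> Node<K, V>` block above.
//
// fn search_binary<Sized? Q>(&self, key: &Q) -> (bool, uint)
//         where Q: BorrowFrom<K> + Ord {
//     let (mut lo, mut hi) = (0, self.len());
//     while lo < hi {
//         let mid = lo + (hi - lo) / 2;
//         match key.cmp(BorrowFrom::borrow_from(&self.keys()[mid])) {
//             Greater => lo = mid + 1,
//             Equal => return (true, mid),
//             Less => hi = mid,
//         }
//     }
//     (false, lo)
// }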
impl <K, V> Node<K, V> {
    /// Make a leaf root from scratch
    pub fn make_leaf_root(b: uint) -> Node<K, V> {
        Node::new_leaf(capacity_from_b(b))
    }

    /// Make an internal root and swap it with an old root
    pub fn make_internal_root(left_and_out: &mut Node<K,V>, b: uint, key: K, value: V,
            right: Node<K,V>) {
        let node = mem::replace(left_and_out, unsafe { Node::new_internal(capacity_from_b(b)) });
        left_and_out._len = 1;
        unsafe {
            ptr::write(left_and_out.keys_mut().unsafe_mut(0), key);
            ptr::write(left_and_out.vals_mut().unsafe_mut(0), value);
            ptr::write(left_and_out.edges_mut().unsafe_mut(0), node);
            ptr::write(left_and_out.edges_mut().unsafe_mut(1), right);
        }
    }
    /// How many key-value pairs the node contains
    pub fn len(&self) -> uint {
        self._len
    }

    /// How many key-value pairs the node can fit
    pub fn capacity(&self) -> uint {
        self._capacity
    }

    /// If the node has no children, i.e. is a leaf
    pub fn is_leaf(&self) -> bool {
        self.edges.is_null()
    }

    /// If the node has too few elements
    pub fn is_underfull(&self) -> bool {
        self.len() < min_load_from_capacity(self.capacity())
    }

    /// If the node cannot fit any more elements
    pub fn is_full(&self) -> bool {
        self.len() == self.capacity()
    }
}
impl<K, V, NodeRef: Deref<Node<K, V>>, Type, NodeType> Handle<NodeRef, Type, NodeType> {
    /// Returns a reference to the node that contains the pointed-to edge or key/value pair. This
    /// is very different from `edge` and `edge_mut` because those return children of the node
    /// returned by `node`.
    pub fn node(&self) -> &Node<K, V> {
        &*self.node
    }
}
impl<K, V, NodeRef: DerefMut<Node<K, V>>, Type, NodeType> Handle<NodeRef, Type, NodeType> {
    /// Converts a handle into one that stores the same information using a raw pointer. This can
    /// be useful in conjunction with `from_raw` when the type system is insufficient for
    /// determining the lifetimes of the nodes.
    pub fn as_raw(&mut self) -> Handle<*mut Node<K, V>, Type, NodeType> {
        Handle {
            node: &mut *self.node as *mut _,
            index: self.index
        }
    }
}
impl<K, V, Type, NodeType> Handle<*mut Node<K, V>, Type, NodeType> {
    /// Converts from a handle stored with a raw pointer, which isn't directly usable, to a handle
    /// stored with a reference. This is an unsafe inverse of `as_raw`, and together they allow
    /// unsafely extending the lifetime of the reference to the `Node`.
    pub unsafe fn from_raw<'a>(&'a self) -> Handle<&'a Node<K, V>, Type, NodeType> {
        Handle {
            node: &*self.node,
            index: self.index
        }
    }

    /// Converts from a handle stored with a raw pointer, which isn't directly usable, to a handle
    /// stored with a mutable reference. This is an unsafe inverse of `as_raw`, and together they
    /// allow unsafely extending the lifetime of the reference to the `Node`.
    pub unsafe fn from_raw_mut<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, Type, NodeType> {
        Handle {
            node: &mut *self.node,
            index: self.index
        }
    }
}
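// Illustrative sketch (not from the original source) of the intended pairing of
// `as_raw` with `from_raw`/`from_raw_mut`: stash a raw handle, let the original
// borrow end, then unsafely revive the handle later.
//
//     let mut raw_handle = handle.as_raw();
//     // ... `handle` and the borrow it held can be dropped here ...
//     let handle = unsafe { raw_handle.from_raw_mut() };
//
// This is only sound if the caller guarantees the node is still alive and not
// aliased when the handle is revived, which the type system cannot check.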
impl<'a, K: 'a, V: 'a> Handle<&'a Node<K, V>, handle::Edge, handle::Internal> {
    /// Turns the handle into a reference to the edge it points at. This is necessary because the
    /// returned pointer has a larger lifetime than what would be returned by `edge` or `edge_mut`,
    /// making it more suitable for moving down a chain of nodes.
    pub fn into_edge(self) -> &'a Node<K, V> {
        unsafe {
            self.node.edges().unsafe_get(self.index)
        }
    }
}

impl<'a, K: 'a, V: 'a> Handle<&'a mut Node<K, V>, handle::Edge, handle::Internal> {
    /// Turns the handle into a mutable reference to the edge it points at. This is necessary
    /// because the returned pointer has a larger lifetime than what would be returned by
    /// `edge_mut`, making it more suitable for moving down a chain of nodes.
    pub fn into_edge_mut(self) -> &'a mut Node<K, V> {
        unsafe {
            self.node.edges_mut().unsafe_mut(self.index)
        }
    }
}
impl<K, V, NodeRef: Deref<Node<K, V>>> Handle<NodeRef, handle::Edge, handle::Internal> {
    // This doesn't exist because there are no uses for it,
    // but is fine to add, analogous to edge_mut.
    //
    // /// Returns a reference to the edge pointed-to by this handle. This should not be
    // /// confused with `node`, which references the parent node of what is returned here.
    // pub fn edge(&self) -> &Node<K, V>
}
pub enum ForceResult<NodeRef, Type> {
    Leaf(Handle<NodeRef, Type, handle::Leaf>),
    Internal(Handle<NodeRef, Type, handle::Internal>)
}

impl<K, V, NodeRef: Deref<Node<K, V>>, Type> Handle<NodeRef, Type, handle::LeafOrInternal> {
    /// Figure out whether this handle is pointing to something in a leaf node or to something in
    /// an internal node, clarifying the type according to the result.
    pub fn force(self) -> ForceResult<NodeRef, Type> {
        if self.node.is_leaf() {
            Leaf(Handle {
                node: self.node,
                index: self.index
            })
        } else {
            Internal(Handle {
                node: self.node,
                index: self.index
            })
        }
    }
}
impl<K, V, NodeRef: DerefMut<Node<K, V>>> Handle<NodeRef, handle::Edge, handle::Leaf> {
    /// Tries to insert this key-value pair at the given index in this leaf node
    /// If the node is full, we have to split it.
    ///
    /// Returns a *mut V to the inserted value, because the caller may want this when
    /// they're done mutating the tree, but we don't want to borrow anything for now.
    pub fn insert_as_leaf(mut self, key: K, value: V) ->
            (InsertionResult<K, V>, *mut V) {
        if !self.node.is_full() {
            // The element can fit, just insert it
            (Fit, unsafe { self.node.insert_kv(self.index, key, value) as *mut _ })
        } else {
            // The element can't fit, this node is full. Split it into two nodes.
            let (new_key, new_val, mut new_right) = self.node.split();
            let left_len = self.node.len();

            let ptr = unsafe {
                if self.index <= left_len {
                    self.node.insert_kv(self.index, key, value)
                } else {
                    // We need to subtract 1 because in splitting we took out new_key and new_val.
                    // Just being in the right node means we are past left_len k/v pairs in the
                    // left node and 1 k/v pair in the parent node.
                    new_right.insert_kv(self.index - left_len - 1, key, value)
                }
            } as *mut _;

            (Split(new_key, new_val, new_right), ptr)
        }
    }
}
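// A worked example of the split-index arithmetic above (an added illustration;
// the concrete numbers are assumptions, not from the original source). Suppose
// a full leaf with capacity 11 splits: `split` moves 5 pairs into `new_right`,
// returns one pair for the parent, and leaves `left_len == 5`. An insertion at
// index 8 therefore lands in `new_right` at index 8 - 5 - 1 == 2, since 5 pairs
// stayed in the left node and 1 went up to the parent. The same arithmetic is
// used by `insert_as_internal` below.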
impl<K, V, NodeRef: DerefMut<Node<K, V>>> Handle<NodeRef, handle::Edge, handle::Internal> {
    /// Returns a mutable reference to the edge pointed-to by this handle. This should not be
    /// confused with `node`, which references the parent node of what is returned here.
    pub fn edge_mut(&mut self) -> &mut Node<K, V> {
        unsafe {
            self.node.edges_mut().unsafe_mut(self.index)
        }
    }

    /// Tries to insert this key-value pair at the given index in this internal node
    /// If the node is full, we have to split it.
    pub fn insert_as_internal(mut self, key: K, value: V, right: Node<K, V>)
            -> InsertionResult<K, V> {
        if !self.node.is_full() {
            // The element can fit, just insert it
            unsafe {
                self.node.insert_kv(self.index, key, value);
                self.node.insert_edge(self.index + 1, right); // +1 to insert to the right
            }
            Fit
        } else {
            // The element can't fit, this node is full. Split it into two nodes.
            let (new_key, new_val, mut new_right) = self.node.split();
            let left_len = self.node.len();

            if self.index <= left_len {
                unsafe {
                    self.node.insert_kv(self.index, key, value);
                    self.node.insert_edge(self.index + 1, right); // +1 to insert to the right
                }
            } else {
                unsafe {
                    // The -1 here is for the same reason as in insert_as_leaf - because we
                    // split, there are actually left_len + 1 k/v pairs before the right node, with
                    // the extra 1 being put in the parent.
                    new_right.insert_kv(self.index - left_len - 1, key, value);
                    new_right.insert_edge(self.index - left_len, right);
                }
            }

            Split(new_key, new_val, new_right)
        }
    }
    /// Handle an underflow in this node's child. We favour handling "to the left" because we know
    /// we're empty, but our neighbour can be full. Handling to the left means when we choose to
    /// steal, we pop off the end of our neighbour (always fast) and "unshift" ourselves
    /// (always slow, but at least faster since we know we're half-empty).
    /// Handling "to the right" reverses these roles. Of course, we merge whenever possible
    /// because we want dense nodes, and merging is about equal work regardless of direction.
    pub fn handle_underflow(mut self) {
        unsafe {
            if self.index > 0 {
                self.handle_underflow_to_left();
            } else {
                self.handle_underflow_to_right();
            }
        }
    }

    /// Right is underflowed. Tries to steal from left,
    /// but merges left and right if left is low too.
    unsafe fn handle_underflow_to_left(&mut self) {
        let left_len = self.node.edges()[self.index - 1].len();
        if left_len > min_load_from_capacity(self.node.capacity()) {
            self.left_kv().steal_rightward();
        } else {
            self.left_kv().merge_children();
        }
    }

    /// Left is underflowed. Tries to steal from the right,
    /// but merges left and right if right is low too.
    unsafe fn handle_underflow_to_right(&mut self) {
        let right_len = self.node.edges()[self.index + 1].len();
        if right_len > min_load_from_capacity(self.node.capacity()) {
            self.right_kv().steal_leftward();
        } else {
            self.right_kv().merge_children();
        }
    }
}
impl<K, V, NodeRef: DerefMut<Node<K, V>>, NodeType> Handle<NodeRef, handle::Edge, NodeType> {
    /// Gets the handle pointing to the key/value pair just to the left of the pointed-to edge.
    /// This is unsafe because the handle might point to the first edge in the node, which has no
    /// pair to its left.
    unsafe fn left_kv<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
        Handle {
            node: &mut *self.node,
            index: self.index - 1
        }
    }

    /// Gets the handle pointing to the key/value pair just to the right of the pointed-to edge.
    /// This is unsafe because the handle might point to the last edge in the node, which has no
    /// pair to its right.
    unsafe fn right_kv<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
        Handle {
            node: &mut *self.node,
            index: self.index
        }
    }
}
impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a Node<K, V>, handle::KV, NodeType> {
    /// Turns the handle into references to the key and value it points at. This is necessary
    /// because the returned pointers have larger lifetimes than what would be returned by `key`
    /// or `val`.
    pub fn into_kv(self) -> (&'a K, &'a V) {
        let (keys, vals) = self.node.as_slices();
        unsafe {
            (
                keys.unsafe_get(self.index),
                vals.unsafe_get(self.index)
            )
        }
    }
}

impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
    /// Turns the handle into mutable references to the key and value it points at. This is
    /// necessary because the returned pointers have larger lifetimes than what would be returned
    /// by `key_mut` or `val_mut`.
    pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
        let (keys, vals) = self.node.as_slices_mut();
        unsafe {
            (
                keys.unsafe_mut(self.index),
                vals.unsafe_mut(self.index)
            )
        }
    }

    /// Convert this handle into one pointing at the edge immediately to the left of the key/value
    /// pair pointed-to by this handle. This is useful because it returns a reference with larger
    /// lifetime than `left_edge`.
    pub fn into_left_edge(self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
        Handle {
            node: &mut *self.node,
            index: self.index
        }
    }
}
impl<'a, K: 'a, V: 'a, NodeRef: Deref<Node<K, V>> + 'a, NodeType> Handle<NodeRef, handle::KV,
        NodeType> {
    // These are fine to include, but are currently unneeded.
    //
    // /// Returns a reference to the key pointed-to by this handle. This doesn't return a
    // /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
    // /// handle.
    // pub fn key(&'a self) -> &'a K {
    //     unsafe { self.node.keys().unsafe_get(self.index) }
    // }
    //
    // /// Returns a reference to the value pointed-to by this handle. This doesn't return a
    // /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
    // /// handle.
    // pub fn val(&'a self) -> &'a V {
    //     unsafe { self.node.vals().unsafe_get(self.index) }
    // }
}
impl<'a, K: 'a, V: 'a, NodeRef: DerefMut<Node<K, V>> + 'a, NodeType> Handle<NodeRef, handle::KV,
        NodeType> {
    /// Returns a mutable reference to the key pointed-to by this handle. This doesn't return a
    /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
    /// handle.
    pub fn key_mut(&'a mut self) -> &'a mut K {
        unsafe { self.node.keys_mut().unsafe_mut(self.index) }
    }

    /// Returns a mutable reference to the value pointed-to by this handle. This doesn't return a
    /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
    /// handle.
    pub fn val_mut(&'a mut self) -> &'a mut V {
        unsafe { self.node.vals_mut().unsafe_mut(self.index) }
    }
}
impl<K, V, NodeRef: DerefMut<Node<K, V>>, NodeType> Handle<NodeRef, handle::KV, NodeType> {
    /// Gets the handle pointing to the edge immediately to the left of the key/value pair pointed
    /// to by this handle.
    pub fn left_edge<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
        Handle {
            node: &mut *self.node,
            index: self.index
        }
    }

    /// Gets the handle pointing to the edge immediately to the right of the key/value pair pointed
    /// to by this handle.
    pub fn right_edge<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
        Handle {
            node: &mut *self.node,
            index: self.index + 1
        }
    }
}
impl<K, V, NodeRef: DerefMut<Node<K, V>>> Handle<NodeRef, handle::KV, handle::Leaf> {
    /// Removes the key/value pair at the handle's location.
    ///
    /// # Panics (in debug build)
    ///
    /// Panics if the node containing the pair is not a leaf node.
    pub fn remove_as_leaf(mut self) -> (K, V) {
        unsafe { self.node.remove_kv(self.index) }
    }
}
impl<K, V, NodeRef: DerefMut<Node<K, V>>> Handle<NodeRef, handle::KV, handle::Internal> {
    /// Steal! Stealing is roughly analogous to a binary tree rotation.
    /// In this case, we're "rotating" right.
    unsafe fn steal_rightward(&mut self) {
        // Take the biggest stuff off left
        let (mut key, mut val, edge) = {
            let mut left_handle = self.left_edge();
            let left = left_handle.edge_mut();
            let (key, val) = left.pop_kv();
            let edge = if left.is_leaf() {
                None
            } else {
                Some(left.pop_edge())
            };

            (key, val, edge)
        };

        // Swap the parent's separating key-value pair with left's
        mem::swap(&mut key, self.key_mut());
        mem::swap(&mut val, self.val_mut());

        // Put them at the start of right
        let mut right_handle = self.right_edge();
        let right = right_handle.edge_mut();
        right.insert_kv(0, key, val);
        match edge {
            Some(edge) => right.insert_edge(0, edge),
            None => {}
        }
    }

    /// Steal! Stealing is roughly analogous to a binary tree rotation.
    /// In this case, we're "rotating" left.
    unsafe fn steal_leftward(&mut self) {
        // Take the smallest stuff off right
        let (mut key, mut val, edge) = {
            let mut right_handle = self.right_edge();
            let right = right_handle.edge_mut();
            let (key, val) = right.remove_kv(0);
            let edge = if right.is_leaf() {
                None
            } else {
                Some(right.remove_edge(0))
            };

            (key, val, edge)
        };

        // Swap the parent's separating key-value pair with right's
        mem::swap(&mut key, self.key_mut());
        mem::swap(&mut val, self.val_mut());

        // Put them at the end of left
        let mut left_handle = self.left_edge();
        let left = left_handle.edge_mut();
        left.push_kv(key, val);
        match edge {
            Some(edge) => left.push_edge(edge),
            None => {}
        }
    }

    /// Merge! Smooshes left and right into one node, along with the key-value
    /// pair that separated them in their parent.
    unsafe fn merge_children(mut self) {
        // Permanently remove right's index, and the key-value pair that separates
        // left and right
        let (key, val) = self.node.remove_kv(self.index);
        let right = self.node.remove_edge(self.index + 1);

        // Give left right's stuff.
        self.left_edge().edge_mut()
            .absorb(key, val, right);
    }
}
impl<K, V> Node<K, V> {
    /// Returns the mutable handle pointing to the key/value pair at a given index.
    ///
    /// # Panics (in debug build)
    ///
    /// Panics if the given index is out of bounds.
    pub fn kv_handle(&mut self, index: uint) -> Handle<&mut Node<K, V>, handle::KV,
                                                       handle::LeafOrInternal> {
        // Necessary for correctness, but in a private module
        debug_assert!(index < self.len(), "kv_handle index out of bounds");
        Handle {
            node: self,
            index: index
        }
    }
    pub fn iter<'a>(&'a self) -> Traversal<'a, K, V> {
        let is_leaf = self.is_leaf();
        let (keys, vals, edges) = self.as_slices_internal();
        AbsTraversal {
            inner: ElemsAndEdges(
                keys.iter().zip(vals.iter()),
                edges.iter()
            ),
            head_is_edge: true,
            tail_is_edge: true,
            has_edges: !is_leaf,
        }
    }

    pub fn iter_mut<'a>(&'a mut self) -> MutTraversal<'a, K, V> {
        let is_leaf = self.is_leaf();
        let (keys, vals, edges) = self.as_slices_internal_mut();
        AbsTraversal {
            inner: ElemsAndEdges(
                keys.iter().zip(vals.iter_mut()),
                edges.iter_mut()
            ),
            head_is_edge: true,
            tail_is_edge: true,
            has_edges: !is_leaf,
        }
    }

    pub fn into_iter(self) -> MoveTraversal<K, V> {
        unsafe {
            let ret = MoveTraversal {
                inner: MoveTraversalImpl {
                    keys: RawItems::from_slice(self.keys()),
                    vals: RawItems::from_slice(self.vals()),
                    edges: RawItems::from_slice(self.edges()),

                    ptr: self.keys as *mut u8,
                    capacity: self.capacity(),
                    is_leaf: self.is_leaf()
                },
                head_is_edge: true,
                tail_is_edge: true,
                has_edges: !self.is_leaf(),
            };
            // The traversal now owns the contents and the allocation; don't
            // run `Node`'s destructor.
            mem::forget(self);
            ret
        }
    }
    /// When a node has no keys or values and only a single edge, extract that edge.
    pub fn hoist_lone_child(&mut self) {
        // Necessary for correctness, but in a private module
        debug_assert!(self.len() == 0);
        debug_assert!(!self.is_leaf());

        unsafe {
            let ret = ptr::read(self.edges().unsafe_get(0));
            self.destroy();
            ptr::write(self, ret);
        }
    }
}
// Vector functions (all unchecked)
impl<K, V> Node<K, V> {
    // This must be followed by push_edge on an internal node.
    unsafe fn push_kv(&mut self, key: K, val: V) {
        let len = self.len();

        ptr::write(self.keys_mut().unsafe_mut(len), key);
        ptr::write(self.vals_mut().unsafe_mut(len), val);

        self._len += 1;
    }

    // This can only be called immediately after a call to push_kv.
    unsafe fn push_edge(&mut self, edge: Node<K, V>) {
        let len = self.len();

        ptr::write(self.edges_mut().unsafe_mut(len), edge);
    }
    // This must be followed by insert_edge on an internal node.
    unsafe fn insert_kv(&mut self, index: uint, key: K, val: V) -> &mut V {
        ptr::copy_memory(
            self.keys_mut().as_mut_ptr().offset(index as int + 1),
            self.keys().as_ptr().offset(index as int),
            self.len() - index
        );
        ptr::copy_memory(
            self.vals_mut().as_mut_ptr().offset(index as int + 1),
            self.vals().as_ptr().offset(index as int),
            self.len() - index
        );

        ptr::write(self.keys_mut().unsafe_mut(index), key);
        ptr::write(self.vals_mut().unsafe_mut(index), val);

        self._len += 1;

        self.vals_mut().unsafe_mut(index)
    }

    // This can only be called immediately after a call to insert_kv.
    unsafe fn insert_edge(&mut self, index: uint, edge: Node<K, V>) {
        ptr::copy_memory(
            self.edges_mut().as_mut_ptr().offset(index as int + 1),
            self.edges().as_ptr().offset(index as int),
            self.len() - index
        );
        ptr::write(self.edges_mut().unsafe_mut(index), edge);
    }
    // This must be followed by pop_edge on an internal node.
    unsafe fn pop_kv(&mut self) -> (K, V) {
        let key = ptr::read(self.keys().unsafe_get(self.len() - 1));
        let val = ptr::read(self.vals().unsafe_get(self.len() - 1));

        self._len -= 1;

        (key, val)
    }

    // This can only be called immediately after a call to pop_kv.
    unsafe fn pop_edge(&mut self) -> Node<K, V> {
        let edge = ptr::read(self.edges().unsafe_get(self.len() + 1));

        edge
    }
    // This must be followed by remove_edge on an internal node.
    unsafe fn remove_kv(&mut self, index: uint) -> (K, V) {
        let key = ptr::read(self.keys().unsafe_get(index));
        let val = ptr::read(self.vals().unsafe_get(index));

        ptr::copy_memory(
            self.keys_mut().as_mut_ptr().offset(index as int),
            self.keys().as_ptr().offset(index as int + 1),
            self.len() - index - 1
        );
        ptr::copy_memory(
            self.vals_mut().as_mut_ptr().offset(index as int),
            self.vals().as_ptr().offset(index as int + 1),
            self.len() - index - 1
        );

        self._len -= 1;

        (key, val)
    }

    // This can only be called immediately after a call to remove_kv.
    unsafe fn remove_edge(&mut self, index: uint) -> Node<K, V> {
        let edge = ptr::read(self.edges().unsafe_get(index));

        ptr::copy_memory(
            self.edges_mut().as_mut_ptr().offset(index as int),
            self.edges().as_ptr().offset(index as int + 1),
            self.len() - index + 1
        );

        edge
    }
}
// Private implementation details
impl<K, V> Node<K, V> {
    /// Node is full, so split it into two nodes, and yield the middle-most key-value pair
    /// because we have one too many, and our parent now has one too few
    fn split(&mut self) -> (K, V, Node<K, V>) {
        // Necessary for correctness, but in a private function
        debug_assert!(self.len() > 0);

        let mut right = if self.is_leaf() {
            Node::new_leaf(self.capacity())
        } else {
            unsafe { Node::new_internal(self.capacity()) }
        };

        unsafe {
            right._len = self.len() / 2;
            let right_offset = self.len() - right.len();
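            // For example (an added illustration): with `self.len() == 11`,
            // `right` takes 11 / 2 == 5 pairs and `right_offset == 6`, so pairs
            // 6..11 move to `right`, pair 5 is handed up to the parent below,
            // and `self` keeps pairs 0..5 (`_len` becomes 5).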
            ptr::copy_nonoverlapping_memory(
                right.keys_mut().as_mut_ptr(),
                self.keys().as_ptr().offset(right_offset as int),
                right.len()
            );
            ptr::copy_nonoverlapping_memory(
                right.vals_mut().as_mut_ptr(),
                self.vals().as_ptr().offset(right_offset as int),
                right.len()
            );
            if !self.is_leaf() {
                ptr::copy_nonoverlapping_memory(
                    right.edges_mut().as_mut_ptr(),
                    self.edges().as_ptr().offset(right_offset as int),
                    right.len() + 1
                );
            }

            let key = ptr::read(self.keys().unsafe_get(right_offset - 1));
            let val = ptr::read(self.vals().unsafe_get(right_offset - 1));

            self._len = right_offset - 1;

            (key, val, right)
        }
    }
    /// Take all the values from right, separated by the given key and value
    fn absorb(&mut self, key: K, val: V, mut right: Node<K, V>) {
        // Necessary for correctness, but in a private function
        // Just as a sanity check, make sure we can fit this guy in
        debug_assert!(self.len() + right.len() <= self.capacity());
        debug_assert!(self.is_leaf() == right.is_leaf());

        unsafe {
            let old_len = self.len();
            self._len += right.len() + 1;

            ptr::write(self.keys_mut().unsafe_mut(old_len), key);
            ptr::write(self.vals_mut().unsafe_mut(old_len), val);

            ptr::copy_nonoverlapping_memory(
                self.keys_mut().as_mut_ptr().offset(old_len as int + 1),
                right.keys().as_ptr(),
                right.len()
            );
            ptr::copy_nonoverlapping_memory(
                self.vals_mut().as_mut_ptr().offset(old_len as int + 1),
                right.vals().as_ptr(),
                right.len()
            );
            if !self.is_leaf() {
                ptr::copy_nonoverlapping_memory(
                    self.edges_mut().as_mut_ptr().offset(old_len as int + 1),
                    right.edges().as_ptr(),
                    right.len() + 1
                );
            }

            // `right`'s contents have been moved out, so free its buffer
            // without running its destructor.
            right.destroy();
            mem::forget(right);
        }
    }
}
/// Get the capacity of a node from the order of the parent B-Tree
fn capacity_from_b(b: uint) -> uint {
    2 * b - 1
}

/// Get the minimum load of a node from its capacity
fn min_load_from_capacity(cap: uint) -> uint {
    // B - 1
    cap / 2
}
/// A trait for pairs of `Iterator`s, one over edges and the other over key/value pairs. This is
/// necessary, as the `MoveTraversalImpl` needs to have a destructor that deallocates the `Node`,
/// and a pair of `Iterator`s would require two independent destructors.
trait TraversalImpl<K, V, E> {
    fn next_kv(&mut self) -> Option<(K, V)>;
    fn next_kv_back(&mut self) -> Option<(K, V)>;

    fn next_edge(&mut self) -> Option<E>;
    fn next_edge_back(&mut self) -> Option<E>;
}
/// A `TraversalImpl` that actually is backed by two iterators. This works in the non-moving case,
/// as no deallocation needs to be done.
struct ElemsAndEdges<Elems, Edges>(Elems, Edges);

impl<K, V, E, Elems: DoubleEndedIterator<(K, V)>, Edges: DoubleEndedIterator<E>>
        TraversalImpl<K, V, E> for ElemsAndEdges<Elems, Edges> {

    fn next_kv(&mut self) -> Option<(K, V)> { self.0.next() }
    fn next_kv_back(&mut self) -> Option<(K, V)> { self.0.next_back() }

    fn next_edge(&mut self) -> Option<E> { self.1.next() }
    fn next_edge_back(&mut self) -> Option<E> { self.1.next_back() }
}
/// A `TraversalImpl` taking a `Node` by value.
struct MoveTraversalImpl<K, V> {
    keys: RawItems<K>,
    vals: RawItems<V>,
    edges: RawItems<Node<K, V>>,

    // For deallocation when we are done iterating.
    ptr: *mut u8,
    capacity: uint,
    is_leaf: bool
}
impl<K, V> TraversalImpl<K, V, Node<K, V>> for MoveTraversalImpl<K, V> {
    fn next_kv(&mut self) -> Option<(K, V)> {
        match (self.keys.next(), self.vals.next()) {
            (Some(k), Some(v)) => Some((k, v)),
            _ => None
        }
    }

    fn next_kv_back(&mut self) -> Option<(K, V)> {
        match (self.keys.next_back(), self.vals.next_back()) {
            (Some(k), Some(v)) => Some((k, v)),
            _ => None
        }
    }

    fn next_edge(&mut self) -> Option<Node<K, V>> {
        // Necessary for correctness, but in a private module
        debug_assert!(!self.is_leaf);
        self.edges.next()
    }

    fn next_edge_back(&mut self) -> Option<Node<K, V>> {
        // Necessary for correctness, but in a private module
        debug_assert!(!self.is_leaf);
        self.edges.next_back()
    }
}
#[unsafe_destructor]
impl<K, V> Drop for MoveTraversalImpl<K, V> {
    fn drop(&mut self) {
        // We need to cleanup the stored values manually, as the RawItems destructor would run
        // after our deallocation.
        for _ in self.keys {}
        for _ in self.vals {}
        for _ in self.edges {}

        let (alignment, size) =
                calculate_allocation_generic::<K, V>(self.capacity, self.is_leaf);
        unsafe { heap::deallocate(self.ptr, size, alignment) };
    }
}
/// An abstraction over all the different kinds of traversals a node supports
struct AbsTraversal<Impl> {
    inner: Impl,
    head_is_edge: bool,
    tail_is_edge: bool,
    has_edges: bool,
}

/// A single atomic step in a traversal. Either an element is visited, or an edge is followed
pub enum TraversalItem<K, V, E> {
    Elem(K, V),
    Edge(E),
}

/// A traversal over a node's entries and edges
pub type Traversal<'a, K, V> = AbsTraversal<ElemsAndEdges<Zip<slice::Iter<'a, K>,
                                                              slice::Iter<'a, V>>,
                                                          slice::Iter<'a, Node<K, V>>>>;

/// A mutable traversal over a node's entries and edges
pub type MutTraversal<'a, K, V> = AbsTraversal<ElemsAndEdges<Zip<slice::Iter<'a, K>,
                                                                 slice::IterMut<'a, V>>,
                                                             slice::IterMut<'a, Node<K, V>>>>;

/// An owning traversal over a node's entries and edges
pub type MoveTraversal<K, V> = AbsTraversal<MoveTraversalImpl<K, V>>;
impl<K, V, E, Impl: TraversalImpl<K, V, E>>
        Iterator<TraversalItem<K, V, E>> for AbsTraversal<Impl> {

    fn next(&mut self) -> Option<TraversalItem<K, V, E>> {
        let head_is_edge = self.head_is_edge;
        self.head_is_edge = !head_is_edge;

        if head_is_edge && self.has_edges {
            self.inner.next_edge().map(|node| Edge(node))
        } else {
            self.inner.next_kv().map(|(k, v)| Elem(k, v))
        }
    }
}

impl<K, V, E, Impl: TraversalImpl<K, V, E>>
        DoubleEndedIterator<TraversalItem<K, V, E>> for AbsTraversal<Impl> {

    fn next_back(&mut self) -> Option<TraversalItem<K, V, E>> {
        let tail_is_edge = self.tail_is_edge;
        self.tail_is_edge = !tail_is_edge;

        if tail_is_edge && self.has_edges {
            self.inner.next_edge_back().map(|node| Edge(node))
        } else {
            self.inner.next_kv_back().map(|(k, v)| Elem(k, v))
        }
    }
}