1 use core::borrow::Borrow;
3 use core::ops::RangeBounds;
6 use super::node::{marker, ForceResult::*, Handle, NodeRef};
8 use crate::alloc::Allocator;
9 // `front` and `back` are always both `None` or both `Some`.
10 pub struct LeafRange<BorrowType, K, V> {
11 front: Option<Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>>,
12 back: Option<Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>>,
15 impl<'a, K: 'a, V: 'a> Clone for LeafRange<marker::Immut<'a>, K, V> {
16 fn clone(&self) -> Self {
17 LeafRange { front: self.front.clone(), back: self.back.clone() }
21 impl<BorrowType, K, V> LeafRange<BorrowType, K, V> {
22 pub fn none() -> Self {
23 LeafRange { front: None, back: None }
26 fn is_empty(&self) -> bool {
27 self.front == self.back
30 /// Temporarily takes out another, immutable equivalent of the same range.
31 pub fn reborrow(&self) -> LeafRange<marker::Immut<'_>, K, V> {
33 front: self.front.as_ref().map(|f| f.reborrow()),
34 back: self.back.as_ref().map(|b| b.reborrow()),
39 impl<'a, K, V> LeafRange<marker::Immut<'a>, K, V> {
41 pub fn next_checked(&mut self) -> Option<(&'a K, &'a V)> {
42 self.perform_next_checked(|kv| kv.into_kv())
46 pub fn next_back_checked(&mut self) -> Option<(&'a K, &'a V)> {
47 self.perform_next_back_checked(|kv| kv.into_kv())
51 impl<'a, K, V> LeafRange<marker::ValMut<'a>, K, V> {
53 pub fn next_checked(&mut self) -> Option<(&'a K, &'a mut V)> {
54 self.perform_next_checked(|kv| unsafe { ptr::read(kv) }.into_kv_valmut())
58 pub fn next_back_checked(&mut self) -> Option<(&'a K, &'a mut V)> {
59 self.perform_next_back_checked(|kv| unsafe { ptr::read(kv) }.into_kv_valmut())
63 impl<BorrowType: marker::BorrowType, K, V> LeafRange<BorrowType, K, V> {
// NOTE(review): the embedded original line numbers jump (65 -> 67 -> 72), so this
// extract is missing lines; in particular, `result` below is presumably bound
// from `f(&kv)` on a missing line, and a missing guard presumably returns `None`
// when the range is empty -- confirm against the complete file.
64 /// If possible, extract some result from the following KV and move to the edge beyond it.
65 fn perform_next_checked<F, R>(&mut self, f: F) -> Option<R>
67 F: Fn(&Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>) -> R,
// Swap the front edge for the edge just beyond the next KV, handing the
// extracted result out through `super::mem::replace`.
72 super::mem::replace(self.front.as_mut().unwrap(), |front| {
73 let kv = front.next_kv().ok().unwrap();
75 (kv.next_leaf_edge(), Some(result))
80 /// If possible, extract some result from the preceding KV and move to the edge beyond it.
81 fn perform_next_back_checked<F, R>(&mut self, f: F) -> Option<R>
83 F: Fn(&Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>) -> R,
// Mirror image of `perform_next_checked`, walking the back edge leftwards.
88 super::mem::replace(self.back.as_mut().unwrap(), |back| {
89 let kv = back.next_back_kv().ok().unwrap();
91 (kv.next_back_leaf_edge(), Some(result))
97 enum LazyLeafHandle<BorrowType, K, V> {
98 Root(NodeRef<BorrowType, K, V, marker::LeafOrInternal>), // not yet descended
99 Edge(Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>),
102 impl<'a, K: 'a, V: 'a> Clone for LazyLeafHandle<marker::Immut<'a>, K, V> {
103 fn clone(&self) -> Self {
105 LazyLeafHandle::Root(root) => LazyLeafHandle::Root(*root),
106 LazyLeafHandle::Edge(edge) => LazyLeafHandle::Edge(*edge),
111 impl<BorrowType, K, V> LazyLeafHandle<BorrowType, K, V> {
112 fn reborrow(&self) -> LazyLeafHandle<marker::Immut<'_>, K, V> {
114 LazyLeafHandle::Root(root) => LazyLeafHandle::Root(root.reborrow()),
115 LazyLeafHandle::Edge(edge) => LazyLeafHandle::Edge(edge.reborrow()),
120 // `front` and `back` are always both `None` or both `Some`.
121 pub struct LazyLeafRange<BorrowType, K, V> {
122 front: Option<LazyLeafHandle<BorrowType, K, V>>,
123 back: Option<LazyLeafHandle<BorrowType, K, V>>,
126 impl<'a, K: 'a, V: 'a> Clone for LazyLeafRange<marker::Immut<'a>, K, V> {
127 fn clone(&self) -> Self {
128 LazyLeafRange { front: self.front.clone(), back: self.back.clone() }
132 impl<BorrowType, K, V> LazyLeafRange<BorrowType, K, V> {
133 pub fn none() -> Self {
134 LazyLeafRange { front: None, back: None }
137 /// Temporarily takes out another, immutable equivalent of the same range.
138 pub fn reborrow(&self) -> LazyLeafRange<marker::Immut<'_>, K, V> {
140 front: self.front.as_ref().map(|f| f.reborrow()),
141 back: self.back.as_ref().map(|b| b.reborrow()),
146 impl<'a, K, V> LazyLeafRange<marker::Immut<'a>, K, V> {
148 pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
149 unsafe { self.init_front().unwrap().next_unchecked() }
153 pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
154 unsafe { self.init_back().unwrap().next_back_unchecked() }
158 impl<'a, K, V> LazyLeafRange<marker::ValMut<'a>, K, V> {
160 pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) {
161 unsafe { self.init_front().unwrap().next_unchecked() }
165 pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) {
166 unsafe { self.init_back().unwrap().next_back_unchecked() }
170 impl<K, V> LazyLeafRange<marker::Dying, K, V> {
// NOTE(review): lines are missing from this extract (the signature opener of
// `take_front` and the `&mut self`/`alloc` parameter lines below) -- confirm
// against the complete file.
// Takes the front endpoint out, resolving a not-yet-descended root to its
// leftmost leaf edge.
173 ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge>> {
174 match self.front.take()? {
175 LazyLeafHandle::Root(root) => Some(root.first_leaf_edge()),
176 LazyLeafHandle::Edge(edge) => Some(edge),
// Steps the front of the range forward, deallocating nodes left behind.
// The `debug_assert!`/`unwrap` make a `Some` front a hard caller requirement.
181 pub unsafe fn deallocating_next_unchecked<A: Allocator + Clone>(
184 ) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
185 debug_assert!(self.front.is_some());
186 let front = self.init_front().unwrap();
187 unsafe { front.deallocating_next_unchecked(alloc) }
// Mirror image of the method above, stepping the back of the range.
191 pub unsafe fn deallocating_next_back_unchecked<A: Allocator + Clone>(
194 ) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
195 debug_assert!(self.back.is_some());
196 let back = self.init_back().unwrap();
197 unsafe { back.deallocating_next_back_unchecked(alloc) }
// Deallocates whatever remains of the tree, starting from the front leaf edge,
// by delegating to the leaf edge's `deallocating_end`.
201 pub fn deallocating_end<A: Allocator + Clone>(&mut self, alloc: A) {
202 if let Some(front) = self.take_front() {
203 front.deallocating_end(alloc)
208 impl<BorrowType: marker::BorrowType, K, V> LazyLeafRange<BorrowType, K, V> {
// NOTE(review): the opening signature lines of both methods are missing from
// this extract; judging by the return types, they resolve the `front`/`back`
// endpoints in place and hand back a mutable borrow of the leaf-edge handle.
// Replace a not-yet-descended root by its leftmost leaf edge, in place.
// `ptr::read` duplicates the root NodeRef bitwise so it can be consumed
// while `self.front` is only borrowed.
211 ) -> Option<&mut Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>> {
212 if let Some(LazyLeafHandle::Root(root)) = &self.front {
213 self.front = Some(LazyLeafHandle::Edge(unsafe { ptr::read(root) }.first_leaf_edge()));
215 match &mut self.front {
217 Some(LazyLeafHandle::Edge(edge)) => Some(edge),
218 // SAFETY: the code above would have replaced it.
219 Some(LazyLeafHandle::Root(_)) => unsafe { hint::unreachable_unchecked() },
// Mirror image of the above for the back endpoint, descending to the
// rightmost leaf edge instead.
225 ) -> Option<&mut Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>> {
226 if let Some(LazyLeafHandle::Root(root)) = &self.back {
227 self.back = Some(LazyLeafHandle::Edge(unsafe { ptr::read(root) }.last_leaf_edge()));
229 match &mut self.back {
231 Some(LazyLeafHandle::Edge(edge)) => Some(edge),
232 // SAFETY: the code above would have replaced it.
233 Some(LazyLeafHandle::Root(_)) => unsafe { hint::unreachable_unchecked() },
238 impl<BorrowType: marker::BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
239 /// Finds the distinct leaf edges delimiting a specified range in a tree.
241 /// If such distinct edges exist, returns them in ascending order, meaning
242 /// that a non-zero number of calls to `next_unchecked` on the `front` of
243 /// the result and/or calls to `next_back_unchecked` on the `back` of the
244 /// result will eventually reach the same edge.
246 /// If there are no such edges, i.e., if the tree contains no key within
247 /// the range, returns an empty `front` and `back`.
250 /// Unless `BorrowType` is `Immut`, do not use the handles to visit the same
252 unsafe fn find_leaf_edges_spanning_range<Q: ?Sized, R>(
255 ) -> LeafRange<BorrowType, K, V>
// NOTE(review): parameter and `where`-clause lines are missing from this
// extract, as are the `Ok((...))` destructuring opener and the `loop` header
// around the `match` below -- confirm against the complete file.
261 match self.search_tree_for_bifurcation(&range) {
// No bifurcation point: the tree holds no key inside the range.
262 Err(_) => LeafRange::none(),
267 mut lower_child_bound,
268 mut upper_child_bound,
// Build both edge handles at the bifurcation node; `ptr::read` duplicates the
// NodeRef bitwise because each handle consumes one.
270 let mut lower_edge = unsafe { Handle::new_edge(ptr::read(&node), lower_edge_idx) };
271 let mut upper_edge = unsafe { Handle::new_edge(node, upper_edge_idx) };
// Descend both sides in lockstep until both reach leaf level.
273 match (lower_edge.force(), upper_edge.force()) {
274 (Leaf(f), Leaf(b)) => return LeafRange { front: Some(f), back: Some(b) },
275 (Internal(f), Internal(b)) => {
276 (lower_edge, lower_child_bound) =
277 f.descend().find_lower_bound_edge(lower_child_bound);
278 (upper_edge, upper_child_bound) =
279 b.descend().find_upper_bound_edge(upper_child_bound);
// Both paths start at the same node, so their depths always agree.
281 _ => unreachable!("BTreeMap has different depths"),
289 fn full_range<BorrowType: marker::BorrowType, K, V>(
290 root1: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
291 root2: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
292 ) -> LazyLeafRange<BorrowType, K, V> {
294 front: Some(LazyLeafHandle::Root(root1)),
295 back: Some(LazyLeafHandle::Root(root2)),
299 impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
300 /// Finds the pair of leaf edges delimiting a specific range in a tree.
302 /// The result is meaningful only if the tree is ordered by key, like the tree
303 /// in a `BTreeMap` is.
304 pub fn range_search<Q, R>(self, range: R) -> LeafRange<marker::Immut<'a>, K, V>
// NOTE(review): the `where`-clause lines of this method are missing from this
// extract -- confirm the bounds against the complete file.
310 // SAFETY: our borrow type is immutable.
311 unsafe { self.find_leaf_edges_spanning_range(range) }
314 /// Finds the pair of leaf edges delimiting an entire tree.
// Passing `self` twice relies on immutable NodeRefs being freely copyable.
315 pub fn full_range(self) -> LazyLeafRange<marker::Immut<'a>, K, V> {
316 full_range(self, self)
320 impl<'a, K: 'a, V: 'a> NodeRef<marker::ValMut<'a>, K, V, marker::LeafOrInternal> {
321 /// Splits a unique reference into a pair of leaf edges delimiting a specified range.
322 /// The result are non-unique references allowing (some) mutation, which must be used
// NOTE(review): doc-comment and `where`-clause lines are missing from this
// extract -- confirm the bounds against the complete file.
325 /// The result is meaningful only if the tree is ordered by key, like the tree
326 /// in a `BTreeMap` is.
329 /// Do not use the duplicate handles to visit the same KV twice.
330 pub fn range_search<Q, R>(self, range: R) -> LeafRange<marker::ValMut<'a>, K, V>
336 unsafe { self.find_leaf_edges_spanning_range(range) }
339 /// Splits a unique reference into a pair of leaf edges delimiting the full range of the tree.
340 /// The results are non-unique references allowing mutation (of values only), so must be used
342 pub fn full_range(self) -> LazyLeafRange<marker::ValMut<'a>, K, V> {
343 // We duplicate the root NodeRef here -- we will never visit the same KV
344 // twice, and never end up with overlapping value references.
345 let self2 = unsafe { ptr::read(&self) };
346 full_range(self, self2)
350 impl<K, V> NodeRef<marker::Dying, K, V, marker::LeafOrInternal> {
351 /// Splits a unique reference into a pair of leaf edges delimiting the full range of the tree.
352 /// The results are non-unique references allowing massively destructive mutation, so must be
353 /// used with the utmost care.
354 pub fn full_range(self) -> LazyLeafRange<marker::Dying, K, V> {
355 // We duplicate the root NodeRef here -- we will never access it in a way
356 // that overlaps references obtained from the root.
357 let self2 = unsafe { ptr::read(&self) };
358 full_range(self, self2)
362 impl<BorrowType: marker::BorrowType, K, V>
363 Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>
365 /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
366 /// on the right side, which is either in the same leaf node or in an ancestor node.
367 /// If the leaf edge is the last one in the tree, returns [`Result::Err`] with the root node.
// NOTE(review): the method-name lines and `loop` headers are missing from this
// extract; the visible bodies walk upward via `ascend` until a right/left KV
// or the root is found -- confirm against the complete file.
371 Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
372 NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
374 let mut edge = self.forget_node_type();
// Keep climbing: if this node has no KV to the right of `edge`, ascend to the
// parent edge and retry, stopping when the root reports no parent.
376 edge = match edge.right_kv() {
377 Ok(kv) => return Ok(kv),
378 Err(last_edge) => match last_edge.into_node().ascend() {
379 Ok(parent_edge) => parent_edge.forget_node_type(),
380 Err(root) => return Err(root),
386 /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
387 /// on the left side, which is either in the same leaf node or in an ancestor node.
388 /// If the leaf edge is the first one in the tree, returns [`Result::Err`] with the root node.
392 Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
393 NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
395 let mut edge = self.forget_node_type();
// Mirror image of the ascent above, using `left_kv`.
397 edge = match edge.left_kv() {
398 Ok(kv) => return Ok(kv),
399 Err(last_edge) => match last_edge.into_node().ascend() {
400 Ok(parent_edge) => parent_edge.forget_node_type(),
401 Err(root) => return Err(root),
408 impl<BorrowType: marker::BorrowType, K, V>
409 Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>
411 /// Given an internal edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
412 /// on the right side, which is either in the same internal node or in an ancestor node.
413 /// If the internal edge is the last one in the tree, returns [`Result::Err`] with the root node.
// NOTE(review): the method-name line, the binding that introduces `edge`, and
// the `loop` header are missing from this extract -- confirm against the
// complete file.
417 Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::KV>,
418 NodeRef<BorrowType, K, V, marker::Internal>,
// Same upward walk as the leaf-edge version, but staying at internal level,
// so no `forget_node_type` conversion is involved.
422 edge = match edge.right_kv() {
423 Ok(internal_kv) => return Ok(internal_kv),
424 Err(last_edge) => match last_edge.into_node().ascend() {
425 Ok(parent_edge) => parent_edge,
426 Err(root) => return Err(root),
433 impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
434 /// Given a leaf edge handle into a dying tree, returns the next leaf edge
435 /// on the right side, and the key-value pair in between, if they exist.
437 /// If the given edge is the last one in a leaf, this method deallocates
438 /// the leaf, as well as any ancestor nodes whose last edge was reached.
439 /// This implies that if no more key-value pair follows, the entire tree
440 /// will have been deallocated and there is nothing left to return.
// NOTE(review): lines are missing from this extract (a `# Safety` heading,
// parameter lines, `loop` headers, and the `Err(last_edge) =>` / terminal
// `None` match arms) -- confirm against the complete file.
443 /// - The given edge must not have been previously returned by counterpart
444 /// `deallocating_next_back`.
445 /// - The returned KV handle is only valid to access the key and value,
446 /// and only valid until the next call to a `deallocating_` method.
447 unsafe fn deallocating_next<A: Allocator + Clone>(
450 ) -> Option<(Self, Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>)>
452 let mut edge = self.forget_node_type();
// Found a KV to the right: duplicate its handle bitwise (`ptr::read`) so one
// copy can step to the following leaf edge while the other is returned.
454 edge = match edge.right_kv() {
455 Ok(kv) => return Some((unsafe { ptr::read(&kv) }.next_leaf_edge(), kv)),
// No KV left in this node: free it and continue from the parent edge.
457 match unsafe { last_edge.into_node().deallocate_and_ascend(alloc.clone()) } {
458 Some(parent_edge) => parent_edge.forget_node_type(),
466 /// Given a leaf edge handle into a dying tree, returns the next leaf edge
467 /// on the left side, and the key-value pair in between, if they exist.
469 /// If the given edge is the first one in a leaf, this method deallocates
470 /// the leaf, as well as any ancestor nodes whose first edge was reached.
471 /// This implies that if no more key-value pair follows, the entire tree
472 /// will have been deallocated and there is nothing left to return.
475 /// - The given edge must not have been previously returned by counterpart
476 /// `deallocating_next`.
477 /// - The returned KV handle is only valid to access the key and value,
478 /// and only valid until the next call to a `deallocating_` method.
479 unsafe fn deallocating_next_back<A: Allocator + Clone>(
482 ) -> Option<(Self, Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>)>
484 let mut edge = self.forget_node_type();
// Mirror image of `deallocating_next`, walking leftwards.
486 edge = match edge.left_kv() {
487 Ok(kv) => return Some((unsafe { ptr::read(&kv) }.next_back_leaf_edge(), kv)),
489 match unsafe { last_edge.into_node().deallocate_and_ascend(alloc.clone()) } {
490 Some(parent_edge) => parent_edge.forget_node_type(),
498 /// Deallocates a pile of nodes from the leaf up to the root.
499 /// This is the only way to deallocate the remainder of a tree after
500 /// `deallocating_next` and `deallocating_next_back` have been nibbling at
501 /// both sides of the tree, and have hit the same edge. As it is intended
502 /// only to be called when all keys and values have been returned,
503 /// no cleanup is done on any of the keys or values.
504 fn deallocating_end<A: Allocator + Clone>(self, alloc: A) {
505 let mut edge = self.forget_node_type();
// Free this node, then each ancestor in turn, until ascending fails at the root.
506 while let Some(parent_edge) =
507 unsafe { edge.into_node().deallocate_and_ascend(alloc.clone()) }
509 edge = parent_edge.forget_node_type();
514 impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge> {
515 /// Moves the leaf edge handle to the next leaf edge and returns references to the
516 /// key and value in between.
519 /// There must be another KV in the direction travelled.
520 unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
521 super::mem::replace(self, |leaf_edge| {
522 let kv = leaf_edge.next_kv().ok().unwrap();
523 (kv.next_leaf_edge(), kv.into_kv())
527 /// Moves the leaf edge handle to the previous leaf edge and returns references to the
528 /// key and value in between.
531 /// There must be another KV in the direction travelled.
532 unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
533 super::mem::replace(self, |leaf_edge| {
534 let kv = leaf_edge.next_back_kv().ok().unwrap();
535 (kv.next_back_leaf_edge(), kv.into_kv())
540 impl<'a, K, V> Handle<NodeRef<marker::ValMut<'a>, K, V, marker::Leaf>, marker::Edge> {
541 /// Moves the leaf edge handle to the next leaf edge and returns references to the
542 /// key and value in between.
// NOTE(review): `# Safety` heading lines and the closing lines of both bodies
// are missing from this extract; judging by the trailing comments, the missing
// final expression presumably converts `kv` into the returned references
// (`into_kv_valmut`, as used by `LeafRange::next_checked` above) -- confirm.
545 /// There must be another KV in the direction travelled.
546 unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) {
// `ptr::read` duplicates the KV handle bitwise: one copy computes the new
// edge for `mem::replace`, the other is handed back as the closure result.
547 let kv = super::mem::replace(self, |leaf_edge| {
548 let kv = leaf_edge.next_kv().ok().unwrap();
549 (unsafe { ptr::read(&kv) }.next_leaf_edge(), kv)
551 // Doing this last is faster, according to benchmarks.
555 /// Moves the leaf edge handle to the previous leaf and returns references to the
556 /// key and value in between.
559 /// There must be another KV in the direction travelled.
560 unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) {
561 let kv = super::mem::replace(self, |leaf_edge| {
562 let kv = leaf_edge.next_back_kv().ok().unwrap();
563 (unsafe { ptr::read(&kv) }.next_back_leaf_edge(), kv)
565 // Doing this last is faster, according to benchmarks.
570 impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
571 /// Moves the leaf edge handle to the next leaf edge and returns the key and value
572 /// in between, deallocating any node left behind while leaving the corresponding
573 /// edge in its parent node dangling.
// NOTE(review): `# Safety` heading lines, parameter lines, and the closing
// `})` of both bodies are missing from this extract -- confirm against the
// complete file.
576 /// - There must be another KV in the direction travelled.
577 /// - That KV was not previously returned by counterpart
578 /// `deallocating_next_back_unchecked` on any copy of the handles
579 /// being used to traverse the tree.
581 /// The only safe way to proceed with the updated handle is to compare it, drop it,
582 /// or call this method or counterpart `deallocating_next_back_unchecked` again.
583 unsafe fn deallocating_next_unchecked<A: Allocator + Clone>(
586 ) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
// The `unwrap` enforces the caller's guarantee that a following KV exists.
587 super::mem::replace(self, |leaf_edge| unsafe {
588 leaf_edge.deallocating_next(alloc).unwrap()
592 /// Moves the leaf edge handle to the previous leaf edge and returns the key and value
593 /// in between, deallocating any node left behind while leaving the corresponding
594 /// edge in its parent node dangling.
597 /// - There must be another KV in the direction travelled.
598 /// - That leaf edge was not previously returned by counterpart
599 /// `deallocating_next_unchecked` on any copy of the handles
600 /// being used to traverse the tree.
602 /// The only safe way to proceed with the updated handle is to compare it, drop it,
603 /// or call this method or counterpart `deallocating_next_unchecked` again.
604 unsafe fn deallocating_next_back_unchecked<A: Allocator + Clone>(
607 ) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
608 super::mem::replace(self, |leaf_edge| unsafe {
609 leaf_edge.deallocating_next_back(alloc).unwrap()
614 impl<BorrowType: marker::BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
615 /// Returns the leftmost leaf edge in or underneath a node - in other words, the edge
616 /// you need first when navigating forward (or last when navigating backward).
618 pub fn first_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
622 Leaf(leaf) => return leaf.first_edge(),
623 Internal(internal) => node = internal.first_edge().descend(),
628 /// Returns the rightmost leaf edge in or underneath a node - in other words, the edge
629 /// you need last when navigating forward (or first when navigating backward).
631 pub fn last_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
635 Leaf(leaf) => return leaf.last_edge(),
636 Internal(internal) => node = internal.last_edge().descend(),
642 pub enum Position<BorrowType, K, V> {
643 Leaf(NodeRef<BorrowType, K, V, marker::Leaf>),
644 Internal(NodeRef<BorrowType, K, V, marker::Internal>),
645 InternalKV(Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::KV>),
648 impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
649 /// Visits leaf nodes and internal KVs in order of ascending keys, and also
650 /// visits internal nodes as a whole in a depth first order, meaning that
651 /// internal nodes precede their individual KVs and their child nodes.
652 pub fn visit_nodes_in_order<F>(self, mut visit: F)
654 F: FnMut(Position<marker::Immut<'a>, K, V>),
// NOTE(review): many structural lines are missing from this extract (the
// `match self.force()` header, the traversal loop, the `Ok`/`Err` arms of
// `edge.next_kv()`, and the accumulator binding used by `calc_length`) --
// confirm against the complete file.
657 Leaf(leaf) => visit(Position::Leaf(leaf)),
658 Internal(internal) => {
659 visit(Position::Internal(internal));
660 let mut edge = internal.first_edge();
// Depth-first walk: descend into each child, and between children surface
// the internal KV that separates them.
662 edge = match edge.descend().force() {
664 visit(Position::Leaf(leaf));
665 match edge.next_kv() {
667 visit(Position::InternalKV(kv));
673 Internal(internal) => {
674 visit(Position::Internal(internal));
675 internal.first_edge()
683 /// Calculates the number of elements in a (sub)tree.
684 pub fn calc_length(self) -> usize {
// Sum the `len` of every visited node; internal KVs are already counted via
// their node's `len`, so they contribute nothing extra here.
686 self.visit_nodes_in_order(|pos| match pos {
687 Position::Leaf(node) => result += node.len(),
688 Position::Internal(node) => result += node.len(),
689 Position::InternalKV(_) => (),
695 impl<BorrowType: marker::BorrowType, K, V>
696 Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>
698 /// Returns the leaf edge closest to a KV for forward navigation.
699 pub fn next_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
701 Leaf(leaf_kv) => leaf_kv.right_edge(),
702 Internal(internal_kv) => {
703 let next_internal_edge = internal_kv.right_edge();
704 next_internal_edge.descend().first_leaf_edge()
709 /// Returns the leaf edge closest to a KV for backward navigation.
710 fn next_back_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
712 Leaf(leaf_kv) => leaf_kv.left_edge(),
713 Internal(internal_kv) => {
714 let next_internal_edge = internal_kv.left_edge();
715 next_internal_edge.descend().last_leaf_edge()