use core::borrow::Borrow;
use core::hint;
use core::ops::RangeBounds;
use core::ptr;

use super::node::{marker, ForceResult::*, Handle, NodeRef};
// A pair of leaf-edge handles delimiting a key range within one tree.
8 // `front` and `back` are always both `None` or both `Some`.
9 pub struct LeafRange<BorrowType, K, V> {
// Edge on the ascending side of the range; advanced by forward iteration.
10 front: Option<Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>>,
// Edge on the descending side of the range; advanced by backward iteration.
11 back: Option<Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>>,
// Cloning is only offered for the shared-immutable borrow type: duplicating
// handles is safe there because neither copy can mutate the tree.
14 impl<'a, K: 'a, V: 'a> Clone for LeafRange<marker::Immut<'a>, K, V> {
15 fn clone(&self) -> Self {
16 LeafRange { front: self.front.clone(), back: self.back.clone() }
20 impl<BorrowType, K, V> LeafRange<BorrowType, K, V> {
// Constructs the empty range, used when a search finds no matching keys.
21 pub fn none() -> Self {
22 LeafRange { front: None, back: None }
// The range is exhausted when both cursors have met on the same edge
// (or when both are `None`, which also compares equal).
25 fn is_empty(&self) -> bool {
26 self.front == self.back
29 /// Temporarily takes out another, immutable equivalent of the same range.
30 pub fn reborrow(&self) -> LeafRange<marker::Immut<'_>, K, V> {
32 front: self.front.as_ref().map(|f| f.reborrow()),
33 back: self.back.as_ref().map(|b| b.reborrow()),
// Checked forward/backward stepping for shared iteration; yields shared
// key/value references tied to the tree's lifetime 'a.
38 impl<'a, K, V> LeafRange<marker::Immut<'a>, K, V> {
40 pub fn next_checked(&mut self) -> Option<(&'a K, &'a V)> {
41 self.perform_next_checked(|kv| kv.into_kv())
45 pub fn next_back_checked(&mut self) -> Option<(&'a K, &'a V)> {
46 self.perform_next_back_checked(|kv| kv.into_kv())
// Checked stepping for value-mutating iteration. The `ptr::read` duplicates
// the KV handle so `into_kv_valmut` can consume it; sound only because each
// KV is visited at most once per direction — see the perform_* drivers.
50 impl<'a, K, V> LeafRange<marker::ValMut<'a>, K, V> {
52 pub fn next_checked(&mut self) -> Option<(&'a K, &'a mut V)> {
53 self.perform_next_checked(|kv| unsafe { ptr::read(kv) }.into_kv_valmut())
57 pub fn next_back_checked(&mut self) -> Option<(&'a K, &'a mut V)> {
58 self.perform_next_back_checked(|kv| unsafe { ptr::read(kv) }.into_kv_valmut())
// Shared drivers behind both next_checked flavors. `super::mem::replace`
// lets the cursor be advanced in place through a by-value closure even
// though we only hold `&mut` to it.
// NOTE(review): the emptiness guard and the `let result = f(&kv);` step are
// elided from this view — presumably `is_empty()` short-circuits to `None`
// before the `unwrap()`s below; confirm against the full file.
62 impl<BorrowType: marker::BorrowType, K, V> LeafRange<BorrowType, K, V> {
63 /// If possible, extract some result from the following KV and move to the edge beyond it.
64 fn perform_next_checked<F, R>(&mut self, f: F) -> Option<R>
66 F: Fn(&Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>) -> R,
// `unwrap` is justified only when the range is non-empty: non-empty implies
// `front` is `Some` and a following KV exists before `back`.
71 super::mem::replace(self.front.as_mut().unwrap(), |front| {
72 let kv = front.next_kv().ok().unwrap();
74 (kv.next_leaf_edge(), Some(result))
79 /// If possible, extract some result from the preceding KV and move to the edge beyond it.
80 fn perform_next_back_checked<F, R>(&mut self, f: F) -> Option<R>
82 F: Fn(&Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>) -> R,
// Mirror image of the forward case: step left from the back cursor.
87 super::mem::replace(self.back.as_mut().unwrap(), |back| {
88 let kv = back.next_back_kv().ok().unwrap();
90 (kv.next_back_leaf_edge(), Some(result))
// Cursor that postpones the descent to a leaf until first use: it starts as
// the whole root and is replaced by a concrete leaf edge on initialization.
96 enum LazyLeafHandle<BorrowType, K, V> {
97 Root(NodeRef<BorrowType, K, V, marker::LeafOrInternal>), // not yet descended
98 Edge(Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>),
// Immutable handles are `Copy`-like (dereferencing `*root` / `*edge` below),
// so cloning is just a bitwise duplication of the shared handle.
101 impl<'a, K: 'a, V: 'a> Clone for LazyLeafHandle<marker::Immut<'a>, K, V> {
102 fn clone(&self) -> Self {
104 LazyLeafHandle::Root(root) => LazyLeafHandle::Root(*root),
105 LazyLeafHandle::Edge(edge) => LazyLeafHandle::Edge(*edge),
110 impl<BorrowType, K, V> LazyLeafHandle<BorrowType, K, V> {
// Produces an immutable view of this cursor without consuming it,
// delegating to the matching `reborrow` of the inner root/edge handle.
111 fn reborrow(&self) -> LazyLeafHandle<marker::Immut<'_>, K, V> {
113 LazyLeafHandle::Root(root) => LazyLeafHandle::Root(root.reborrow()),
114 LazyLeafHandle::Edge(edge) => LazyLeafHandle::Edge(edge.reborrow()),
// Like `LeafRange`, but both ends start as the undescended root and are
// lowered to leaf edges lazily — used for full-tree iteration.
119 // `front` and `back` are always both `None` or both `Some`.
120 pub struct LazyLeafRange<BorrowType, K, V> {
121 front: Option<LazyLeafHandle<BorrowType, K, V>>,
122 back: Option<LazyLeafHandle<BorrowType, K, V>>,
// Only the shared-immutable borrow type may be duplicated, mirroring
// `Clone for LeafRange` above.
125 impl<'a, K: 'a, V: 'a> Clone for LazyLeafRange<marker::Immut<'a>, K, V> {
126 fn clone(&self) -> Self {
127 LazyLeafRange { front: self.front.clone(), back: self.back.clone() }
131 impl<BorrowType, K, V> LazyLeafRange<BorrowType, K, V> {
// The empty range, e.g. for iterating a map with no root.
132 pub fn none() -> Self {
133 LazyLeafRange { front: None, back: None }
136 /// Temporarily takes out another, immutable equivalent of the same range.
137 pub fn reborrow(&self) -> LazyLeafRange<marker::Immut<'_>, K, V> {
139 front: self.front.as_ref().map(|f| f.reborrow()),
140 back: self.back.as_ref().map(|b| b.reborrow()),
// Unchecked stepping for shared iteration: the caller guarantees another KV
// exists in the travelled direction (see the `# Safety` of the helpers).
145 impl<'a, K, V> LazyLeafRange<marker::Immut<'a>, K, V> {
147 pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
// `init_front` descends to the first leaf edge on first use.
148 unsafe { self.init_front().unwrap().next_unchecked() }
152 pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
153 unsafe { self.init_back().unwrap().next_back_unchecked() }
// Same shape as the `Immut` version, but yields `&'a mut V`; soundness rests
// on never visiting the same KV twice across the two cursors.
157 impl<'a, K, V> LazyLeafRange<marker::ValMut<'a>, K, V> {
159 pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) {
160 unsafe { self.init_front().unwrap().next_unchecked() }
164 pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) {
165 unsafe { self.init_back().unwrap().next_back_unchecked() }
// Drop-path navigation: each step consumes KVs and deallocates nodes left
// behind; used when the whole tree is being torn down.
169 impl<K, V> LazyLeafRange<marker::Dying, K, V> {
// Takes the front cursor out, descending to the first leaf edge if it was
// still the undescended root. Returns `None` once the range is spent.
172 ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge>> {
173 match self.front.take()? {
174 LazyLeafHandle::Root(root) => Some(root.first_leaf_edge()),
175 LazyLeafHandle::Edge(edge) => Some(edge),
// SAFETY contract (per the helpers below): a KV must remain in the
// travelled direction; the debug_assert only checks the weaker `Some`-ness.
180 pub unsafe fn deallocating_next_unchecked(
182 ) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
183 debug_assert!(self.front.is_some());
184 let front = self.init_front().unwrap();
185 unsafe { front.deallocating_next_unchecked() }
// Mirror of the above, nibbling from the back.
189 pub unsafe fn deallocating_next_back_unchecked(
191 ) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
192 debug_assert!(self.back.is_some());
193 let back = self.init_back().unwrap();
194 unsafe { back.deallocating_next_back_unchecked() }
// Frees whatever spine of nodes remains once both ends have met.
198 pub fn deallocating_end(&mut self) {
199 if let Some(front) = self.take_front() {
200 front.deallocating_end()
// Lazily lowers each end from `Root` to a concrete leaf `Edge`, then hands
// out a mutable reference to that edge for stepping.
205 impl<BorrowType: marker::BorrowType, K, V> LazyLeafRange<BorrowType, K, V> {
208 ) -> Option<&mut Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>> {
209 if let Some(LazyLeafHandle::Root(root)) = &self.front {
// `ptr::read` duplicates the root handle so we can consume it while the
// old enum value is simultaneously overwritten on the next line.
210 self.front = Some(LazyLeafHandle::Edge(unsafe { ptr::read(root) }.first_leaf_edge()));
212 match &mut self.front {
214 Some(LazyLeafHandle::Edge(edge)) => Some(edge),
215 // SAFETY: the code above would have replaced it.
216 Some(LazyLeafHandle::Root(_)) => unsafe { hint::unreachable_unchecked() },
// Symmetric initialization of the back end, descending to the last leaf edge.
222 ) -> Option<&mut Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>> {
223 if let Some(LazyLeafHandle::Root(root)) = &self.back {
224 self.back = Some(LazyLeafHandle::Edge(unsafe { ptr::read(root) }.last_leaf_edge()));
226 match &mut self.back {
228 Some(LazyLeafHandle::Edge(edge)) => Some(edge),
229 // SAFETY: the code above would have replaced it.
230 Some(LazyLeafHandle::Root(_)) => unsafe { hint::unreachable_unchecked() },
235 impl<BorrowType: marker::BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
236 /// Finds the distinct leaf edges delimiting a specified range in a tree.
238 /// If such distinct edges exist, returns them in ascending order, meaning
239 /// that a non-zero number of calls to `next_unchecked` on the `front` of
240 /// the result and/or calls to `next_back_unchecked` on the `back` of the
241 /// result will eventually reach the same edge.
243 /// If there are no such edges, i.e., if the tree contains no key within
244 /// the range, returns an empty `front` and `back`.
247 /// Unless `BorrowType` is `Immut`, do not use the handles to visit the same
249 unsafe fn find_leaf_edges_spanning_range<Q: ?Sized, R>(
252 ) -> LeafRange<BorrowType, K, V>
// First locate the bifurcation node — the deepest node containing both
// bounds — then walk each bound down its own subtree in lockstep.
258 match self.search_tree_for_bifurcation(&range) {
// NOTE(review): the `Err` payload and the `Ok` destructuring header are
// elided from this view; `Err` here means the range selects no keys.
259 Err(_) => LeafRange::none(),
264 mut lower_child_bound,
265 mut upper_child_bound,
// Duplicate the node handle: both edges start inside the same node.
267 let mut lower_edge = unsafe { Handle::new_edge(ptr::read(&node), lower_edge_idx) };
268 let mut upper_edge = unsafe { Handle::new_edge(node, upper_edge_idx) };
// Descend both sides one level at a time; the two walks must reach leaf
// depth simultaneously because all leaves sit at equal depth.
270 match (lower_edge.force(), upper_edge.force()) {
271 (Leaf(f), Leaf(b)) => return LeafRange { front: Some(f), back: Some(b) },
272 (Internal(f), Internal(b)) => {
273 (lower_edge, lower_child_bound) =
274 f.descend().find_lower_bound_edge(lower_child_bound);
275 (upper_edge, upper_child_bound) =
276 b.descend().find_upper_bound_edge(upper_child_bound);
278 _ => unreachable!("BTreeMap has different depths"),
// Builds a lazy range spanning the entire tree from two copies of the root
// handle; callers guarantee the two copies are used without overlap.
286 fn full_range<BorrowType: marker::BorrowType, K, V>(
287 root1: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
288 root2: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
289 ) -> LazyLeafRange<BorrowType, K, V> {
291 front: Some(LazyLeafHandle::Root(root1)),
292 back: Some(LazyLeafHandle::Root(root2)),
296 impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
297 /// Finds the pair of leaf edges delimiting a specific range in a tree.
299 /// The result is meaningful only if the tree is ordered by key, like the tree
300 /// in a `BTreeMap` is.
301 pub fn range_search<Q, R>(self, range: R) -> LeafRange<marker::Immut<'a>, K, V>
307 // SAFETY: our borrow type is immutable.
308 unsafe { self.find_leaf_edges_spanning_range(range) }
311 /// Finds the pair of leaf edges delimiting an entire tree.
// Passing `self` twice is fine: `Immut` handles may alias freely.
312 pub fn full_range(self) -> LazyLeafRange<marker::Immut<'a>, K, V> {
313 full_range(self, self)
317 impl<'a, K: 'a, V: 'a> NodeRef<marker::ValMut<'a>, K, V, marker::LeafOrInternal> {
318 /// Splits a unique reference into a pair of leaf edges delimiting a specified range.
319 /// The result are non-unique references allowing (some) mutation, which must be used
322 /// The result is meaningful only if the tree is ordered by key, like the tree
323 /// in a `BTreeMap` is.
326 /// Do not use the duplicate handles to visit the same KV twice.
327 pub fn range_search<Q, R>(self, range: R) -> LeafRange<marker::ValMut<'a>, K, V>
// SAFETY burden is forwarded to the caller via the doc contract above.
333 unsafe { self.find_leaf_edges_spanning_range(range) }
336 /// Splits a unique reference into a pair of leaf edges delimiting the full range of the tree.
337 /// The results are non-unique references allowing mutation (of values only), so must be used
339 pub fn full_range(self) -> LazyLeafRange<marker::ValMut<'a>, K, V> {
340 // We duplicate the root NodeRef here -- we will never visit the same KV
341 // twice, and never end up with overlapping value references.
342 let self2 = unsafe { ptr::read(&self) };
343 full_range(self, self2)
347 impl<K, V> NodeRef<marker::Dying, K, V, marker::LeafOrInternal> {
348 /// Splits a unique reference into a pair of leaf edges delimiting the full range of the tree.
349 /// The results are non-unique references allowing massively destructive mutation, so must be
350 /// used with the utmost care.
351 pub fn full_range(self) -> LazyLeafRange<marker::Dying, K, V> {
352 // We duplicate the root NodeRef here -- we will never access it in a way
353 // that overlaps references obtained from the root.
// `ptr::read` bit-copies the non-Copy handle; both copies feed `full_range`.
354 let self2 = unsafe { ptr::read(&self) };
355 full_range(self, self2)
359 impl<BorrowType: marker::BorrowType, K, V>
360 Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>
362 /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
363 /// on the right side, which is either in the same leaf node or in an ancestor node.
364 /// If the leaf edge is the last one in the tree, returns [`Result::Err`] with the root node.
368 Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
369 NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
// Climb toward the root until some node has a KV to our right; running out
// of parents means we were at the very last edge of the tree.
371 let mut edge = self.forget_node_type();
373 edge = match edge.right_kv() {
374 Ok(kv) => return Ok(kv),
375 Err(last_edge) => match last_edge.into_node().ascend() {
376 Ok(parent_edge) => parent_edge.forget_node_type(),
377 Err(root) => return Err(root),
383 /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
384 /// on the left side, which is either in the same leaf node or in an ancestor node.
385 /// If the leaf edge is the first one in the tree, returns [`Result::Err`] with the root node.
389 Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
390 NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
// Mirror image: climb until some node has a KV to our left.
392 let mut edge = self.forget_node_type();
394 edge = match edge.left_kv() {
395 Ok(kv) => return Ok(kv),
396 Err(last_edge) => match last_edge.into_node().ascend() {
397 Ok(parent_edge) => parent_edge.forget_node_type(),
398 Err(root) => return Err(root),
405 impl<BorrowType: marker::BorrowType, K, V>
406 Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>
408 /// Given an internal edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
409 /// on the right side, which is either in the same internal node or in an ancestor node.
410 /// If the internal edge is the last one in the tree, returns [`Result::Err`] with the root node.
414 Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::KV>,
415 NodeRef<BorrowType, K, V, marker::Internal>,
// Internal-level analogue of the leaf version above; stays at internal
// node type throughout, so no `forget_node_type` is needed.
419 edge = match edge.right_kv() {
420 Ok(internal_kv) => return Ok(internal_kv),
421 Err(last_edge) => match last_edge.into_node().ascend() {
422 Ok(parent_edge) => parent_edge,
423 Err(root) => return Err(root),
430 impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
431 /// Given a leaf edge handle into a dying tree, returns the next leaf edge
432 /// on the right side, and the key-value pair in between, if they exist.
434 /// If the given edge is the last one in a leaf, this method deallocates
435 /// the leaf, as well as any ancestor nodes whose last edge was reached.
436 /// This implies that if no more key-value pair follows, the entire tree
437 /// will have been deallocated and there is nothing left to return.
440 /// - The given edge must not have been previously returned by counterpart
441 /// `deallocating_next_back`.
442 /// - The returned KV handle is only valid to access the key and value,
443 /// and only valid until the next call to a `deallocating_` method.
444 unsafe fn deallocating_next(
446 ) -> Option<(Self, Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>)>
// Same climb as `next_kv`, except exhausted nodes are freed on the way up.
448 let mut edge = self.forget_node_type();
450 edge = match edge.right_kv() {
// `ptr::read` duplicates the KV handle: one copy becomes the next edge,
// the other is handed back so the caller can take the key and value.
451 Ok(kv) => return Some((unsafe { ptr::read(&kv) }.next_leaf_edge(), kv)),
452 Err(last_edge) => match unsafe { last_edge.into_node().deallocate_and_ascend() } {
453 Some(parent_edge) => parent_edge.forget_node_type(),
460 /// Given a leaf edge handle into a dying tree, returns the next leaf edge
461 /// on the left side, and the key-value pair in between, if they exist.
463 /// If the given edge is the first one in a leaf, this method deallocates
464 /// the leaf, as well as any ancestor nodes whose first edge was reached.
465 /// This implies that if no more key-value pair follows, the entire tree
466 /// will have been deallocated and there is nothing left to return.
469 /// - The given edge must not have been previously returned by counterpart
470 /// `deallocating_next`.
471 /// - The returned KV handle is only valid to access the key and value,
472 /// and only valid until the next call to a `deallocating_` method.
473 unsafe fn deallocating_next_back(
475 ) -> Option<(Self, Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>)>
477 let mut edge = self.forget_node_type();
479 edge = match edge.left_kv() {
480 Ok(kv) => return Some((unsafe { ptr::read(&kv) }.next_back_leaf_edge(), kv)),
481 Err(last_edge) => match unsafe { last_edge.into_node().deallocate_and_ascend() } {
482 Some(parent_edge) => parent_edge.forget_node_type(),
489 /// Deallocates a pile of nodes from the leaf up to the root.
490 /// This is the only way to deallocate the remainder of a tree after
491 /// `deallocating_next` and `deallocating_next_back` have been nibbling at
492 /// both sides of the tree, and have hit the same edge. As it is intended
493 /// only to be called when all keys and values have been returned,
494 /// no cleanup is done on any of the keys or values.
495 fn deallocating_end(self) {
496 let mut edge = self.forget_node_type();
// Walk straight up, freeing each node, until the root has been freed too.
497 while let Some(parent_edge) = unsafe { edge.into_node().deallocate_and_ascend() } {
498 edge = parent_edge.forget_node_type();
503 impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge> {
504 /// Moves the leaf edge handle to the next leaf edge and returns references to the
505 /// key and value in between.
508 /// There must be another KV in the direction travelled.
509 unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
// `mem::replace` advances the cursor in place while yielding the KV refs;
// `unwrap` is backed by the safety contract above.
510 super::mem::replace(self, |leaf_edge| {
511 let kv = leaf_edge.next_kv().ok().unwrap();
512 (kv.next_leaf_edge(), kv.into_kv())
516 /// Moves the leaf edge handle to the previous leaf edge and returns references to the
517 /// key and value in between.
520 /// There must be another KV in the direction travelled.
521 unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
522 super::mem::replace(self, |leaf_edge| {
523 let kv = leaf_edge.next_back_kv().ok().unwrap();
524 (kv.next_back_leaf_edge(), kv.into_kv())
529 impl<'a, K, V> Handle<NodeRef<marker::ValMut<'a>, K, V, marker::Leaf>, marker::Edge> {
530 /// Moves the leaf edge handle to the next leaf edge and returns references to the
531 /// key and value in between.
534 /// There must be another KV in the direction travelled.
535 unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) {
536 let kv = super::mem::replace(self, |leaf_edge| {
537 let kv = leaf_edge.next_kv().ok().unwrap();
// Duplicate the KV handle: one copy advances the cursor, the other is
// returned so the closure's result can be split into (&K, &mut V) below.
538 (unsafe { ptr::read(&kv) }.next_leaf_edge(), kv)
540 // Doing this last is faster, according to benchmarks.
544 /// Moves the leaf edge handle to the previous leaf and returns references to the
545 /// key and value in between.
548 /// There must be another KV in the direction travelled.
549 unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) {
550 let kv = super::mem::replace(self, |leaf_edge| {
551 let kv = leaf_edge.next_back_kv().ok().unwrap();
552 (unsafe { ptr::read(&kv) }.next_back_leaf_edge(), kv)
554 // Doing this last is faster, according to benchmarks.
559 impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
560 /// Moves the leaf edge handle to the next leaf edge and returns the key and value
561 /// in between, deallocating any node left behind while leaving the corresponding
562 /// edge in its parent node dangling.
565 /// - There must be another KV in the direction travelled.
566 /// - That KV was not previously returned by counterpart
567 /// `deallocating_next_back_unchecked` on any copy of the handles
568 /// being used to traverse the tree.
570 /// The only safe way to proceed with the updated handle is to compare it, drop it,
571 /// or call this method or counterpart `deallocating_next_back_unchecked` again.
572 unsafe fn deallocating_next_unchecked(
574 ) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
// `unwrap` is justified by the "another KV exists" precondition above.
575 super::mem::replace(self, |leaf_edge| unsafe { leaf_edge.deallocating_next().unwrap() })
578 /// Moves the leaf edge handle to the previous leaf edge and returns the key and value
579 /// in between, deallocating any node left behind while leaving the corresponding
580 /// edge in its parent node dangling.
583 /// - There must be another KV in the direction travelled.
584 /// - That leaf edge was not previously returned by counterpart
585 /// `deallocating_next_unchecked` on any copy of the handles
586 /// being used to traverse the tree.
588 /// The only safe way to proceed with the updated handle is to compare it, drop it,
589 /// or call this method or counterpart `deallocating_next_unchecked` again.
590 unsafe fn deallocating_next_back_unchecked(
592 ) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
593 super::mem::replace(self, |leaf_edge| unsafe {
594 leaf_edge.deallocating_next_back().unwrap()
599 impl<BorrowType: marker::BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
600 /// Returns the leftmost leaf edge in or underneath a node - in other words, the edge
601 /// you need first when navigating forward (or last when navigating backward).
603 pub fn first_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
// Descend along first edges until a leaf is reached.
607 Leaf(leaf) => return leaf.first_edge(),
608 Internal(internal) => node = internal.first_edge().descend(),
613 /// Returns the rightmost leaf edge in or underneath a node - in other words, the edge
614 /// you need last when navigating forward (or first when navigating backward).
616 pub fn last_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
// Descend along last edges until a leaf is reached.
620 Leaf(leaf) => return leaf.last_edge(),
621 Internal(internal) => node = internal.last_edge().descend(),
// The kinds of positions reported to the `visit_nodes_in_order` callback.
627 pub enum Position<BorrowType, K, V> {
// A whole leaf node.
628 Leaf(NodeRef<BorrowType, K, V, marker::Leaf>),
// A whole internal node, reported before its individual KVs and children.
629 Internal(NodeRef<BorrowType, K, V, marker::Internal>),
// One KV inside an internal node, reported between its two child subtrees.
630 InternalKV(Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::KV>),
633 impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
634 /// Visits leaf nodes and internal KVs in order of ascending keys, and also
635 /// visits internal nodes as a whole in a depth first order, meaning that
636 /// internal nodes precede their individual KVs and their child nodes.
637 pub fn visit_nodes_in_order<F>(self, mut visit: F)
639 F: FnMut(Position<marker::Immut<'a>, K, V>),
// Iterative in-order walk: after a leaf, step to the parent KV (if any)
// and continue with the subtree to its right.
// NOTE(review): several match/loop header lines are elided from this view;
// the control flow outline is reconstructed from the visible arms.
642 Leaf(leaf) => visit(Position::Leaf(leaf)),
643 Internal(internal) => {
644 visit(Position::Internal(internal));
645 let mut edge = internal.first_edge();
647 edge = match edge.descend().force() {
649 visit(Position::Leaf(leaf));
650 match edge.next_kv() {
652 visit(Position::InternalKV(kv));
658 Internal(internal) => {
659 visit(Position::Internal(internal));
660 internal.first_edge()
668 /// Calculates the number of elements in a (sub)tree.
// Counts by summing node lengths; internal KVs are skipped because each is
// already included in its node's `len()`.
669 pub fn calc_length(self) -> usize {
671 self.visit_nodes_in_order(|pos| match pos {
672 Position::Leaf(node) => result += node.len(),
673 Position::Internal(node) => result += node.len(),
674 Position::InternalKV(_) => (),
680 impl<BorrowType: marker::BorrowType, K, V>
681 Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>
683 /// Returns the leaf edge closest to a KV for forward navigation.
684 pub fn next_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
// In a leaf, that is simply the edge to the right of the KV; in an
// internal node, it is the leftmost leaf edge of the right child subtree.
686 Leaf(leaf_kv) => leaf_kv.right_edge(),
687 Internal(internal_kv) => {
688 let next_internal_edge = internal_kv.right_edge();
689 next_internal_edge.descend().first_leaf_edge()
694 /// Returns the leaf edge closest to a KV for backward navigation.
695 fn next_back_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
// Mirror image: left edge in a leaf, or the rightmost leaf edge of the
// left child subtree in an internal node.
697 Leaf(leaf_kv) => leaf_kv.left_edge(),
698 Internal(internal_kv) => {
699 let next_internal_edge = internal_kv.left_edge();
700 next_internal_edge.descend().last_leaf_edge()