1 //! A priority queue implemented with a binary heap.
3 //! Insertion and popping the largest element have *O*(log(*n*)) time complexity.
4 //! Checking the largest element is *O*(1). Converting a vector to a binary heap
5 //! can be done in-place, and has *O*(*n*) complexity. A binary heap can also be
6 //! converted to a sorted vector in-place, allowing it to be used for an *O*(*n* \* log(*n*))
11 //! This is a larger example that implements [Dijkstra's algorithm][dijkstra]
12 //! to solve the [shortest path problem][sssp] on a [directed graph][dir_graph].
13 //! It shows how to use [`BinaryHeap`] with custom types.
15 //! [dijkstra]: https://en.wikipedia.org/wiki/Dijkstra%27s_algorithm
16 //! [sssp]: https://en.wikipedia.org/wiki/Shortest_path_problem
17 //! [dir_graph]: https://en.wikipedia.org/wiki/Directed_graph
20 //! use std::cmp::Ordering;
21 //! use std::collections::BinaryHeap;
23 //! #[derive(Copy, Clone, Eq, PartialEq)]
29 //! // The priority queue depends on `Ord`.
30 //! // Explicitly implement the trait so the queue becomes a min-heap
31 //! // instead of a max-heap.
32 //! impl Ord for State {
33 //! fn cmp(&self, other: &Self) -> Ordering {
34 //! // Notice that we flip the ordering on costs.
35 //! // In case of a tie we compare positions - this step is necessary
36 //! // to make implementations of `PartialEq` and `Ord` consistent.
37 //! other.cost.cmp(&self.cost)
38 //! .then_with(|| self.position.cmp(&other.position))
42 //! // `PartialOrd` needs to be implemented as well.
43 //! impl PartialOrd for State {
44 //! fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
45 //! Some(self.cmp(other))
49 //! // Each node is represented as a `usize`, for a shorter implementation.
55 //! // Dijkstra's shortest path algorithm.
57 //! // Start at `start` and use `dist` to track the current shortest distance
58 //! // to each node. This implementation isn't memory-efficient as it may leave duplicate
59 //! // nodes in the queue. It also uses `usize::MAX` as a sentinel value,
60 //! // for a simpler implementation.
61 //! fn shortest_path(adj_list: &Vec<Vec<Edge>>, start: usize, goal: usize) -> Option<usize> {
62 //! // dist[node] = current shortest distance from `start` to `node`
63 //! let mut dist: Vec<_> = (0..adj_list.len()).map(|_| usize::MAX).collect();
65 //! let mut heap = BinaryHeap::new();
67 //! // We're at `start`, with a zero cost
69 //! heap.push(State { cost: 0, position: start });
71 //! // Examine the frontier with lower cost nodes first (min-heap)
72 //! while let Some(State { cost, position }) = heap.pop() {
73 //! // Alternatively we could have continued to find all shortest paths
74 //! if position == goal { return Some(cost); }
76 //! // Important as we may have already found a better way
77 //! if cost > dist[position] { continue; }
79 //! // For each node we can reach, see if we can find a way with
80 //! // a lower cost going through this node
81 //! for edge in &adj_list[position] {
82 //! let next = State { cost: cost + edge.cost, position: edge.node };
84 //! // If so, add it to the frontier and continue
85 //! if next.cost < dist[next.position] {
87 //! // Relaxation, we have now found a better way
88 //! dist[next.position] = next.cost;
93 //! // Goal not reachable
98 //! // This is the directed graph we're going to use.
99 //! // The node numbers correspond to the different states,
100 //! // and the edge weights symbolize the cost of moving
101 //! // from one node to another.
102 //! // Note that the edges are one-way.
105 //! // +-----------------+
108 //! // 0 -----> 1 -----> 3 ---> 4
112 //! // +------> 2 -------+ |
114 //! // +---------------+
116 //! // The graph is represented as an adjacency list where each index,
117 //! // corresponding to a node value, has a list of outgoing edges.
118 //! // Chosen for its efficiency.
119 //! let graph = vec![
121 //! vec![Edge { node: 2, cost: 10 },
122 //! Edge { node: 1, cost: 1 }],
124 //! vec![Edge { node: 3, cost: 2 }],
126 //! vec![Edge { node: 1, cost: 1 },
127 //! Edge { node: 3, cost: 3 },
128 //! Edge { node: 4, cost: 1 }],
130 //! vec![Edge { node: 0, cost: 7 },
131 //! Edge { node: 4, cost: 2 }],
135 //! assert_eq!(shortest_path(&graph, 0, 1), Some(1));
136 //! assert_eq!(shortest_path(&graph, 0, 3), Some(3));
137 //! assert_eq!(shortest_path(&graph, 3, 0), Some(7));
138 //! assert_eq!(shortest_path(&graph, 0, 4), Some(5));
139 //! assert_eq!(shortest_path(&graph, 4, 0), None);
143 #![allow(missing_docs)]
144 #![stable(feature = "rust1", since = "1.0.0")]
147 use core::iter::{FromIterator, FusedIterator, InPlaceIterable, SourceIter, TrustedLen};
148 use core::mem::{self, swap, ManuallyDrop};
149 use core::ops::{Deref, DerefMut};
153 use crate::vec::{self, AsIntoIter, Vec};
155 use super::SpecExtend;
157 /// A priority queue implemented with a binary heap.
159 /// This will be a max-heap.
161 /// It is a logic error for an item to be modified in such a way that the
162 /// item's ordering relative to any other item, as determined by the `Ord`
163 /// trait, changes while it is in the heap. This is normally only possible
164 /// through `Cell`, `RefCell`, global state, I/O, or unsafe code. The
165 /// behavior resulting from such a logic error is not specified, but will
166 /// not result in undefined behavior. This could include panics, incorrect
167 /// results, aborts, memory leaks, and non-termination.
172 /// use std::collections::BinaryHeap;
174 /// // Type inference lets us omit an explicit type signature (which
175 /// // would be `BinaryHeap<i32>` in this example).
176 /// let mut heap = BinaryHeap::new();
178 /// // We can use peek to look at the next item in the heap. In this case,
179 /// // there's no items in there yet so we get None.
180 /// assert_eq!(heap.peek(), None);
182 /// // Let's add some scores...
187 /// // Now peek shows the most important item in the heap.
188 /// assert_eq!(heap.peek(), Some(&5));
190 /// // We can check the length of a heap.
191 /// assert_eq!(heap.len(), 3);
193 /// // We can iterate over the items in the heap, although they are returned in
194 /// // a random order.
196 /// println!("{}", x);
199 /// // If we instead pop these scores, they should come back in order.
200 /// assert_eq!(heap.pop(), Some(5));
201 /// assert_eq!(heap.pop(), Some(2));
202 /// assert_eq!(heap.pop(), Some(1));
203 /// assert_eq!(heap.pop(), None);
205 /// // We can clear the heap of any remaining items.
208 /// // The heap should now be empty.
209 /// assert!(heap.is_empty())
214 /// Either `std::cmp::Reverse` or a custom `Ord` implementation can be used to
215 /// make `BinaryHeap` a min-heap. This makes `heap.pop()` return the smallest
216 /// value instead of the greatest one.
219 /// use std::collections::BinaryHeap;
220 /// use std::cmp::Reverse;
222 /// let mut heap = BinaryHeap::new();
224 /// // Wrap values in `Reverse`
225 /// heap.push(Reverse(1));
226 /// heap.push(Reverse(5));
227 /// heap.push(Reverse(2));
229 /// // If we pop these scores now, they should come back in the reverse order.
230 /// assert_eq!(heap.pop(), Some(Reverse(1)));
231 /// assert_eq!(heap.pop(), Some(Reverse(2)));
232 /// assert_eq!(heap.pop(), Some(Reverse(5)));
233 /// assert_eq!(heap.pop(), None);
236 /// # Time complexity
238 /// | [push] | [pop] | [peek]/[peek\_mut] |
239 /// |--------|-----------|--------------------|
240 /// | *O*(1)~ | *O*(log(*n*)) | *O*(1) |
242 /// The value for `push` is an expected cost; the method documentation gives a
243 /// more detailed analysis.
245 /// [push]: BinaryHeap::push
246 /// [pop]: BinaryHeap::pop
247 /// [peek]: BinaryHeap::peek
248 /// [peek\_mut]: BinaryHeap::peek_mut
249 #[stable(feature = "rust1", since = "1.0.0")]
250 pub struct BinaryHeap<T> {
254 /// Structure wrapping a mutable reference to the greatest item on a
257 /// This `struct` is created by the [`peek_mut`] method on [`BinaryHeap`]. See
258 /// its documentation for more.
260 /// [`peek_mut`]: BinaryHeap::peek_mut
261 #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
262 pub struct PeekMut<'a, T: 'a + Ord> {
263 heap: &'a mut BinaryHeap<T>,
267 #[stable(feature = "collection_debug", since = "1.17.0")]
268 impl<T: Ord + fmt::Debug> fmt::Debug for PeekMut<'_, T> {
269 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
270 f.debug_tuple("PeekMut").field(&self.heap.data[0]).finish()
274 #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
275 impl<T: Ord> Drop for PeekMut<'_, T> {
278 self.heap.sift_down(0);
283 #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
284 impl<T: Ord> Deref for PeekMut<'_, T> {
286 fn deref(&self) -> &T {
287 debug_assert!(!self.heap.is_empty());
288 // SAFE: PeekMut is only instantiated for non-empty heaps
289 unsafe { self.heap.data.get_unchecked(0) }
293 #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
294 impl<T: Ord> DerefMut for PeekMut<'_, T> {
295 fn deref_mut(&mut self) -> &mut T {
296 debug_assert!(!self.heap.is_empty());
298 // SAFE: PeekMut is only instantiated for non-empty heaps
299 unsafe { self.heap.data.get_unchecked_mut(0) }
303 impl<'a, T: Ord> PeekMut<'a, T> {
304 /// Removes the peeked value from the heap and returns it.
305 #[stable(feature = "binary_heap_peek_mut_pop", since = "1.18.0")]
306 pub fn pop(mut this: PeekMut<'a, T>) -> T {
307 let value = this.heap.pop().unwrap();
313 #[stable(feature = "rust1", since = "1.0.0")]
314 impl<T: Clone> Clone for BinaryHeap<T> {
315 fn clone(&self) -> Self {
316 BinaryHeap { data: self.data.clone() }
319 fn clone_from(&mut self, source: &Self) {
320 self.data.clone_from(&source.data);
324 #[stable(feature = "rust1", since = "1.0.0")]
325 impl<T: Ord> Default for BinaryHeap<T> {
326 /// Creates an empty `BinaryHeap<T>`.
328 fn default() -> BinaryHeap<T> {
333 #[stable(feature = "binaryheap_debug", since = "1.4.0")]
334 impl<T: fmt::Debug> fmt::Debug for BinaryHeap<T> {
335 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
336 f.debug_list().entries(self.iter()).finish()
340 impl<T: Ord> BinaryHeap<T> {
341 /// Creates an empty `BinaryHeap` as a max-heap.
348 /// use std::collections::BinaryHeap;
349 /// let mut heap = BinaryHeap::new();
352 #[stable(feature = "rust1", since = "1.0.0")]
353 pub fn new() -> BinaryHeap<T> {
354 BinaryHeap { data: vec![] }
357 /// Creates an empty `BinaryHeap` with a specific capacity.
358 /// This preallocates enough memory for `capacity` elements,
359 /// so that the `BinaryHeap` does not have to be reallocated
360 /// until it contains at least that many values.
367 /// use std::collections::BinaryHeap;
368 /// let mut heap = BinaryHeap::with_capacity(10);
371 #[stable(feature = "rust1", since = "1.0.0")]
372 pub fn with_capacity(capacity: usize) -> BinaryHeap<T> {
373 BinaryHeap { data: Vec::with_capacity(capacity) }
376 /// Returns a mutable reference to the greatest item in the binary heap, or
377 /// `None` if it is empty.
379 /// Note: If the `PeekMut` value is leaked, the heap may be in an
380 /// inconsistent state.
387 /// use std::collections::BinaryHeap;
388 /// let mut heap = BinaryHeap::new();
389 /// assert!(heap.peek_mut().is_none());
395 /// let mut val = heap.peek_mut().unwrap();
398 /// assert_eq!(heap.peek(), Some(&2));
401 /// # Time complexity
403 /// If the item is modified then the worst case time complexity is *O*(log(*n*)),
404 /// otherwise it's *O*(1).
405 #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
406 pub fn peek_mut(&mut self) -> Option<PeekMut<'_, T>> {
407 if self.is_empty() { None } else { Some(PeekMut { heap: self, sift: false }) }
410 /// Removes the greatest item from the binary heap and returns it, or `None` if it
418 /// use std::collections::BinaryHeap;
419 /// let mut heap = BinaryHeap::from(vec![1, 3]);
421 /// assert_eq!(heap.pop(), Some(3));
422 /// assert_eq!(heap.pop(), Some(1));
423 /// assert_eq!(heap.pop(), None);
426 /// # Time complexity
428 /// The worst case cost of `pop` on a heap containing *n* elements is *O*(log(*n*)).
429 #[stable(feature = "rust1", since = "1.0.0")]
430 pub fn pop(&mut self) -> Option<T> {
431 self.data.pop().map(|mut item| {
432 if !self.is_empty() {
433 swap(&mut item, &mut self.data[0]);
434 self.sift_down_to_bottom(0);
440 /// Pushes an item onto the binary heap.
447 /// use std::collections::BinaryHeap;
448 /// let mut heap = BinaryHeap::new();
453 /// assert_eq!(heap.len(), 3);
454 /// assert_eq!(heap.peek(), Some(&5));
457 /// # Time complexity
459 /// The expected cost of `push`, averaged over every possible ordering of
460 /// the elements being pushed, and over a sufficiently large number of
461 /// pushes, is *O*(1). This is the most meaningful cost metric when pushing
462 /// elements that are *not* already in any sorted pattern.
464 /// The time complexity degrades if elements are pushed in predominantly
465 /// ascending order. In the worst case, elements are pushed in ascending
466 /// sorted order and the amortized cost per push is *O*(log(*n*)) against a heap
467 /// containing *n* elements.
469 /// The worst case cost of a *single* call to `push` is *O*(*n*). The worst case
470 /// occurs when capacity is exhausted and needs a resize. The resize cost
471 /// has been amortized in the previous figures.
472 #[stable(feature = "rust1", since = "1.0.0")]
473 pub fn push(&mut self, item: T) {
474 let old_len = self.len();
475 self.data.push(item);
476 self.sift_up(0, old_len);
479 /// Consumes the `BinaryHeap` and returns a vector in sorted
480 /// (ascending) order.
487 /// use std::collections::BinaryHeap;
489 /// let mut heap = BinaryHeap::from(vec![1, 2, 4, 5, 7]);
493 /// let vec = heap.into_sorted_vec();
494 /// assert_eq!(vec, [1, 2, 3, 4, 5, 6, 7]);
496 #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
497 pub fn into_sorted_vec(mut self) -> Vec<T> {
498 let mut end = self.len();
501 // SAFETY: `end` goes from `self.len() - 1` to 1 (both included),
502 // so it's always a valid index to access.
503 // It is safe to access index 0 (i.e. `ptr`), because
504 // 1 <= end < self.len(), which means self.len() >= 2.
506 let ptr = self.data.as_mut_ptr();
507 ptr::swap(ptr, ptr.add(end));
509 self.sift_down_range(0, end);
514 // The implementations of sift_up and sift_down use unsafe blocks in
515 // order to move an element out of the vector (leaving behind a
516 // hole), shift along the others and move the removed element back into the
517 // vector at the final location of the hole.
518 // The `Hole` type is used to represent this, and make sure
519 // the hole is filled back at the end of its scope, even on panic.
520 // Using a hole reduces the constant factor compared to using swaps,
521 // which involves twice as many moves.
522 fn sift_up(&mut self, start: usize, pos: usize) -> usize {
// Bubble the element at `pos` up toward `start` until its parent is no
// smaller, restoring the max-heap invariant along that path; the
// signature shows it returns the element's final index as `usize`.
// NOTE(review): this extract is truncated — the loop `break`, the
// `hole.move_to` follow-up, and the final return are not visible here.
524 // Take out the value at `pos` and create a hole.
525 let mut hole = Hole::new(&mut self.data, pos);
527 while hole.pos() > start {
// Parent of node i in the implicit binary tree lives at (i - 1) / 2.
528 let parent = (hole.pos() - 1) / 2;
// Heap property holds once the parent is >= the moved element.
529 if hole.element() <= hole.get(parent) {
532 hole.move_to(parent);
538 /// Take an element at `pos` and move it down the heap,
539 /// while its children are larger.
540 fn sift_down_range(&mut self, pos: usize, end: usize) {
// Sift downward but treat the heap as ending at index `end`
// (exclusive); `into_sorted_vec` relies on this to shrink the heap
// region one slot per iteration.
542 let mut hole = Hole::new(&mut self.data, pos);
// First child of node i is at 2 * i + 1, its sibling at 2 * i + 2.
543 let mut child = 2 * pos + 1;
// Loop while *both* children are inside the range (child + 1 < end).
544 while child < end - 1 {
545 // compare with the greater of the two children
546 child += (hole.get(child) <= hole.get(child + 1)) as usize;
547 // if we are already in order, stop.
548 if hole.element() >= hole.get(child) {
// NOTE(review): truncated extract — the `break` and the
// `hole.move_to(child)` call between these lines are missing.
552 child = 2 * hole.pos() + 1;
// Final left-only child at the very end of the range still needs a
// single comparison/move.
554 if child == end - 1 && hole.element() < hole.get(child) {
560 fn sift_down(&mut self, pos: usize) {
561 let len = self.len();
562 self.sift_down_range(pos, len);
565 /// Take an element at `pos` and move it all the way down the heap,
566 /// then sift it up to its position.
568 /// Note: This is faster when the element is known to be large / should
569 /// be closer to the bottom.
570 fn sift_down_to_bottom(&mut self, mut pos: usize) {
// Unconditionally walk the hole down to a leaf, always following the
// larger child (one comparison per level instead of two), then sift
// the displaced element back up from where it landed.
571 let end = self.len();
// NOTE(review): truncated extract — the upstream `let start = pos;`
// binding consumed by the final `sift_up` call is not visible here.
574 let mut hole = Hole::new(&mut self.data, pos);
575 let mut child = 2 * pos + 1;
576 while child < end - 1 {
// Step to the greater of the two children.
577 child += (hole.get(child) <= hole.get(child + 1)) as usize;
579 child = 2 * hole.pos() + 1;
// A lone left child at the end of the heap is still a valid move.
581 if child == end - 1 {
// Sift the moved element back up from the leaf it reached.
586 self.sift_up(start, pos);
589 fn rebuild(&mut self) {
590 let mut n = self.len() / 2;
597 /// Moves all the elements of `other` into `self`, leaving `other` empty.
604 /// use std::collections::BinaryHeap;
606 /// let v = vec![-10, 1, 2, 3, 3];
607 /// let mut a = BinaryHeap::from(v);
609 /// let v = vec![-20, 5, 43];
610 /// let mut b = BinaryHeap::from(v);
612 /// a.append(&mut b);
614 /// assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
615 /// assert!(b.is_empty());
617 #[stable(feature = "binary_heap_append", since = "1.11.0")]
618 pub fn append(&mut self, other: &mut Self) {
619 if self.len() < other.len() {
623 if other.is_empty() {
#[inline(always)]
fn log2_fast(x: usize) -> usize {
    // floor(log2(x)) computed from the leading-zero count.
    // Requires x > 0: for x == 0 the subtraction would underflow.
    (usize::BITS - x.leading_zeros() - 1) as usize
}
632 // `rebuild` takes O(len1 + len2) operations
633 // and about 2 * (len1 + len2) comparisons in the worst case
634 // while `extend` takes O(len2 * log(len1)) operations
635 // and about 1 * len2 * log_2(len1) comparisons in the worst case,
636 // assuming len1 >= len2. For larger heaps, the crossover point
637 // no longer follows this reasoning and was determined empirically.
// Decide, for `append`, whether rebuilding the merged heap in place
// beats pushing `other`'s elements one by one:
// `rebuild` costs about 2 * (len1 + len2) comparisons, while `extend`
// costs about len2 * log2(len1); for large heaps the crossover was
// determined empirically (hence the `* 11` branch).
fn better_to_rebuild(len1: usize, len2: usize) -> bool {
    // floor(log2(x)) via leading-zero count; requires x > 0.
    #[inline(always)]
    fn log2_fast(x: usize) -> usize {
        (usize::BITS - x.leading_zeros() - 1) as usize
    }

    let tot_len = len1 + len2;
    if tot_len <= 2048 {
        // 2 * tot_len comparisons for rebuild vs len2 * log2(len1)
        // comparisons for extend — pick the cheaper.
        2 * tot_len < len2 * log2_fast(len1)
    } else {
        // Empirically-determined crossover for larger heaps.
        2 * tot_len < len2 * 11
    }
}
648 if better_to_rebuild(self.len(), other.len()) {
649 self.data.append(&mut other.data);
652 self.extend(other.drain());
656 /// Returns an iterator which retrieves elements in heap order.
657 /// The retrieved elements are removed from the original heap.
658 /// The remaining elements will be removed on drop in heap order.
661 /// * `.drain_sorted()` is *O*(*n* \* log(*n*)); much slower than `.drain()`.
662 /// You should use the latter for most cases.
669 /// #![feature(binary_heap_drain_sorted)]
670 /// use std::collections::BinaryHeap;
672 /// let mut heap = BinaryHeap::from(vec![1, 2, 3, 4, 5]);
673 /// assert_eq!(heap.len(), 5);
675 /// drop(heap.drain_sorted()); // removes all elements in heap order
676 /// assert_eq!(heap.len(), 0);
679 #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
680 pub fn drain_sorted(&mut self) -> DrainSorted<'_, T> {
681 DrainSorted { inner: self }
684 /// Retains only the elements specified by the predicate.
686 /// In other words, remove all elements `e` such that `f(&e)` returns
687 /// `false`. The elements are visited in unsorted (and unspecified) order.
694 /// #![feature(binary_heap_retain)]
695 /// use std::collections::BinaryHeap;
697 /// let mut heap = BinaryHeap::from(vec![-10, -5, 1, 2, 4, 13]);
699 /// heap.retain(|x| x % 2 == 0); // only keep even numbers
701 /// assert_eq!(heap.into_sorted_vec(), [-10, 2, 4])
703 #[unstable(feature = "binary_heap_retain", issue = "71503")]
704 pub fn retain<F>(&mut self, f: F)
706 F: FnMut(&T) -> bool,
713 impl<T> BinaryHeap<T> {
714 /// Returns an iterator visiting all values in the underlying vector, in
722 /// use std::collections::BinaryHeap;
723 /// let heap = BinaryHeap::from(vec![1, 2, 3, 4]);
725 /// // Print 1, 2, 3, 4 in arbitrary order
726 /// for x in heap.iter() {
727 /// println!("{}", x);
730 #[stable(feature = "rust1", since = "1.0.0")]
731 pub fn iter(&self) -> Iter<'_, T> {
732 Iter { iter: self.data.iter() }
735 /// Returns an iterator which retrieves elements in heap order.
736 /// This method consumes the original heap.
743 /// #![feature(binary_heap_into_iter_sorted)]
744 /// use std::collections::BinaryHeap;
745 /// let heap = BinaryHeap::from(vec![1, 2, 3, 4, 5]);
747 /// assert_eq!(heap.into_iter_sorted().take(2).collect::<Vec<_>>(), vec![5, 4]);
749 #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
750 pub fn into_iter_sorted(self) -> IntoIterSorted<T> {
751 IntoIterSorted { inner: self }
754 /// Returns the greatest item in the binary heap, or `None` if it is empty.
761 /// use std::collections::BinaryHeap;
762 /// let mut heap = BinaryHeap::new();
763 /// assert_eq!(heap.peek(), None);
768 /// assert_eq!(heap.peek(), Some(&5));
772 /// # Time complexity
774 /// Cost is *O*(1) in the worst case.
775 #[stable(feature = "rust1", since = "1.0.0")]
776 pub fn peek(&self) -> Option<&T> {
780 /// Returns the number of elements the binary heap can hold without reallocating.
787 /// use std::collections::BinaryHeap;
788 /// let mut heap = BinaryHeap::with_capacity(100);
789 /// assert!(heap.capacity() >= 100);
792 #[stable(feature = "rust1", since = "1.0.0")]
793 pub fn capacity(&self) -> usize {
797 /// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the
798 /// given `BinaryHeap`. Does nothing if the capacity is already sufficient.
800 /// Note that the allocator may give the collection more space than it requests. Therefore
801 /// capacity can not be relied upon to be precisely minimal. Prefer [`reserve`] if future
802 /// insertions are expected.
806 /// Panics if the new capacity overflows `usize`.
813 /// use std::collections::BinaryHeap;
814 /// let mut heap = BinaryHeap::new();
815 /// heap.reserve_exact(100);
816 /// assert!(heap.capacity() >= 100);
820 /// [`reserve`]: BinaryHeap::reserve
821 #[stable(feature = "rust1", since = "1.0.0")]
822 pub fn reserve_exact(&mut self, additional: usize) {
823 self.data.reserve_exact(additional);
826 /// Reserves capacity for at least `additional` more elements to be inserted in the
827 /// `BinaryHeap`. The collection may reserve more space to avoid frequent reallocations.
831 /// Panics if the new capacity overflows `usize`.
838 /// use std::collections::BinaryHeap;
839 /// let mut heap = BinaryHeap::new();
840 /// heap.reserve(100);
841 /// assert!(heap.capacity() >= 100);
844 #[stable(feature = "rust1", since = "1.0.0")]
845 pub fn reserve(&mut self, additional: usize) {
846 self.data.reserve(additional);
849 /// Discards as much additional capacity as possible.
856 /// use std::collections::BinaryHeap;
857 /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
859 /// assert!(heap.capacity() >= 100);
860 /// heap.shrink_to_fit();
861 /// assert!(heap.capacity() == 0);
863 #[stable(feature = "rust1", since = "1.0.0")]
864 pub fn shrink_to_fit(&mut self) {
865 self.data.shrink_to_fit();
868 /// Discards capacity with a lower bound.
870 /// The capacity will remain at least as large as both the length
871 /// and the supplied value.
873 /// If the current capacity is less than the lower limit, this is a no-op.
878 /// #![feature(shrink_to)]
879 /// use std::collections::BinaryHeap;
880 /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
882 /// assert!(heap.capacity() >= 100);
883 /// heap.shrink_to(10);
884 /// assert!(heap.capacity() >= 10);
887 #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
888 pub fn shrink_to(&mut self, min_capacity: usize) {
889 self.data.shrink_to(min_capacity)
892 /// Returns a slice of all values in the underlying vector, in arbitrary
900 /// #![feature(binary_heap_as_slice)]
901 /// use std::collections::BinaryHeap;
902 /// use std::io::{self, Write};
904 /// let heap = BinaryHeap::from(vec![1, 2, 3, 4, 5, 6, 7]);
906 /// io::sink().write(heap.as_slice()).unwrap();
908 #[unstable(feature = "binary_heap_as_slice", issue = "83659")]
909 pub fn as_slice(&self) -> &[T] {
913 /// Consumes the `BinaryHeap` and returns the underlying vector
914 /// in arbitrary order.
921 /// use std::collections::BinaryHeap;
922 /// let heap = BinaryHeap::from(vec![1, 2, 3, 4, 5, 6, 7]);
923 /// let vec = heap.into_vec();
925 /// // Will print in some order
927 /// println!("{}", x);
930 #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
931 pub fn into_vec(self) -> Vec<T> {
935 /// Returns the length of the binary heap.
942 /// use std::collections::BinaryHeap;
943 /// let heap = BinaryHeap::from(vec![1, 3]);
945 /// assert_eq!(heap.len(), 2);
947 #[doc(alias = "length")]
948 #[stable(feature = "rust1", since = "1.0.0")]
949 pub fn len(&self) -> usize {
953 /// Checks if the binary heap is empty.
960 /// use std::collections::BinaryHeap;
961 /// let mut heap = BinaryHeap::new();
963 /// assert!(heap.is_empty());
969 /// assert!(!heap.is_empty());
971 #[stable(feature = "rust1", since = "1.0.0")]
972 pub fn is_empty(&self) -> bool {
976 /// Clears the binary heap, returning an iterator over the removed elements.
978 /// The elements are removed in arbitrary order.
985 /// use std::collections::BinaryHeap;
986 /// let mut heap = BinaryHeap::from(vec![1, 3]);
988 /// assert!(!heap.is_empty());
990 /// for x in heap.drain() {
991 /// println!("{}", x);
994 /// assert!(heap.is_empty());
997 #[stable(feature = "drain", since = "1.6.0")]
998 pub fn drain(&mut self) -> Drain<'_, T> {
999 Drain { iter: self.data.drain(..) }
1002 /// Drops all items from the binary heap.
1009 /// use std::collections::BinaryHeap;
1010 /// let mut heap = BinaryHeap::from(vec![1, 3]);
1012 /// assert!(!heap.is_empty());
1016 /// assert!(heap.is_empty());
1018 #[stable(feature = "rust1", since = "1.0.0")]
1019 pub fn clear(&mut self) {
1024 /// Hole represents a hole in a slice i.e., an index without valid value
1025 /// (because it was moved from or duplicated).
1026 /// In drop, `Hole` will restore the slice by filling the hole
1027 /// position with the value that was originally removed.
1028 struct Hole<'a, T: 'a> {
// NOTE(review): truncated extract — the `data: &'a mut [T]` and
// `pos: usize` fields used by the impl below are not visible here.
1030 elt: ManuallyDrop<T>,
1034 impl<'a, T> Hole<'a, T> {
1035 /// Create a new `Hole` at index `pos`.
1037 /// Unsafe because pos must be within the data slice.
1039 unsafe fn new(data: &'a mut [T], pos: usize) -> Self {
1040 debug_assert!(pos < data.len());
1041 // SAFE: pos should be inside the slice
1042 let elt = unsafe { ptr::read(data.get_unchecked(pos)) };
// `elt` is a bitwise copy of data[pos]; ManuallyDrop prevents a
// double drop if the Hole is leaked mid-sift.
1043 Hole { data, elt: ManuallyDrop::new(elt), pos }
/// Returns the current position of the hole.
1047 fn pos(&self) -> usize {
1051 /// Returns a reference to the element removed.
1053 fn element(&self) -> &T {
1057 /// Returns a reference to the element at `index`.
1059 /// Unsafe because index must be within the data slice and not equal to pos.
1061 unsafe fn get(&self, index: usize) -> &T {
1062 debug_assert!(index != self.pos);
1063 debug_assert!(index < self.data.len());
// SAFETY: caller guarantees `index` is in bounds and != pos, so the
// slot holds a valid, initialized value.
1064 unsafe { self.data.get_unchecked(index) }
1067 /// Move hole to new location
1069 /// Unsafe because index must be within the data slice and not equal to pos.
1071 unsafe fn move_to(&mut self, index: usize) {
1072 debug_assert!(index != self.pos);
1073 debug_assert!(index < self.data.len());
1075 let ptr = self.data.as_mut_ptr();
1076 let index_ptr: *const _ = ptr.add(index);
1077 let hole_ptr = ptr.add(self.pos);
// Copy the element at `index` into the hole; `index` becomes the new
// hole position (upstream updates `self.pos` right after this copy).
1078 ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
1084 impl<T> Drop for Hole<'_, T> {
1086 fn drop(&mut self) {
1087 // fill the hole again
1090 ptr::copy_nonoverlapping(&*self.elt, self.data.get_unchecked_mut(pos), 1);
1095 /// An iterator over the elements of a `BinaryHeap`.
1097 /// This `struct` is created by [`BinaryHeap::iter()`]. See its
1098 /// documentation for more.
1100 /// [`iter`]: BinaryHeap::iter
1101 #[stable(feature = "rust1", since = "1.0.0")]
1102 pub struct Iter<'a, T: 'a> {
1103 iter: slice::Iter<'a, T>,
1106 #[stable(feature = "collection_debug", since = "1.17.0")]
1107 impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
1108 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1109 f.debug_tuple("Iter").field(&self.iter.as_slice()).finish()
1113 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
1114 #[stable(feature = "rust1", since = "1.0.0")]
1115 impl<T> Clone for Iter<'_, T> {
1116 fn clone(&self) -> Self {
1117 Iter { iter: self.iter.clone() }
1121 #[stable(feature = "rust1", since = "1.0.0")]
1122 impl<'a, T> Iterator for Iter<'a, T> {
1126 fn next(&mut self) -> Option<&'a T> {
1131 fn size_hint(&self) -> (usize, Option<usize>) {
1132 self.iter.size_hint()
1136 fn last(self) -> Option<&'a T> {
1141 #[stable(feature = "rust1", since = "1.0.0")]
1142 impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
1144 fn next_back(&mut self) -> Option<&'a T> {
1145 self.iter.next_back()
1149 #[stable(feature = "rust1", since = "1.0.0")]
1150 impl<T> ExactSizeIterator for Iter<'_, T> {
1151 fn is_empty(&self) -> bool {
1152 self.iter.is_empty()
1156 #[stable(feature = "fused", since = "1.26.0")]
1157 impl<T> FusedIterator for Iter<'_, T> {}
1159 /// An owning iterator over the elements of a `BinaryHeap`.
1161 /// This `struct` is created by [`BinaryHeap::into_iter()`]
1162 /// (provided by the `IntoIterator` trait). See its documentation for more.
1164 /// [`into_iter`]: BinaryHeap::into_iter
1165 #[stable(feature = "rust1", since = "1.0.0")]
1167 pub struct IntoIter<T> {
1168 iter: vec::IntoIter<T>,
1171 #[stable(feature = "collection_debug", since = "1.17.0")]
1172 impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
1173 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1174 f.debug_tuple("IntoIter").field(&self.iter.as_slice()).finish()
1178 #[stable(feature = "rust1", since = "1.0.0")]
1179 impl<T> Iterator for IntoIter<T> {
1183 fn next(&mut self) -> Option<T> {
1188 fn size_hint(&self) -> (usize, Option<usize>) {
1189 self.iter.size_hint()
1193 #[stable(feature = "rust1", since = "1.0.0")]
1194 impl<T> DoubleEndedIterator for IntoIter<T> {
1196 fn next_back(&mut self) -> Option<T> {
1197 self.iter.next_back()
1201 #[stable(feature = "rust1", since = "1.0.0")]
1202 impl<T> ExactSizeIterator for IntoIter<T> {
1203 fn is_empty(&self) -> bool {
1204 self.iter.is_empty()
1208 #[stable(feature = "fused", since = "1.26.0")]
1209 impl<T> FusedIterator for IntoIter<T> {}
1211 #[unstable(issue = "none", feature = "inplace_iteration")]
1212 unsafe impl<T> SourceIter for IntoIter<T> {
1213 type Source = IntoIter<T>;
1216 unsafe fn as_inner(&mut self) -> &mut Self::Source {
1221 #[unstable(issue = "none", feature = "inplace_iteration")]
1222 unsafe impl<I> InPlaceIterable for IntoIter<I> {}
1224 impl<I> AsIntoIter for IntoIter<I> {
1227 fn as_into_iter(&mut self) -> &mut vec::IntoIter<Self::Item> {
1232 #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
1233 #[derive(Clone, Debug)]
1234 pub struct IntoIterSorted<T> {
1235 inner: BinaryHeap<T>,
1238 #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
1239 impl<T: Ord> Iterator for IntoIterSorted<T> {
1243 fn next(&mut self) -> Option<T> {
1248 fn size_hint(&self) -> (usize, Option<usize>) {
1249 let exact = self.inner.len();
1250 (exact, Some(exact))
1254 #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
1255 impl<T: Ord> ExactSizeIterator for IntoIterSorted<T> {}
1257 #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
1258 impl<T: Ord> FusedIterator for IntoIterSorted<T> {}
1260 #[unstable(feature = "trusted_len", issue = "37572")]
1261 unsafe impl<T: Ord> TrustedLen for IntoIterSorted<T> {}
1263 /// A draining iterator over the elements of a `BinaryHeap`.
1265 /// This `struct` is created by [`BinaryHeap::drain()`]. See its
1266 /// documentation for more.
1268 /// [`drain`]: BinaryHeap::drain
1269 #[stable(feature = "drain", since = "1.6.0")]
1271 pub struct Drain<'a, T: 'a> {
1272 iter: vec::Drain<'a, T>,
1275 #[stable(feature = "drain", since = "1.6.0")]
1276 impl<T> Iterator for Drain<'_, T> {
1280 fn next(&mut self) -> Option<T> {
1285 fn size_hint(&self) -> (usize, Option<usize>) {
1286 self.iter.size_hint()
1290 #[stable(feature = "drain", since = "1.6.0")]
1291 impl<T> DoubleEndedIterator for Drain<'_, T> {
1293 fn next_back(&mut self) -> Option<T> {
1294 self.iter.next_back()
1298 #[stable(feature = "drain", since = "1.6.0")]
1299 impl<T> ExactSizeIterator for Drain<'_, T> {
1300 fn is_empty(&self) -> bool {
1301 self.iter.is_empty()
1305 #[stable(feature = "fused", since = "1.26.0")]
1306 impl<T> FusedIterator for Drain<'_, T> {}
/// A draining iterator over the elements of a `BinaryHeap`.
///
/// This `struct` is created by [`BinaryHeap::drain_sorted()`]. See its
/// documentation for more.
///
/// [`drain_sorted`]: BinaryHeap::drain_sorted
#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
pub struct DrainSorted<'a, T: Ord> {
    // Unlike `Drain`, this borrows the heap and pops from it, so elements are
    // yielded in heap order and any left unconsumed are removed by the `Drop`
    // impl below.
    inner: &'a mut BinaryHeap<T>,
#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
impl<'a, T: Ord> Drop for DrainSorted<'a, T> {
    /// Removes heap elements in heap order.
    fn drop(&mut self) {
        // Panic guard: if dropping a popped element panics, the guard's own
        // `drop` still empties the heap before the panic continues unwinding,
        // so no element is silently left behind.
        struct DropGuard<'r, 'a, T: Ord>(&'r mut DrainSorted<'a, T>);

        impl<'r, 'a, T: Ord> Drop for DropGuard<'r, 'a, T> {
            fn drop(&mut self) {
                // Discard every remaining element; each element's own `Drop`
                // runs as it is popped.
                while self.0.inner.pop().is_some() {}

        // Normal path: pop (and drop) each remaining element, with the guard
        // armed around the drop of `item`. NOTE(review): the disarming of the
        // guard on the non-panicking path (a `mem::forget(guard)`) is not
        // visible in this view — confirm against the full implementation.
        while let Some(item) = self.inner.pop() {
            let guard = DropGuard(self);
#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
impl<T: Ord> Iterator for DrainSorted<'_, T> {
    // Yields the next element in heap order; presumably pops the borrowed
    // heap — body not visible in this view.
    fn next(&mut self) -> Option<T> {

    // Exact: everything still in the heap will be yielded (or removed on
    // drop), justifying the marker impls below.
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = self.inner.len();
        (exact, Some(exact))
// `size_hint` above is exact (`(len, Some(len))`), so the length-related
// marker traits hold.
#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
impl<T: Ord> ExactSizeIterator for DrainSorted<'_, T> {}

// An emptied heap keeps yielding `None` from `pop`, so the iterator is fused.
#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
impl<T: Ord> FusedIterator for DrainSorted<'_, T> {}

// SAFETY: `size_hint` reports the exact remaining length, as required.
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T: Ord> TrustedLen for DrainSorted<'_, T> {}
#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
impl<T: Ord> From<Vec<T>> for BinaryHeap<T> {
    /// Converts a `Vec<T>` into a `BinaryHeap<T>`.
    ///
    /// This conversion happens in-place, and has *O*(*n*) time complexity.
    fn from(vec: Vec<T>) -> BinaryHeap<T> {
        // Adopt the vector's buffer directly, then restore the heap
        // invariant. NOTE(review): the heapify/rebuild step is not visible in
        // this view — confirm it follows before `heap` is returned.
        let mut heap = BinaryHeap { data: vec };
#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
impl<T> From<BinaryHeap<T>> for Vec<T> {
    /// Converts a `BinaryHeap<T>` into a `Vec<T>`.
    ///
    /// This conversion requires no data movement or allocation, and has
    /// constant time complexity.
    // The backing vector is handed back as-is; its order is the heap's
    // internal layout, not sorted order.
    fn from(heap: BinaryHeap<T>) -> Vec<T> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> FromIterator<T> for BinaryHeap<T> {
    // Collect into a `Vec` first, then heapify via `From<Vec<T>>`, which is
    // documented above as O(n) — cheaper than pushing element-by-element.
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> BinaryHeap<T> {
        BinaryHeap::from(iter.into_iter().collect::<Vec<_>>())
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> IntoIterator for BinaryHeap<T> {
    type IntoIter = IntoIter<T>;

    /// Creates a consuming iterator, that is, one that moves each value out of
    /// the binary heap in arbitrary order. The binary heap cannot be used
    /// after calling this.
    /// use std::collections::BinaryHeap;
    /// let heap = BinaryHeap::from(vec![1, 2, 3, 4]);
    /// // Print 1, 2, 3, 4 in arbitrary order
    /// for x in heap.into_iter() {
    ///     // x has type i32, not &i32
    ///     println!("{}", x);
    // Iterate the backing vector directly: arbitrary order, but no
    // per-element pop cost.
    fn into_iter(self) -> IntoIter<T> {
        IntoIter { iter: self.data.into_iter() }
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a BinaryHeap<T> {
    type IntoIter = Iter<'a, T>;

    // Borrowing counterpart of the consuming `into_iter` above; the heap
    // remains usable afterwards.
    fn into_iter(self) -> Iter<'a, T> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Extend<T> for BinaryHeap<T> {
    // Dispatch through `SpecExtend` so that extending from another
    // `BinaryHeap` can take a specialized path (see the impls below).
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        <Self as SpecExtend<I>>::spec_extend(self, iter);

    // Single-element fast path; presumably a plain `push` — body not
    // visible in this view.
    fn extend_one(&mut self, item: T) {

    // Pre-grow the backing vector for a known lower bound of new elements.
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
impl<T: Ord, I: IntoIterator<Item = T>> SpecExtend<I> for BinaryHeap<T> {
    // Fallback for arbitrary iterators: reserve once, then push each element
    // (see `extend_desugared` below).
    default fn spec_extend(&mut self, iter: I) {
        self.extend_desugared(iter.into_iter());
// Specialization for extending from another `BinaryHeap`. NOTE(review): the
// body is not visible in this view — presumably it merges the heaps wholesale
// (e.g. an `append`) instead of pushing element-by-element; confirm against
// the full implementation.
impl<T: Ord> SpecExtend<BinaryHeap<T>> for BinaryHeap<T> {
    fn spec_extend(&mut self, ref mut other: BinaryHeap<T>) {
impl<T: Ord> BinaryHeap<T> {
    // Shared helper for the default `SpecExtend` path: reserve the iterator's
    // lower size bound up front, then push each element — O(log n) apiece per
    // the module docs on insertion.
    fn extend_desugared<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        let iterator = iter.into_iter();
        // Only the lower bound is trustworthy; the upper bound may be absent.
        let (lower, _) = iterator.size_hint();

        self.reserve(lower);

        iterator.for_each(move |elem| self.push(elem));
#[stable(feature = "extend_ref", since = "1.2.0")]
impl<'a, T: 'a + Ord + Copy> Extend<&'a T> for BinaryHeap<T> {
    // Extending from `&T` (for `Copy` element types) copies each element and
    // defers to the owned-`T` `Extend` impl above.
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        self.extend(iter.into_iter().cloned());

    // `&item` destructures the reference, copying the element out; body not
    // visible in this view (presumably a plain `push`).
    fn extend_one(&mut self, &item: &'a T) {

    // Pre-grow the backing vector for a known lower bound of new elements.
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);